Initial commit
This commit is contained in:
커밋
2a708b3318
2
.gitattributes
vendored
Normal file
2
.gitattributes
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
/mvnw text eol=lf
|
||||||
|
*.cmd text eol=crlf
|
||||||
41
.gitignore
vendored
Normal file
41
.gitignore
vendored
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
|
||||||
|
target/
|
||||||
|
.mvn/wrapper/maven-wrapper.jar
|
||||||
|
!**/src/main/**/target/
|
||||||
|
!**/src/test/**/target/
|
||||||
|
|
||||||
|
### STS ###
|
||||||
|
.apt_generated
|
||||||
|
.classpath
|
||||||
|
.factorypath
|
||||||
|
.project
|
||||||
|
.settings
|
||||||
|
.springBeans
|
||||||
|
.sts4-cache
|
||||||
|
|
||||||
|
### IntelliJ IDEA ###
|
||||||
|
.idea
|
||||||
|
*.iws
|
||||||
|
*.iml
|
||||||
|
*.ipr
|
||||||
|
|
||||||
|
### NetBeans ###
|
||||||
|
/nbproject/private/
|
||||||
|
/nbbuild/
|
||||||
|
/dist/
|
||||||
|
/nbdist/
|
||||||
|
/.nb-gradle/
|
||||||
|
build/
|
||||||
|
!**/src/main/**/build/
|
||||||
|
!**/src/test/**/build/
|
||||||
|
|
||||||
|
### VS Code ###
|
||||||
|
.claudeignore
|
||||||
|
.vscode/
|
||||||
|
scripts/
|
||||||
|
.claude
|
||||||
|
.mvn/
|
||||||
|
logs/
|
||||||
|
sql/
|
||||||
|
*.md
|
||||||
|
nul
|
||||||
259
mvnw
vendored
Normal file
259
mvnw
vendored
Normal file
@ -0,0 +1,259 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
# or more contributor license agreements. See the NOTICE file
|
||||||
|
# distributed with this work for additional information
|
||||||
|
# regarding copyright ownership. The ASF licenses this file
|
||||||
|
# to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance
|
||||||
|
# with the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing,
|
||||||
|
# software distributed under the License is distributed on an
|
||||||
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
# KIND, either express or implied. See the License for the
|
||||||
|
# specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# Apache Maven Wrapper startup batch script, version 3.3.2
|
||||||
|
#
|
||||||
|
# Optional ENV vars
|
||||||
|
# -----------------
|
||||||
|
# JAVA_HOME - location of a JDK home dir, required when download maven via java source
|
||||||
|
# MVNW_REPOURL - repo url base for downloading maven distribution
|
||||||
|
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
|
||||||
|
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
set -euf
|
||||||
|
[ "${MVNW_VERBOSE-}" != debug ] || set -x
|
||||||
|
|
||||||
|
# OS specific support.
|
||||||
|
native_path() { printf %s\\n "$1"; }
|
||||||
|
case "$(uname)" in
|
||||||
|
CYGWIN* | MINGW*)
|
||||||
|
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
|
||||||
|
native_path() { cygpath --path --windows "$1"; }
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# set JAVACMD and JAVACCMD
|
||||||
|
set_java_home() {
|
||||||
|
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
|
||||||
|
if [ -n "${JAVA_HOME-}" ]; then
|
||||||
|
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
|
||||||
|
# IBM's JDK on AIX uses strange locations for the executables
|
||||||
|
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||||
|
JAVACCMD="$JAVA_HOME/jre/sh/javac"
|
||||||
|
else
|
||||||
|
JAVACMD="$JAVA_HOME/bin/java"
|
||||||
|
JAVACCMD="$JAVA_HOME/bin/javac"
|
||||||
|
|
||||||
|
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
|
||||||
|
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
|
||||||
|
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
JAVACMD="$(
|
||||||
|
'set' +e
|
||||||
|
'unset' -f command 2>/dev/null
|
||||||
|
'command' -v java
|
||||||
|
)" || :
|
||||||
|
JAVACCMD="$(
|
||||||
|
'set' +e
|
||||||
|
'unset' -f command 2>/dev/null
|
||||||
|
'command' -v javac
|
||||||
|
)" || :
|
||||||
|
|
||||||
|
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
|
||||||
|
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# hash string like Java String::hashCode
|
||||||
|
hash_string() {
|
||||||
|
str="${1:-}" h=0
|
||||||
|
while [ -n "$str" ]; do
|
||||||
|
char="${str%"${str#?}"}"
|
||||||
|
h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296))
|
||||||
|
str="${str#?}"
|
||||||
|
done
|
||||||
|
printf %x\\n $h
|
||||||
|
}
|
||||||
|
|
||||||
|
verbose() { :; }
|
||||||
|
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
|
||||||
|
|
||||||
|
die() {
|
||||||
|
printf %s\\n "$1" >&2
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
trim() {
|
||||||
|
# MWRAPPER-139:
|
||||||
|
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
|
||||||
|
# Needed for removing poorly interpreted newline sequences when running in more
|
||||||
|
# exotic environments such as mingw bash on Windows.
|
||||||
|
printf "%s" "${1}" | tr -d '[:space:]'
|
||||||
|
}
|
||||||
|
|
||||||
|
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
|
||||||
|
while IFS="=" read -r key value; do
|
||||||
|
case "${key-}" in
|
||||||
|
distributionUrl) distributionUrl=$(trim "${value-}") ;;
|
||||||
|
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
|
||||||
|
esac
|
||||||
|
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
|
||||||
|
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
|
||||||
|
|
||||||
|
case "${distributionUrl##*/}" in
|
||||||
|
maven-mvnd-*bin.*)
|
||||||
|
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
|
||||||
|
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
|
||||||
|
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
|
||||||
|
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
|
||||||
|
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
|
||||||
|
:Linux*x86_64*) distributionPlatform=linux-amd64 ;;
|
||||||
|
*)
|
||||||
|
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
|
||||||
|
distributionPlatform=linux-amd64
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
|
||||||
|
;;
|
||||||
|
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
|
||||||
|
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# apply MVNW_REPOURL and calculate MAVEN_HOME
|
||||||
|
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
|
||||||
|
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
|
||||||
|
distributionUrlName="${distributionUrl##*/}"
|
||||||
|
distributionUrlNameMain="${distributionUrlName%.*}"
|
||||||
|
distributionUrlNameMain="${distributionUrlNameMain%-bin}"
|
||||||
|
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
|
||||||
|
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
|
||||||
|
|
||||||
|
exec_maven() {
|
||||||
|
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
|
||||||
|
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ -d "$MAVEN_HOME" ]; then
|
||||||
|
verbose "found existing MAVEN_HOME at $MAVEN_HOME"
|
||||||
|
exec_maven "$@"
|
||||||
|
fi
|
||||||
|
|
||||||
|
case "${distributionUrl-}" in
|
||||||
|
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
|
||||||
|
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# prepare tmp dir
|
||||||
|
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
|
||||||
|
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
|
||||||
|
trap clean HUP INT TERM EXIT
|
||||||
|
else
|
||||||
|
die "cannot create temp dir"
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p -- "${MAVEN_HOME%/*}"
|
||||||
|
|
||||||
|
# Download and Install Apache Maven
|
||||||
|
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
|
||||||
|
verbose "Downloading from: $distributionUrl"
|
||||||
|
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
|
||||||
|
|
||||||
|
# select .zip or .tar.gz
|
||||||
|
if ! command -v unzip >/dev/null; then
|
||||||
|
distributionUrl="${distributionUrl%.zip}.tar.gz"
|
||||||
|
distributionUrlName="${distributionUrl##*/}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# verbose opt
|
||||||
|
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
|
||||||
|
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
|
||||||
|
|
||||||
|
# normalize http auth
|
||||||
|
case "${MVNW_PASSWORD:+has-password}" in
|
||||||
|
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
|
||||||
|
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
|
||||||
|
verbose "Found wget ... using wget"
|
||||||
|
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
|
||||||
|
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
|
||||||
|
verbose "Found curl ... using curl"
|
||||||
|
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
|
||||||
|
elif set_java_home; then
|
||||||
|
verbose "Falling back to use Java to download"
|
||||||
|
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
|
||||||
|
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
|
||||||
|
cat >"$javaSource" <<-END
|
||||||
|
public class Downloader extends java.net.Authenticator
|
||||||
|
{
|
||||||
|
protected java.net.PasswordAuthentication getPasswordAuthentication()
|
||||||
|
{
|
||||||
|
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
|
||||||
|
}
|
||||||
|
public static void main( String[] args ) throws Exception
|
||||||
|
{
|
||||||
|
setDefault( new Downloader() );
|
||||||
|
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
END
|
||||||
|
# For Cygwin/MinGW, switch paths to Windows format before running javac and java
|
||||||
|
verbose " - Compiling Downloader.java ..."
|
||||||
|
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
|
||||||
|
verbose " - Running Downloader.java ..."
|
||||||
|
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# If specified, validate the SHA-256 sum of the Maven distribution zip file
|
||||||
|
if [ -n "${distributionSha256Sum-}" ]; then
|
||||||
|
distributionSha256Result=false
|
||||||
|
if [ "$MVN_CMD" = mvnd.sh ]; then
|
||||||
|
echo "Checksum validation is not supported for maven-mvnd." >&2
|
||||||
|
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
|
||||||
|
exit 1
|
||||||
|
elif command -v sha256sum >/dev/null; then
|
||||||
|
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
|
||||||
|
distributionSha256Result=true
|
||||||
|
fi
|
||||||
|
elif command -v shasum >/dev/null; then
|
||||||
|
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
|
||||||
|
distributionSha256Result=true
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
|
||||||
|
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
if [ $distributionSha256Result = false ]; then
|
||||||
|
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
|
||||||
|
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# unzip and move
|
||||||
|
if command -v unzip >/dev/null; then
|
||||||
|
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
|
||||||
|
else
|
||||||
|
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
|
||||||
|
fi
|
||||||
|
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
|
||||||
|
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
|
||||||
|
|
||||||
|
clean || :
|
||||||
|
exec_maven "$@"
|
||||||
149
mvnw.cmd
vendored
Normal file
149
mvnw.cmd
vendored
Normal file
@ -0,0 +1,149 @@
|
|||||||
|
<# : batch portion
|
||||||
|
@REM ----------------------------------------------------------------------------
|
||||||
|
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
@REM or more contributor license agreements. See the NOTICE file
|
||||||
|
@REM distributed with this work for additional information
|
||||||
|
@REM regarding copyright ownership. The ASF licenses this file
|
||||||
|
@REM to you under the Apache License, Version 2.0 (the
|
||||||
|
@REM "License"); you may not use this file except in compliance
|
||||||
|
@REM with the License. You may obtain a copy of the License at
|
||||||
|
@REM
|
||||||
|
@REM http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@REM
|
||||||
|
@REM Unless required by applicable law or agreed to in writing,
|
||||||
|
@REM software distributed under the License is distributed on an
|
||||||
|
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
@REM KIND, either express or implied. See the License for the
|
||||||
|
@REM specific language governing permissions and limitations
|
||||||
|
@REM under the License.
|
||||||
|
@REM ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@REM ----------------------------------------------------------------------------
|
||||||
|
@REM Apache Maven Wrapper startup batch script, version 3.3.2
|
||||||
|
@REM
|
||||||
|
@REM Optional ENV vars
|
||||||
|
@REM MVNW_REPOURL - repo url base for downloading maven distribution
|
||||||
|
@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
|
||||||
|
@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output
|
||||||
|
@REM ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0)
|
||||||
|
@SET __MVNW_CMD__=
|
||||||
|
@SET __MVNW_ERROR__=
|
||||||
|
@SET __MVNW_PSMODULEP_SAVE=%PSModulePath%
|
||||||
|
@SET PSModulePath=
|
||||||
|
@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @(
|
||||||
|
IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B)
|
||||||
|
)
|
||||||
|
@SET PSModulePath=%__MVNW_PSMODULEP_SAVE%
|
||||||
|
@SET __MVNW_PSMODULEP_SAVE=
|
||||||
|
@SET __MVNW_ARG0_NAME__=
|
||||||
|
@SET MVNW_USERNAME=
|
||||||
|
@SET MVNW_PASSWORD=
|
||||||
|
@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*)
|
||||||
|
@echo Cannot start maven from wrapper >&2 && exit /b 1
|
||||||
|
@GOTO :EOF
|
||||||
|
: end batch / begin powershell #>
|
||||||
|
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
if ($env:MVNW_VERBOSE -eq "true") {
|
||||||
|
$VerbosePreference = "Continue"
|
||||||
|
}
|
||||||
|
|
||||||
|
# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties
|
||||||
|
$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl
|
||||||
|
if (!$distributionUrl) {
|
||||||
|
Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
|
||||||
|
}
|
||||||
|
|
||||||
|
switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
|
||||||
|
"maven-mvnd-*" {
|
||||||
|
$USE_MVND = $true
|
||||||
|
$distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip"
|
||||||
|
$MVN_CMD = "mvnd.cmd"
|
||||||
|
break
|
||||||
|
}
|
||||||
|
default {
|
||||||
|
$USE_MVND = $false
|
||||||
|
$MVN_CMD = $script -replace '^mvnw','mvn'
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# apply MVNW_REPOURL and calculate MAVEN_HOME
|
||||||
|
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
|
||||||
|
if ($env:MVNW_REPOURL) {
|
||||||
|
$MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" }
|
||||||
|
$distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')"
|
||||||
|
}
|
||||||
|
$distributionUrlName = $distributionUrl -replace '^.*/',''
|
||||||
|
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
|
||||||
|
$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain"
|
||||||
|
if ($env:MAVEN_USER_HOME) {
|
||||||
|
$MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain"
|
||||||
|
}
|
||||||
|
$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
|
||||||
|
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
|
||||||
|
|
||||||
|
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
|
||||||
|
Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME"
|
||||||
|
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
|
||||||
|
exit $?
|
||||||
|
}
|
||||||
|
|
||||||
|
if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) {
|
||||||
|
Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl"
|
||||||
|
}
|
||||||
|
|
||||||
|
# prepare tmp dir
|
||||||
|
$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile
|
||||||
|
$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir"
|
||||||
|
$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null
|
||||||
|
trap {
|
||||||
|
if ($TMP_DOWNLOAD_DIR.Exists) {
|
||||||
|
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
|
||||||
|
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null
|
||||||
|
|
||||||
|
# Download and Install Apache Maven
|
||||||
|
Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
|
||||||
|
Write-Verbose "Downloading from: $distributionUrl"
|
||||||
|
Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
|
||||||
|
|
||||||
|
$webclient = New-Object System.Net.WebClient
|
||||||
|
if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) {
|
||||||
|
$webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD)
|
||||||
|
}
|
||||||
|
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||||
|
$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null
|
||||||
|
|
||||||
|
# If specified, validate the SHA-256 sum of the Maven distribution zip file
|
||||||
|
$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum
|
||||||
|
if ($distributionSha256Sum) {
|
||||||
|
if ($USE_MVND) {
|
||||||
|
Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties."
|
||||||
|
}
|
||||||
|
Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash
|
||||||
|
if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) {
|
||||||
|
Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# unzip and move
|
||||||
|
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
|
||||||
|
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
|
||||||
|
try {
|
||||||
|
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
|
||||||
|
} catch {
|
||||||
|
if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) {
|
||||||
|
Write-Error "fail to move MAVEN_HOME"
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
|
||||||
|
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
|
||||||
|
}
|
||||||
|
|
||||||
|
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
|
||||||
360
pom.xml
Normal file
360
pom.xml
Normal file
@ -0,0 +1,360 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-parent</artifactId>
|
||||||
|
<version>3.2.5</version> <!-- 안정적인 버전으로 변경 -->
|
||||||
|
<relativePath/>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>gc.mda</groupId>
|
||||||
|
<artifactId>signal_batch</artifactId>
|
||||||
|
<version>1.0.0</version>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>signal_batch</name>
|
||||||
|
<description>Vessel Signal Batch Aggregation System</description>
|
||||||
|
|
||||||
|
<properties>
|
||||||
|
<java.version>17</java.version>
|
||||||
|
<maven.compiler.source>17</maven.compiler.source>
|
||||||
|
<maven.compiler.target>17</maven.compiler.target>
|
||||||
|
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||||
|
|
||||||
|
<!-- 데이터베이스 관련 -->
|
||||||
|
<postgresql.version>42.5.6</postgresql.version>
|
||||||
|
<postgis-jdbc.version>2023.1.0</postgis-jdbc.version>
|
||||||
|
<hikaricp.version>5.1.0</hikaricp.version>
|
||||||
|
|
||||||
|
<!-- Spring Batch -->
|
||||||
|
<spring-batch.version>5.1.1</spring-batch.version>
|
||||||
|
|
||||||
|
<!-- 유틸리티 -->
|
||||||
|
<guava.version>33.2.0-jre</guava.version>
|
||||||
|
<commons-lang3.version>3.14.0</commons-lang3.version>
|
||||||
|
<jackson.version>2.17.0</jackson.version>
|
||||||
|
|
||||||
|
<!-- 모니터링 -->
|
||||||
|
<micrometer.version>1.12.5</micrometer.version>
|
||||||
|
|
||||||
|
<!-- 테스트 -->
|
||||||
|
<testcontainers.version>1.19.8</testcontainers.version>
|
||||||
|
</properties>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<!-- Spring Boot Starters -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-batch</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-jdbc</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-web</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-actuator</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-validation</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-aop</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-cache</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-configuration-processor</artifactId>
|
||||||
|
<optional>true</optional>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- WebSocket -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-websocket</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Database -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.postgresql</groupId>
|
||||||
|
<artifactId>postgresql</artifactId>
|
||||||
|
<version>${postgresql.version}</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- PostGIS JDBC Extras (geometry 타입 처리용) -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>net.postgis</groupId>
|
||||||
|
<artifactId>postgis-jdbc</artifactId>
|
||||||
|
<version>${postgis-jdbc.version}</version>
|
||||||
|
<scope>runtime</scope>
|
||||||
|
<exclusions>
|
||||||
|
<exclusion>
|
||||||
|
<groupId>org.postgresql</groupId>
|
||||||
|
<artifactId>postgresql</artifactId>
|
||||||
|
</exclusion>
|
||||||
|
<exclusion>
|
||||||
|
<groupId>org.postgis</groupId>
|
||||||
|
<artifactId>postgis-geometry</artifactId>
|
||||||
|
</exclusion>
|
||||||
|
</exclusions>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Connection Pool -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.zaxxer</groupId>
|
||||||
|
<artifactId>HikariCP</artifactId>
|
||||||
|
<version>${hikaricp.version}</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Spring Batch Extensions -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.batch</groupId>
|
||||||
|
<artifactId>spring-batch-integration</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Spring Retry -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.retry</groupId>
|
||||||
|
<artifactId>spring-retry</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- AspectJ -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.aspectj</groupId>
|
||||||
|
<artifactId>aspectjweaver</artifactId>
|
||||||
|
<version>1.9.21</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Monitoring -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>io.micrometer</groupId>
|
||||||
|
<artifactId>micrometer-registry-prometheus</artifactId>
|
||||||
|
<version>${micrometer.version}</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>io.micrometer</groupId>
|
||||||
|
<artifactId>micrometer-tracing-bridge-brave</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Utilities -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.projectlombok</groupId>
|
||||||
|
<artifactId>lombok</artifactId>
|
||||||
|
<version>1.18.32</version>
|
||||||
|
<scope>provided</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.apache.commons</groupId>
|
||||||
|
<artifactId>commons-lang3</artifactId>
|
||||||
|
<version>${commons-lang3.version}</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.google.guava</groupId>
|
||||||
|
<artifactId>guava</artifactId>
|
||||||
|
<version>${guava.version}</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- JSON Processing -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||||
|
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||||
|
<version>${jackson.version}</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.fasterxml.jackson.module</groupId>
|
||||||
|
<artifactId>jackson-module-parameter-names</artifactId>
|
||||||
|
<version>${jackson.version}</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Spatial Libraries -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.locationtech.jts</groupId>
|
||||||
|
<artifactId>jts-core</artifactId>
|
||||||
|
<version>1.19.0</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.locationtech.jts.io</groupId>
|
||||||
|
<artifactId>jts-io-common</artifactId>
|
||||||
|
<version>1.19.0</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Jakarta Validation -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>jakarta.validation</groupId>
|
||||||
|
<artifactId>jakarta.validation-api</artifactId>
|
||||||
|
<version>3.0.2</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Caffeine Cache -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.github.ben-manes.caffeine</groupId>
|
||||||
|
<artifactId>caffeine</artifactId>
|
||||||
|
<version>3.1.8</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Test Dependencies -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-starter-test</artifactId>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springframework.batch</groupId>
|
||||||
|
<artifactId>spring-batch-test</artifactId>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- H2 Database for Testing -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.h2database</groupId>
|
||||||
|
<artifactId>h2</artifactId>
|
||||||
|
<version>2.2.224</version>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- JUnit 5 -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.junit.jupiter</groupId>
|
||||||
|
<artifactId>junit-jupiter</artifactId>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Testcontainers -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.testcontainers</groupId>
|
||||||
|
<artifactId>testcontainers</artifactId>
|
||||||
|
<version>${testcontainers.version}</version>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.testcontainers</groupId>
|
||||||
|
<artifactId>postgresql</artifactId>
|
||||||
|
<version>${testcontainers.version}</version>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>jakarta.annotation</groupId>
|
||||||
|
<artifactId>jakarta.annotation-api</artifactId>
|
||||||
|
<version>2.1.1</version>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Swagger/OpenAPI 3 -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.springdoc</groupId>
|
||||||
|
<artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
|
||||||
|
<version>2.3.0</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
<build>
|
||||||
|
<finalName>vessel-batch-aggregation</finalName>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||||
|
<configuration>
|
||||||
|
<excludes>
|
||||||
|
<exclude>
|
||||||
|
<groupId>org.projectlombok</groupId>
|
||||||
|
<artifactId>lombok</artifactId>
|
||||||
|
</exclude>
|
||||||
|
</excludes>
|
||||||
|
</configuration>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<goals>
|
||||||
|
<goal>repackage</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
|
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-compiler-plugin</artifactId>
|
||||||
|
<version>3.13.0</version>
|
||||||
|
<configuration>
|
||||||
|
<source>17</source>
|
||||||
|
<target>17</target>
|
||||||
|
<annotationProcessorPaths>
|
||||||
|
<path>
|
||||||
|
<groupId>org.projectlombok</groupId>
|
||||||
|
<artifactId>lombok</artifactId>
|
||||||
|
<version>1.18.32</version>
|
||||||
|
</path>
|
||||||
|
<path>
|
||||||
|
<groupId>org.springframework.boot</groupId>
|
||||||
|
<artifactId>spring-boot-configuration-processor</artifactId>
|
||||||
|
<version>3.2.5</version>
|
||||||
|
</path>
|
||||||
|
</annotationProcessorPaths>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-surefire-plugin</artifactId>
|
||||||
|
<version>3.2.5</version>
|
||||||
|
<configuration>
|
||||||
|
<argLine>-Xmx2048m -Xms1024m</argLine>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
|
||||||
|
<repositories>
|
||||||
|
<repository>
|
||||||
|
<id>central</id>
|
||||||
|
<url>https://repo.maven.apache.org/maven2</url>
|
||||||
|
</repository>
|
||||||
|
<repository>
|
||||||
|
<id>spring-milestones</id>
|
||||||
|
<url>https://repo.spring.io/milestone</url>
|
||||||
|
</repository>
|
||||||
|
</repositories>
|
||||||
|
|
||||||
|
<pluginRepositories>
|
||||||
|
<pluginRepository>
|
||||||
|
<id>spring-milestones</id>
|
||||||
|
<url>https://repo.spring.io/milestone</url>
|
||||||
|
</pluginRepository>
|
||||||
|
</pluginRepositories>
|
||||||
|
|
||||||
|
<profiles>
|
||||||
|
<profile>
|
||||||
|
<id>skip-tests</id>
|
||||||
|
<properties>
|
||||||
|
<maven.test.skip>true</maven.test.skip>
|
||||||
|
</properties>
|
||||||
|
</profile>
|
||||||
|
</profiles>
|
||||||
|
|
||||||
|
</project>
|
||||||
@ -0,0 +1,58 @@
|
|||||||
|
package gc.mda.signal_batch;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.util.BatchUtils;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Job;
|
||||||
|
import org.springframework.batch.core.JobExecution;
|
||||||
|
import org.springframework.batch.core.JobParameters;
|
||||||
|
import org.springframework.batch.core.launch.JobLauncher;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.CommandLineRunner;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@Profile({"!test", "!query"}) // 테스트 환경과 query 프로파일에서는 실행하지 않음
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class BatchCommandLineRunner implements CommandLineRunner {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
@Qualifier("asyncJobLauncher") // 명시적으로 asyncJobLauncher 사용
|
||||||
|
private JobLauncher jobLauncher;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
@Qualifier("vesselAggregationJob")
|
||||||
|
private Job vesselAggregationJob;
|
||||||
|
|
||||||
|
private final BatchUtils batchUtils;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void run(String... args) throws Exception {
|
||||||
|
// 명령행 인자로 배치 실행
|
||||||
|
if (args.length > 0 && "run".equals(args[0])) {
|
||||||
|
LocalDateTime endTime = LocalDateTime.now();
|
||||||
|
LocalDateTime startTime = endTime.minusHours(1);
|
||||||
|
|
||||||
|
if (args.length > 2) {
|
||||||
|
startTime = LocalDateTime.parse(args[1]);
|
||||||
|
endTime = LocalDateTime.parse(args[2]);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Running batch job from {} to {}", startTime, endTime);
|
||||||
|
|
||||||
|
JobParameters params = batchUtils.createJobParameters(startTime, endTime);
|
||||||
|
JobExecution execution = jobLauncher.run(vesselAggregationJob, params);
|
||||||
|
|
||||||
|
log.info("Batch job completed: {}", execution.getStatus());
|
||||||
|
} else {
|
||||||
|
log.info("Batch application started. Use 'run' argument to execute job immediately.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,21 @@
|
|||||||
|
package gc.mda.signal_batch;
|
||||||
|
|
||||||
|
import org.springframework.boot.SpringApplication;
|
||||||
|
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||||
|
|
||||||
|
@SpringBootApplication
|
||||||
|
public class SignalBatchApplication {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
// JVM 기본 시간대를 KST로 설정 (DB 데이터와 일치)
|
||||||
|
System.setProperty("user.timezone", "Asia/Seoul");
|
||||||
|
java.util.TimeZone.setDefault(java.util.TimeZone.getTimeZone("Asia/Seoul"));
|
||||||
|
|
||||||
|
// 중요: PostgreSQL JDBC 드라이버가 timestamp를 올바르게 처리하도록 설정
|
||||||
|
// timestamp (without time zone) 타입을 LocalDateTime으로 매핑
|
||||||
|
System.setProperty("jdbc.timestamp.tz", "false");
|
||||||
|
|
||||||
|
SpringApplication.run(SignalBatchApplication.class, args);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
@ -0,0 +1,220 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AccumulatingAreaProcessor;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AreaStatisticsProcessor;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AreaStatisticsProcessor.AreaStatistics;
|
||||||
|
import gc.mda.signal_batch.batch.reader.InMemoryVesselDataReader;
|
||||||
|
import gc.mda.signal_batch.batch.reader.PartitionedReader;
|
||||||
|
import gc.mda.signal_batch.batch.reader.VesselDataReader;
|
||||||
|
import gc.mda.signal_batch.batch.writer.UpsertWriter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Step;
|
||||||
|
import org.springframework.batch.core.ExitStatus;
|
||||||
|
import org.springframework.batch.core.StepExecution;
|
||||||
|
import org.springframework.batch.core.StepExecutionListener;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.batch.core.step.builder.StepBuilder;
|
||||||
|
import org.springframework.batch.item.Chunk;
|
||||||
|
import org.springframework.batch.item.ItemReader;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.ApplicationContext;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.core.task.TaskExecutor;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * Step configuration for area-statistics aggregation.
 *
 * Two strategies are wired here:
 *  - {@code aggregateAreaStatisticsStep}: single-threaded, reads from an
 *    in-memory reader, accumulates in the processor, and persists everything
 *    at once in the step's afterStep listener (the chunk writer is a no-op);
 *  - {@code partitionedAreaStatisticsStep}: day-partitioned master step that
 *    fans out to {@code areaStatisticsSlaveStep} workers.
 */
@Slf4j
@Configuration
@Profile("!query") // Batch wiring is disabled under the "query" profile.
@RequiredArgsConstructor
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class AreaStatisticsStepConfig {

    private final JobRepository jobRepository;
    private final PlatformTransactionManager queryTransactionManager;
    private final VesselDataReader vesselDataReader;

    private final AccumulatingAreaProcessor accumulatingAreaProcessor;
    private final AreaStatisticsProcessor areaStatisticsProcessor;
    private final UpsertWriter upsertWriter;
    private final PartitionedReader partitionedReader;
    private final ApplicationContext applicationContext;

    // Chunk size of the accumulating step (default 1000).
    @Value("${vessel.batch.area-statistics.chunk-size:1000}")
    private int areaChunkSize;

    // How many VesselData items are grouped per List read in the slave reader (default 500).
    @Value("${vessel.batch.area-statistics.batch-size:500}")
    private int areaBatchSize;

    // NOTE(review): @Qualifier on a final field is only honored by the Lombok
    // @RequiredArgsConstructor if lombok.config declares it as a copyable
    // annotation — verify, otherwise injecting two TaskExecutor beans is ambiguous.
    @Qualifier("batchTaskExecutor")
    private final TaskExecutor batchTaskExecutor;

    @Qualifier("partitionTaskExecutor")
    private final TaskExecutor partitionTaskExecutor;

    /**
     * Accumulating step: processor gathers statistics in memory; the no-op
     * writer defers all persistence to {@link #areaStatisticsStepListener()}.
     */
    @Bean
    public Step aggregateAreaStatisticsStep() {
        // Fetch the InMemoryVesselDataReader from the ApplicationContext
        // (not constructor-injected).
        InMemoryVesselDataReader inMemoryReader = applicationContext.getBean(InMemoryVesselDataReader.class);

        return new StepBuilder("aggregateAreaStatisticsStep", jobRepository)
                .<VesselData, AreaStatistics>chunk(areaChunkSize, queryTransactionManager)
                .reader(inMemoryReader) // memory-backed reader
                .processor(accumulatingAreaProcessor)
                .writer(items -> {}) // intentional no-op; actual write happens in the listener
                .listener(areaStatisticsStepListener())
                .faultTolerant()
                .skipLimit(100)
                .skip(Exception.class)
                .build();
    }

    /** Master step: partitions the time range by day and delegates to the slave step. */
    @Bean
    public Step partitionedAreaStatisticsStep() {
        return new StepBuilder("partitionedAreaStatisticsStep", jobRepository)
                .partitioner("areaStatisticsPartitioner", partitionedReader.dayPartitioner(null))
                .partitionHandler(areaStatisticsPartitionHandler())
                .build();
    }

    /** Runs up to 24 slave partitions (one per hour/day slot) on the partition executor. */
    @Bean
    public TaskExecutorPartitionHandler areaStatisticsPartitionHandler() {
        TaskExecutorPartitionHandler handler = new TaskExecutorPartitionHandler();
        handler.setTaskExecutor(partitionTaskExecutor);
        handler.setStep(areaStatisticsSlaveStep());
        handler.setGridSize(24);
        return handler;
    }

    /**
     * Slave step: reads lists of VesselData for one partition, processes them
     * into AreaStatistics batches, and upserts the result.
     */
    @Bean
    public Step areaStatisticsSlaveStep() {
        return new StepBuilder("areaStatisticsSlaveStep", jobRepository)
                .<List<VesselData>, List<AreaStatistics>>chunk(50, queryTransactionManager)
                .reader(slaveAreaBatchVesselDataReader(null, null, null)) // null args: real values injected via @StepScope
                .processor(areaStatisticsProcessor.batchProcessor())
                .writer(upsertWriter.areaStatisticsWriter())
                .faultTolerant()
                .skipLimit(100)
                .skip(Exception.class)
                .build();
    }

    /**
     * Step-scoped reader over the latest vessel positions in
     * [startTime, endTime). Lazily opens the delegate on first read and closes
     * it when the stream is exhausted, so it can be reused across step runs.
     */
    @Bean
    @StepScope
    public ItemReader<VesselData> areaVesselDataReader(
            @Value("#{jobParameters['startTime']}") String startTimeStr,
            @Value("#{jobParameters['endTime']}") String endTimeStr) {
        return new ItemReader<VesselData>() {
            private ItemReader<VesselData> delegate;   // lazily created/opened
            private boolean initialized = false;

            @Override
            public VesselData read() throws Exception {
                if (!initialized) {
                    LocalDateTime startTime = startTimeStr != null ? LocalDateTime.parse(startTimeStr) : null;
                    LocalDateTime endTime = endTimeStr != null ? LocalDateTime.parse(endTimeStr) : null;

                    // Close any reader left over from a previous run (best effort).
                    if (delegate != null) {
                        try {
                            ((org.springframework.batch.item.ItemStream) delegate).close();
                        } catch (Exception e) {
                            log.debug("Failed to close previous reader: {}", e.getMessage());
                        }
                    }

                    // Latest position per vessel only.
                    delegate = vesselDataReader.vesselLatestPositionReader(startTime, endTime, null);
                    ((org.springframework.batch.item.ItemStream) delegate).open(
                            org.springframework.batch.core.scope.context.StepSynchronizationManager
                                    .getContext().getStepExecution().getExecutionContext());
                    initialized = true;
                }

                VesselData data = delegate.read();

                // End of stream: release the delegate so a later run re-opens fresh.
                if (data == null && delegate != null) {
                    try {
                        ((org.springframework.batch.item.ItemStream) delegate).close();
                        delegate = null;
                        initialized = false;
                    } catch (Exception e) {
                        log.debug("Failed to close reader on completion: {}", e.getMessage());
                    }
                }

                return data;
            }
        };
    }

    /**
     * Step-scoped slave reader: pulls up to {@code areaBatchSize} VesselData
     * items per read() from the partition's paging reader and returns them as
     * one List item (null once the underlying reader is exhausted).
     */
    @Bean
    @StepScope
    public ItemReader<List<VesselData>> slaveAreaBatchVesselDataReader(
            @Value("#{stepExecutionContext['startTime']}") String startTime,
            @Value("#{stepExecutionContext['endTime']}") String endTime,
            @Value("#{stepExecutionContext['partition']}") String partition) {

        return new ItemReader<List<VesselData>>() {
            private ItemReader<VesselData> delegate = vesselDataReader.vesselDataPagingReader(
                    startTime != null ? LocalDateTime.parse(startTime) : null,
                    endTime != null ? LocalDateTime.parse(endTime) : null,
                    partition
            );

            @Override
            public List<VesselData> read() throws Exception {
                List<VesselData> batch = new java.util.ArrayList<>();

                for (int i = 0; i < areaBatchSize; i++) {
                    VesselData item = delegate.read();
                    if (item == null) {
                        break;
                    }
                    batch.add(item);
                }

                return batch.isEmpty() ? null : batch;
            }
        };
    }

    /**
     * After the accumulating step finishes, writes the statistics collected in
     * the step execution context (key "areaStatistics") to the DB in one shot.
     * Failures are rethrown so the step fails visibly instead of silently
     * losing data.
     */
    @Bean
    public StepExecutionListener areaStatisticsStepListener() {
        return new StepExecutionListener() {
            @Override
            public ExitStatus afterStep(StepExecution stepExecution) {
                // Statistics accumulated by AccumulatingAreaProcessor.
                @SuppressWarnings("unchecked")
                List<AreaStatistics> statistics = (List<AreaStatistics>)
                        stepExecution.getExecutionContext().get("areaStatistics");

                if (statistics != null && !statistics.isEmpty()) {
                    try {
                        // The writer consumes List<AreaStatistics> items, so the
                        // whole list is wrapped as a single-item chunk.
                        upsertWriter.areaStatisticsWriter().write(
                                new Chunk<>(List.of(statistics))
                        );

                        log.info("Successfully wrote {} area statistics", statistics.size());
                    } catch (Exception e) {
                        log.error("Failed to write area statistics", e);
                        throw new RuntimeException(e);
                    }
                }
                return stepExecution.getExitStatus();
            }
        };
    }
}
|
||||||
@ -0,0 +1,47 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.batch.listener.JobCompletionListener;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Job;
|
||||||
|
import org.springframework.batch.core.JobParametersValidator;
|
||||||
|
import org.springframework.batch.core.job.DefaultJobParametersValidator;
|
||||||
|
import org.springframework.batch.core.job.builder.JobBuilder;
|
||||||
|
import org.springframework.batch.core.launch.support.RunIdIncrementer;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 작업 비활성화
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class DailyAggregationJobConfig {
|
||||||
|
|
||||||
|
private final JobRepository jobRepository;
|
||||||
|
private final DailyAggregationStepConfig dailyAggregationStepConfig;
|
||||||
|
private final JobCompletionListener jobCompletionListener;
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Job dailyAggregationJob() {
|
||||||
|
return new JobBuilder("dailyAggregationJob", jobRepository)
|
||||||
|
.incrementer(new RunIdIncrementer())
|
||||||
|
.validator(dailyJobParametersValidator())
|
||||||
|
.listener(jobCompletionListener)
|
||||||
|
.start(dailyAggregationStepConfig.mergeDailyTracksStep())
|
||||||
|
.next(dailyAggregationStepConfig.gridDailySummaryStep())
|
||||||
|
.next(dailyAggregationStepConfig.areaDailySummaryStep())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JobParametersValidator dailyJobParametersValidator() {
|
||||||
|
DefaultJobParametersValidator validator = new DefaultJobParametersValidator();
|
||||||
|
validator.setRequiredKeys(new String[]{"startTime", "endTime", "timeBucket"});
|
||||||
|
validator.setOptionalKeys(new String[]{"executionTime", "enableAbnormalDetection"});
|
||||||
|
return validator;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,365 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
|
||||||
|
import gc.mda.signal_batch.batch.processor.DailyTrackProcessor;
|
||||||
|
import gc.mda.signal_batch.batch.processor.DailyTrackProcessorWithAbnormalDetection;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalDetectionResult;
|
||||||
|
import gc.mda.signal_batch.batch.writer.VesselTrackBulkWriter;
|
||||||
|
import gc.mda.signal_batch.batch.writer.AbnormalTrackWriter;
|
||||||
|
import gc.mda.signal_batch.batch.writer.CompositeTrackWriter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Step;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.batch.core.step.builder.StepBuilder;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.batch.item.ItemWriter;
|
||||||
|
import org.springframework.batch.item.database.JdbcCursorItemReader;
|
||||||
|
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
 * Step configuration for the daily aggregation job.
 *
 * Steps built here:
 *  - mergeDailyTracksStep: merges hourly vessel tracks into per-day tracks,
 *    optionally with abnormal-track detection;
 *  - gridDailySummaryStep / areaDailySummaryStep: roll the hourly grid/area
 *    summary tables up into their daily counterparts via INSERT..SELECT upserts.
 */
@Slf4j
@Configuration
@Profile("!query") // Batch wiring is disabled under the "query" profile.
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class DailyAggregationStepConfig {

    private final JobRepository jobRepository;
    private final DataSource queryDataSource;
    private final PlatformTransactionManager transactionManager;
    private final VesselTrackBulkWriter vesselTrackBulkWriter;
    private final AbnormalTrackWriter abnormalTrackWriter;
    private final AbnormalTrackDetector abnormalTrackDetector;

    // Explicit constructor (instead of Lombok) so the @Qualifier annotations on
    // the DataSource / transaction manager parameters take effect.
    public DailyAggregationStepConfig(
            JobRepository jobRepository,
            @Qualifier("queryDataSource") DataSource queryDataSource,
            @Qualifier("queryTransactionManager") PlatformTransactionManager transactionManager,
            VesselTrackBulkWriter vesselTrackBulkWriter,
            AbnormalTrackWriter abnormalTrackWriter,
            AbnormalTrackDetector abnormalTrackDetector) {
        this.jobRepository = jobRepository;
        this.queryDataSource = queryDataSource;
        this.transactionManager = transactionManager;
        this.vesselTrackBulkWriter = vesselTrackBulkWriter;
        this.abnormalTrackWriter = abnormalTrackWriter;
        this.abnormalTrackDetector = abnormalTrackDetector;
    }

    // Chunk size for the track-merge step (default 5000).
    @Value("${vessel.batch.chunk-size:5000}")
    private int chunkSize;

    /**
     * Merges hourly tracks into daily tracks.
     * Abnormal-track detection is hard-enabled here (controlled by config per
     * the original comment), so the else branch below is currently dead code.
     */
    @Bean
    public Step mergeDailyTracksStep() {
        // Abnormal-track detection is always on (intended to be config-driven).
        boolean detectAbnormal = true;

        if (detectAbnormal) {
            log.info("Building mergeDailyTracksStep with abnormal detection enabled");
            return new StepBuilder("mergeDailyTracksStep", jobRepository)
                    .<VesselTrack.VesselKey, AbnormalDetectionResult>chunk(chunkSize, transactionManager)
                    .reader(dailyVesselKeyReader(null, null)) // nulls replaced via @StepScope injection
                    .processor(dailyTrackProcessorWithAbnormalDetection())
                    .writer(dailyCompositeTrackWriter())
                    .build();
        } else {
            // Dead branch while detectAbnormal is hard-coded true; kept as the
            // non-detecting variant of the step.
            log.info("Building mergeDailyTracksStep without abnormal detection");
            return new StepBuilder("mergeDailyTracksStep", jobRepository)
                    .<VesselTrack.VesselKey, VesselTrack>chunk(chunkSize, transactionManager)
                    .reader(dailyVesselKeyReader(null, null))
                    .processor(dailyTrackItemProcessor())
                    .writer(dailyTrackWriter())
                    .build();
        }
    }

    /** Rolls hourly grid summaries up to daily, one grid cell (haegu_no) per item. */
    @Bean
    public Step gridDailySummaryStep() {
        return new StepBuilder("gridDailySummaryStep", jobRepository)
                .<Integer, DailyGridSummary>chunk(100, transactionManager)
                .reader(dailyGridReader(null, null))
                .processor(dailyGridProcessor())
                .writer(dailyGridWriter(null, null))
                .build();
    }

    /** Rolls hourly area summaries up to daily, one area_id per item. */
    @Bean
    public Step areaDailySummaryStep() {
        return new StepBuilder("areaDailySummaryStep", jobRepository)
                .<String, DailyAreaSummary>chunk(100, transactionManager)
                .reader(dailyAreaReader(null, null))
                .processor(dailyAreaProcessor())
                .writer(dailyAreaWriter(null, null))
                .build();
    }

    /**
     * Streams the distinct (source, target, day) keys that have hourly tracks
     * inside [startTime, endTime). Parameters come from job parameters as
     * ISO-8601 strings.
     */
    @Bean
    @StepScope
    public JdbcCursorItemReader<VesselTrack.VesselKey> dailyVesselKeyReader(
            @Value("#{jobParameters['startTime']}") String startTime,
            @Value("#{jobParameters['endTime']}") String endTime) {

        LocalDateTime start = LocalDateTime.parse(startTime);
        LocalDateTime end = LocalDateTime.parse(endTime);

        String sql = """
            SELECT DISTINCT sig_src_cd, target_id, date_trunc('day', time_bucket) as day_bucket
            FROM signal.t_vessel_tracks_hourly
            WHERE time_bucket >= ? AND time_bucket < ?
            ORDER BY sig_src_cd, target_id, day_bucket
            """;

        return new JdbcCursorItemReaderBuilder<VesselTrack.VesselKey>()
                .name("dailyVesselKeyReader")
                .dataSource(queryDataSource)
                .sql(sql)
                .preparedStatementSetter(ps -> {
                    ps.setTimestamp(1, java.sql.Timestamp.valueOf(start));
                    ps.setTimestamp(2, java.sql.Timestamp.valueOf(end));
                })
                .rowMapper((rs, rowNum) -> new VesselTrack.VesselKey(
                        rs.getString("sig_src_cd"),
                        rs.getString("target_id"),
                        rs.getObject("day_bucket", LocalDateTime.class)
                ))
                .build();
    }

    /** Processor that merges one vessel-day's hourly tracks into a daily VesselTrack. */
    @Bean
    public ItemProcessor<VesselTrack.VesselKey, VesselTrack> dailyTrackItemProcessor() {
        return new DailyTrackProcessor(queryDataSource, new JdbcTemplate(queryDataSource));
    }

    /** Filters out nulls and bulk-writes the merged daily tracks. */
    @Bean
    public ItemWriter<VesselTrack> dailyTrackWriter() {
        return items -> {
            List<VesselTrack> tracks = new ArrayList<>();
            for (VesselTrack track : items) {
                if (track != null) {
                    tracks.add(track);
                }
            }
            if (!tracks.isEmpty()) {
                vesselTrackBulkWriter.writeDailyTracks(tracks);
            }
        };
    }

    /** Streams the distinct grid cells (haegu_no) with hourly summaries in the window. */
    @Bean
    @StepScope
    public JdbcCursorItemReader<Integer> dailyGridReader(
            @Value("#{jobParameters['startTime']}") String startTime,
            @Value("#{jobParameters['endTime']}") String endTime) {

        LocalDateTime start = LocalDateTime.parse(startTime);
        LocalDateTime end = LocalDateTime.parse(endTime);

        String sql = """
            SELECT DISTINCT haegu_no
            FROM signal.t_grid_tracks_summary_hourly
            WHERE time_bucket >= ? AND time_bucket < ?
            ORDER BY haegu_no
            """;

        return new JdbcCursorItemReaderBuilder<Integer>()
                .name("dailyGridReader")
                .dataSource(queryDataSource)
                .sql(sql)
                .preparedStatementSetter(ps -> {
                    ps.setTimestamp(1, java.sql.Timestamp.valueOf(start));
                    ps.setTimestamp(2, java.sql.Timestamp.valueOf(end));
                })
                .rowMapper((rs, rowNum) -> rs.getInt("haegu_no"))
                .build();
    }

    /** Wraps each grid id in a DailyGridSummary carrier (real work happens in the writer's SQL). */
    @Bean
    public ItemProcessor<Integer, DailyGridSummary> dailyGridProcessor() {
        return haeguNo -> {
            DailyGridSummary summary = new DailyGridSummary();
            summary.haeguNo = haeguNo;
            return summary;
        };
    }

    /**
     * Per grid cell: aggregates the hourly rows in [startTime, endTime) into a
     * single daily row via INSERT..SELECT with ON CONFLICT upsert. The daily
     * time_bucket is startTime truncated to midnight.
     */
    @Bean
    @StepScope
    public ItemWriter<DailyGridSummary> dailyGridWriter(
            @Value("#{jobParameters['startTime']}") String startTime,
            @Value("#{jobParameters['endTime']}") String endTime) {

        return items -> {
            LocalDateTime start = LocalDateTime.parse(startTime);
            LocalDateTime end = LocalDateTime.parse(endTime);
            LocalDateTime dayBucket = start.withHour(0).withMinute(0).withSecond(0).withNano(0);

            JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);

            for (DailyGridSummary summary : items) {
                if (summary == null) continue;

                // Distinct-vessel counting keys on (sig_src_cd, target_id)
                // extracted from the hourly vessel_list JSON.
                String sql = """
                    INSERT INTO signal.t_grid_tracks_summary_daily
                    (haegu_no, time_bucket, total_vessels, total_distance_nm, avg_speed, vessel_list, created_at)
                    SELECT
                        haegu_no,
                        ?::timestamp as time_bucket,
                        COUNT(DISTINCT vessel_key) as total_vessels,
                        SUM(total_distance_nm) as total_distance_nm,
                        AVG(avg_speed) as avg_speed,
                        jsonb_agg(DISTINCT vessel_list) as vessel_list,
                        NOW()
                    FROM (
                        SELECT haegu_no, jsonb_array_elements(vessel_list) as vessel_list,
                               total_distance_nm, avg_speed,
                               (vessel_list->>'sig_src_cd') || '_' || (vessel_list->>'target_id') as vessel_key
                        FROM signal.t_grid_tracks_summary_hourly
                        WHERE haegu_no = ?
                        AND time_bucket >= ?
                        AND time_bucket < ?
                    ) hourly_data
                    GROUP BY haegu_no
                    ON CONFLICT (haegu_no, time_bucket) DO UPDATE SET
                        total_vessels = EXCLUDED.total_vessels,
                        total_distance_nm = EXCLUDED.total_distance_nm,
                        avg_speed = EXCLUDED.avg_speed,
                        vessel_list = EXCLUDED.vessel_list,
                        created_at = NOW()
                    """;

                jdbcTemplate.update(sql, dayBucket, summary.haeguNo, start, end);
            }
        };
    }

    /** Streams the distinct area ids with hourly summaries in the window. */
    @Bean
    @StepScope
    public JdbcCursorItemReader<String> dailyAreaReader(
            @Value("#{jobParameters['startTime']}") String startTime,
            @Value("#{jobParameters['endTime']}") String endTime) {

        LocalDateTime start = LocalDateTime.parse(startTime);
        LocalDateTime end = LocalDateTime.parse(endTime);

        String sql = """
            SELECT DISTINCT area_id
            FROM signal.t_area_tracks_summary_hourly
            WHERE time_bucket >= ? AND time_bucket < ?
            ORDER BY area_id
            """;

        return new JdbcCursorItemReaderBuilder<String>()
                .name("dailyAreaReader")
                .dataSource(queryDataSource)
                .sql(sql)
                .preparedStatementSetter(ps -> {
                    ps.setTimestamp(1, java.sql.Timestamp.valueOf(start));
                    ps.setTimestamp(2, java.sql.Timestamp.valueOf(end));
                })
                .rowMapper((rs, rowNum) -> rs.getString("area_id"))
                .build();
    }

    /** Wraps each area id in a DailyAreaSummary carrier (real work happens in the writer's SQL). */
    @Bean
    public ItemProcessor<String, DailyAreaSummary> dailyAreaProcessor() {
        return areaId -> {
            DailyAreaSummary summary = new DailyAreaSummary();
            summary.areaId = areaId;
            return summary;
        };
    }

    /**
     * Per area: aggregates the hourly rows in [startTime, endTime) into a
     * single daily row via INSERT..SELECT with ON CONFLICT upsert. Mirrors
     * {@link #dailyGridWriter} with area_id in place of haegu_no.
     */
    @Bean
    @StepScope
    public ItemWriter<DailyAreaSummary> dailyAreaWriter(
            @Value("#{jobParameters['startTime']}") String startTime,
            @Value("#{jobParameters['endTime']}") String endTime) {

        return items -> {
            LocalDateTime start = LocalDateTime.parse(startTime);
            LocalDateTime end = LocalDateTime.parse(endTime);
            LocalDateTime dayBucket = start.withHour(0).withMinute(0).withSecond(0).withNano(0);

            JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);

            for (DailyAreaSummary summary : items) {
                if (summary == null) continue;

                String sql = """
                    INSERT INTO signal.t_area_tracks_summary_daily
                    (area_id, time_bucket, total_vessels, total_distance_nm, avg_speed, vessel_list, created_at)
                    SELECT
                        area_id,
                        ?::timestamp as time_bucket,
                        COUNT(DISTINCT vessel_key) as total_vessels,
                        SUM(total_distance_nm) as total_distance_nm,
                        AVG(avg_speed) as avg_speed,
                        jsonb_agg(DISTINCT vessel_list) as vessel_list,
                        NOW()
                    FROM (
                        SELECT area_id, jsonb_array_elements(vessel_list) as vessel_list,
                               total_distance_nm, avg_speed,
                               (vessel_list->>'sig_src_cd') || '_' || (vessel_list->>'target_id') as vessel_key
                        FROM signal.t_area_tracks_summary_hourly
                        WHERE area_id = ?
                        AND time_bucket >= ?
                        AND time_bucket < ?
                    ) hourly_data
                    GROUP BY area_id
                    ON CONFLICT (area_id, time_bucket) DO UPDATE SET
                        total_vessels = EXCLUDED.total_vessels,
                        total_distance_nm = EXCLUDED.total_distance_nm,
                        avg_speed = EXCLUDED.avg_speed,
                        vessel_list = EXCLUDED.vessel_list,
                        created_at = NOW()
                    """;

                jdbcTemplate.update(sql, dayBucket, summary.areaId, start, end);
            }
        };
    }

    // ----- Beans for abnormal-track detection -----

    /** Wraps the plain daily-track processor with abnormal-track detection. */
    @Bean
    public ItemProcessor<VesselTrack.VesselKey, AbnormalDetectionResult> dailyTrackProcessorWithAbnormalDetection() {
        return new DailyTrackProcessorWithAbnormalDetection(
                dailyTrackItemProcessor(),
                abnormalTrackDetector,
                queryDataSource
        );
    }

    /** Writes normal tracks via the bulk writer and abnormal ones via the abnormal writer. */
    @Bean
    public ItemWriter<AbnormalDetectionResult> dailyCompositeTrackWriter() {
        // Set the job name explicitly so abnormal records are attributed correctly.
        abnormalTrackWriter.setJobName("dailyAggregationJob");
        return new CompositeTrackWriter(
                vesselTrackBulkWriter,
                abnormalTrackWriter,
                "daily"
        );
    }

    // ----- Simple carrier classes used between processor and writer -----

    /** Carrier for one grid cell id flowing through gridDailySummaryStep. */
    public static class DailyGridSummary {
        public Integer haeguNo; // grid cell identifier
    }

    /** Carrier for one area id flowing through areaDailySummaryStep. */
    public static class DailyAreaSummary {
        public String areaId; // area identifier
    }
}
|
||||||
@ -0,0 +1,47 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.batch.listener.JobCompletionListener;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Job;
|
||||||
|
import org.springframework.batch.core.JobParametersValidator;
|
||||||
|
import org.springframework.batch.core.job.DefaultJobParametersValidator;
|
||||||
|
import org.springframework.batch.core.job.builder.JobBuilder;
|
||||||
|
import org.springframework.batch.core.launch.support.RunIdIncrementer;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 작업 비활성화
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class HourlyAggregationJobConfig {
|
||||||
|
|
||||||
|
private final JobRepository jobRepository;
|
||||||
|
private final HourlyAggregationStepConfig hourlyAggregationStepConfig;
|
||||||
|
private final JobCompletionListener jobCompletionListener;
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Job hourlyAggregationJob() {
|
||||||
|
return new JobBuilder("hourlyAggregationJob", jobRepository)
|
||||||
|
.incrementer(new RunIdIncrementer())
|
||||||
|
.validator(hourlyJobParametersValidator())
|
||||||
|
.listener(jobCompletionListener)
|
||||||
|
.start(hourlyAggregationStepConfig.mergeHourlyTracksStep())
|
||||||
|
.next(hourlyAggregationStepConfig.gridHourlySummaryStep())
|
||||||
|
.next(hourlyAggregationStepConfig.areaHourlySummaryStep())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JobParametersValidator hourlyJobParametersValidator() {
|
||||||
|
DefaultJobParametersValidator validator = new DefaultJobParametersValidator();
|
||||||
|
validator.setRequiredKeys(new String[]{"startTime", "endTime", "timeBucket"});
|
||||||
|
validator.setOptionalKeys(new String[]{"executionTime", "enableAbnormalDetection"});
|
||||||
|
return validator;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,373 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
|
||||||
|
import gc.mda.signal_batch.batch.processor.HourlyTrackProcessor;
|
||||||
|
import gc.mda.signal_batch.batch.processor.HourlyTrackProcessorWithAbnormalDetection;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalDetectionResult;
|
||||||
|
import gc.mda.signal_batch.batch.writer.VesselTrackBulkWriter;
|
||||||
|
import gc.mda.signal_batch.batch.writer.AbnormalTrackWriter;
|
||||||
|
import gc.mda.signal_batch.batch.writer.CompositeTrackWriter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Step;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.batch.core.step.builder.StepBuilder;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.batch.item.ItemWriter;
|
||||||
|
import org.springframework.batch.item.database.JdbcCursorItemReader;
|
||||||
|
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 작업 비활성화
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class HourlyAggregationStepConfig {
|
||||||
|
|
||||||
|
private final JobRepository jobRepository;
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
private final PlatformTransactionManager transactionManager;
|
||||||
|
private final VesselTrackBulkWriter vesselTrackBulkWriter;
|
||||||
|
private final AbnormalTrackWriter abnormalTrackWriter;
|
||||||
|
private final AbnormalTrackDetector abnormalTrackDetector;
|
||||||
|
|
||||||
|
public HourlyAggregationStepConfig(
|
||||||
|
JobRepository jobRepository,
|
||||||
|
@Qualifier("queryDataSource") DataSource queryDataSource,
|
||||||
|
@Qualifier("queryTransactionManager") PlatformTransactionManager transactionManager,
|
||||||
|
VesselTrackBulkWriter vesselTrackBulkWriter,
|
||||||
|
AbnormalTrackWriter abnormalTrackWriter,
|
||||||
|
AbnormalTrackDetector abnormalTrackDetector) {
|
||||||
|
this.jobRepository = jobRepository;
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
this.transactionManager = transactionManager;
|
||||||
|
this.vesselTrackBulkWriter = vesselTrackBulkWriter;
|
||||||
|
this.abnormalTrackWriter = abnormalTrackWriter;
|
||||||
|
this.abnormalTrackDetector = abnormalTrackDetector;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Value("${vessel.batch.chunk-size:5000}")
|
||||||
|
private int chunkSize;
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step mergeHourlyTracksStep() {
|
||||||
|
// 비정상 궤적 검출은 항상 활성화 (설정 파일로 제어)
|
||||||
|
boolean detectAbnormal = true;
|
||||||
|
|
||||||
|
if (detectAbnormal) {
|
||||||
|
log.info("Building mergeHourlyTracksStep with abnormal detection enabled");
|
||||||
|
return new StepBuilder("mergeHourlyTracksStep", jobRepository)
|
||||||
|
.<VesselTrack.VesselKey, AbnormalDetectionResult>chunk(chunkSize, transactionManager)
|
||||||
|
.reader(hourlyVesselKeyReader(null, null))
|
||||||
|
.processor(hourlyTrackProcessorWithAbnormalDetection())
|
||||||
|
.writer(hourlyCompositeTrackWriter())
|
||||||
|
.build();
|
||||||
|
} else {
|
||||||
|
log.info("Building mergeHourlyTracksStep without abnormal detection");
|
||||||
|
return new StepBuilder("mergeHourlyTracksStep", jobRepository)
|
||||||
|
.<VesselTrack.VesselKey, VesselTrack>chunk(chunkSize, transactionManager)
|
||||||
|
.reader(hourlyVesselKeyReader(null, null))
|
||||||
|
.processor(hourlyTrackItemProcessor())
|
||||||
|
.writer(hourlyTrackWriter())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step gridHourlySummaryStep() {
|
||||||
|
return new StepBuilder("gridHourlySummaryStep", jobRepository)
|
||||||
|
.<Integer, HourlyGridSummary>chunk(100, transactionManager)
|
||||||
|
.reader(hourlyGridReader(null, null))
|
||||||
|
.processor(hourlyGridProcessor())
|
||||||
|
.writer(hourlyGridWriter(null, null))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step areaHourlySummaryStep() {
|
||||||
|
return new StepBuilder("areaHourlySummaryStep", jobRepository)
|
||||||
|
.<String, HourlyAreaSummary>chunk(100, transactionManager)
|
||||||
|
.reader(hourlyAreaReader(null, null))
|
||||||
|
.processor(hourlyAreaProcessor())
|
||||||
|
.writer(hourlyAreaWriter(null, null))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public JdbcCursorItemReader<VesselTrack.VesselKey> hourlyVesselKeyReader(
|
||||||
|
@Value("#{jobParameters['startTime']}") String startTime,
|
||||||
|
@Value("#{jobParameters['endTime']}") String endTime) {
|
||||||
|
|
||||||
|
LocalDateTime start = LocalDateTime.parse(startTime);
|
||||||
|
LocalDateTime end = LocalDateTime.parse(endTime);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT DISTINCT sig_src_cd, target_id, date_trunc('hour', time_bucket) as hour_bucket
|
||||||
|
FROM signal.t_vessel_tracks_5min
|
||||||
|
WHERE time_bucket >= ? AND time_bucket < ?
|
||||||
|
ORDER BY sig_src_cd, target_id, hour_bucket
|
||||||
|
""";
|
||||||
|
|
||||||
|
return new JdbcCursorItemReaderBuilder<VesselTrack.VesselKey>()
|
||||||
|
.name("hourlyVesselKeyReader")
|
||||||
|
.dataSource(queryDataSource)
|
||||||
|
.sql(sql)
|
||||||
|
.preparedStatementSetter(ps -> {
|
||||||
|
ps.setTimestamp(1, java.sql.Timestamp.valueOf(start));
|
||||||
|
ps.setTimestamp(2, java.sql.Timestamp.valueOf(end));
|
||||||
|
})
|
||||||
|
.rowMapper((rs, rowNum) -> new VesselTrack.VesselKey(
|
||||||
|
rs.getString("sig_src_cd"),
|
||||||
|
rs.getString("target_id"),
|
||||||
|
rs.getObject("hour_bucket", LocalDateTime.class)
|
||||||
|
))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ItemProcessor<VesselTrack.VesselKey, VesselTrack> hourlyTrackItemProcessor() {
|
||||||
|
return new HourlyTrackProcessor(queryDataSource, new JdbcTemplate(queryDataSource));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ItemWriter<VesselTrack> hourlyTrackWriter() {
|
||||||
|
return items -> {
|
||||||
|
List<VesselTrack> tracks = new ArrayList<>();
|
||||||
|
for (VesselTrack track : items) {
|
||||||
|
if (track != null) {
|
||||||
|
tracks.add(track);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!tracks.isEmpty()) {
|
||||||
|
vesselTrackBulkWriter.writeHourlyTracks(tracks);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Grid summary reader
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public JdbcCursorItemReader<Integer> hourlyGridReader(
|
||||||
|
@Value("#{jobParameters['startTime']}") String startTime,
|
||||||
|
@Value("#{jobParameters['endTime']}") String endTime) {
|
||||||
|
|
||||||
|
LocalDateTime start = LocalDateTime.parse(startTime);
|
||||||
|
LocalDateTime end = LocalDateTime.parse(endTime);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT DISTINCT haegu_no
|
||||||
|
FROM signal.t_grid_vessel_tracks
|
||||||
|
WHERE time_bucket >= ? AND time_bucket < ?
|
||||||
|
ORDER BY haegu_no
|
||||||
|
""";
|
||||||
|
|
||||||
|
return new JdbcCursorItemReaderBuilder<Integer>()
|
||||||
|
.name("hourlyGridReader")
|
||||||
|
.dataSource(queryDataSource)
|
||||||
|
.sql(sql)
|
||||||
|
.preparedStatementSetter(ps -> {
|
||||||
|
ps.setTimestamp(1, java.sql.Timestamp.valueOf(start));
|
||||||
|
ps.setTimestamp(2, java.sql.Timestamp.valueOf(end));
|
||||||
|
})
|
||||||
|
.rowMapper((rs, rowNum) -> rs.getInt("haegu_no"))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ItemProcessor<Integer, HourlyGridSummary> hourlyGridProcessor() {
|
||||||
|
return new ItemProcessor<Integer, HourlyGridSummary>() {
|
||||||
|
@Override
|
||||||
|
public HourlyGridSummary process(Integer haeguNo) throws Exception {
|
||||||
|
HourlyGridSummary summary = new HourlyGridSummary();
|
||||||
|
summary.haeguNo = haeguNo;
|
||||||
|
return summary;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemWriter<HourlyGridSummary> hourlyGridWriter(
|
||||||
|
@Value("#{jobParameters['startTime']}") String startTime,
|
||||||
|
@Value("#{jobParameters['endTime']}") String endTime) {
|
||||||
|
|
||||||
|
return items -> {
|
||||||
|
LocalDateTime start = LocalDateTime.parse(startTime);
|
||||||
|
LocalDateTime end = LocalDateTime.parse(endTime);
|
||||||
|
LocalDateTime hourBucket = start.withMinute(0).withSecond(0).withNano(0);
|
||||||
|
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
for (HourlyGridSummary summary : items) {
|
||||||
|
if (summary == null) continue;
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_grid_tracks_summary_hourly
|
||||||
|
(haegu_no, time_bucket, total_vessels, total_distance_nm, avg_speed, vessel_list, created_at)
|
||||||
|
SELECT
|
||||||
|
haegu_no,
|
||||||
|
?::timestamp as time_bucket,
|
||||||
|
COUNT(DISTINCT sig_src_cd || '_' || target_id) as total_vessels,
|
||||||
|
SUM(distance_nm) as total_distance_nm,
|
||||||
|
AVG(avg_speed) as avg_speed,
|
||||||
|
jsonb_agg(DISTINCT jsonb_build_object(
|
||||||
|
'sig_src_cd', sig_src_cd,
|
||||||
|
'target_id', target_id,
|
||||||
|
'distance_nm', distance_nm,
|
||||||
|
'avg_speed', avg_speed
|
||||||
|
)) as vessel_list,
|
||||||
|
NOW()
|
||||||
|
FROM signal.t_grid_vessel_tracks
|
||||||
|
WHERE haegu_no = ?
|
||||||
|
AND time_bucket >= ?
|
||||||
|
AND time_bucket < ?
|
||||||
|
GROUP BY haegu_no
|
||||||
|
ON CONFLICT (haegu_no, time_bucket) DO UPDATE SET
|
||||||
|
total_vessels = EXCLUDED.total_vessels,
|
||||||
|
total_distance_nm = EXCLUDED.total_distance_nm,
|
||||||
|
avg_speed = EXCLUDED.avg_speed,
|
||||||
|
vessel_list = EXCLUDED.vessel_list,
|
||||||
|
created_at = NOW()
|
||||||
|
""";
|
||||||
|
|
||||||
|
jdbcTemplate.update(sql, hourBucket, summary.haeguNo, start, end);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Area summary reader
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public JdbcCursorItemReader<String> hourlyAreaReader(
|
||||||
|
@Value("#{jobParameters['startTime']}") String startTime,
|
||||||
|
@Value("#{jobParameters['endTime']}") String endTime) {
|
||||||
|
|
||||||
|
LocalDateTime start = LocalDateTime.parse(startTime);
|
||||||
|
LocalDateTime end = LocalDateTime.parse(endTime);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT DISTINCT area_id
|
||||||
|
FROM signal.t_area_vessel_tracks
|
||||||
|
WHERE time_bucket >= ? AND time_bucket < ?
|
||||||
|
ORDER BY area_id
|
||||||
|
""";
|
||||||
|
|
||||||
|
return new JdbcCursorItemReaderBuilder<String>()
|
||||||
|
.name("hourlyAreaReader")
|
||||||
|
.dataSource(queryDataSource)
|
||||||
|
.sql(sql)
|
||||||
|
.preparedStatementSetter(ps -> {
|
||||||
|
ps.setTimestamp(1, java.sql.Timestamp.valueOf(start));
|
||||||
|
ps.setTimestamp(2, java.sql.Timestamp.valueOf(end));
|
||||||
|
})
|
||||||
|
.rowMapper((rs, rowNum) -> rs.getString("area_id"))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ItemProcessor<String, HourlyAreaSummary> hourlyAreaProcessor() {
|
||||||
|
return new ItemProcessor<String, HourlyAreaSummary>() {
|
||||||
|
@Override
|
||||||
|
public HourlyAreaSummary process(String areaId) throws Exception {
|
||||||
|
HourlyAreaSummary summary = new HourlyAreaSummary();
|
||||||
|
summary.areaId = areaId;
|
||||||
|
return summary;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemWriter<HourlyAreaSummary> hourlyAreaWriter(
|
||||||
|
@Value("#{jobParameters['startTime']}") String startTime,
|
||||||
|
@Value("#{jobParameters['endTime']}") String endTime) {
|
||||||
|
|
||||||
|
return items -> {
|
||||||
|
LocalDateTime start = LocalDateTime.parse(startTime);
|
||||||
|
LocalDateTime end = LocalDateTime.parse(endTime);
|
||||||
|
LocalDateTime hourBucket = start.withMinute(0).withSecond(0).withNano(0);
|
||||||
|
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
for (HourlyAreaSummary summary : items) {
|
||||||
|
if (summary == null) continue;
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_area_tracks_summary_hourly
|
||||||
|
(area_id, time_bucket, total_vessels, total_distance_nm, avg_speed, vessel_list, created_at)
|
||||||
|
SELECT
|
||||||
|
area_id,
|
||||||
|
?::timestamp as time_bucket,
|
||||||
|
COUNT(DISTINCT sig_src_cd || '_' || target_id) as total_vessels,
|
||||||
|
SUM(distance_nm) as total_distance_nm,
|
||||||
|
AVG(avg_speed) as avg_speed,
|
||||||
|
jsonb_agg(DISTINCT jsonb_build_object(
|
||||||
|
'sig_src_cd', sig_src_cd,
|
||||||
|
'target_id', target_id,
|
||||||
|
'distance_nm', distance_nm,
|
||||||
|
'avg_speed', avg_speed
|
||||||
|
)) as vessel_list,
|
||||||
|
NOW()
|
||||||
|
FROM signal.t_area_vessel_tracks
|
||||||
|
WHERE area_id = ?
|
||||||
|
AND time_bucket >= ?
|
||||||
|
AND time_bucket < ?
|
||||||
|
GROUP BY area_id
|
||||||
|
ON CONFLICT (area_id, time_bucket) DO UPDATE SET
|
||||||
|
total_vessels = EXCLUDED.total_vessels,
|
||||||
|
total_distance_nm = EXCLUDED.total_distance_nm,
|
||||||
|
avg_speed = EXCLUDED.avg_speed,
|
||||||
|
vessel_list = EXCLUDED.vessel_list,
|
||||||
|
created_at = NOW()
|
||||||
|
""";
|
||||||
|
|
||||||
|
jdbcTemplate.update(sql, hourBucket, summary.areaId, start, end);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// 비정상 궤적 검출 관련 빈 정의
|
||||||
|
@Bean
|
||||||
|
public ItemProcessor<VesselTrack.VesselKey, AbnormalDetectionResult> hourlyTrackProcessorWithAbnormalDetection() {
|
||||||
|
return new HourlyTrackProcessorWithAbnormalDetection(
|
||||||
|
hourlyTrackItemProcessor(),
|
||||||
|
abnormalTrackDetector,
|
||||||
|
queryDataSource
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ItemWriter<AbnormalDetectionResult> hourlyCompositeTrackWriter() {
|
||||||
|
// Job 이름 직접 설정
|
||||||
|
abnormalTrackWriter.setJobName("hourlyAggregationJob");
|
||||||
|
return new CompositeTrackWriter(
|
||||||
|
vesselTrackBulkWriter,
|
||||||
|
abnormalTrackWriter,
|
||||||
|
"hourly"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Summary 클래스들
|
||||||
|
public static class HourlyGridSummary {
|
||||||
|
public Integer haeguNo;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class HourlyAreaSummary {
|
||||||
|
public String areaId;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,178 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselLatestPosition;
|
||||||
|
import gc.mda.signal_batch.batch.processor.LatestPositionProcessor;
|
||||||
|
import gc.mda.signal_batch.batch.reader.InMemoryVesselDataReader;
|
||||||
|
import gc.mda.signal_batch.batch.reader.PartitionedReader;
|
||||||
|
import gc.mda.signal_batch.batch.reader.VesselDataReader;
|
||||||
|
import gc.mda.signal_batch.batch.writer.UpsertWriter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Step;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.batch.core.step.builder.StepBuilder;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.batch.item.ItemReader;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.ApplicationContext;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.core.task.TaskExecutor;
|
||||||
|
import org.springframework.retry.RetryPolicy;
|
||||||
|
import org.springframework.retry.backoff.BackOffPolicy;
|
||||||
|
import org.springframework.retry.backoff.ExponentialBackOffPolicy;
|
||||||
|
import org.springframework.retry.policy.SimpleRetryPolicy;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 작업 비활성화
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class LatestPositionStepConfig {
|
||||||
|
|
||||||
|
private final JobRepository jobRepository;
|
||||||
|
private final PlatformTransactionManager queryTransactionManager;
|
||||||
|
private final LatestPositionProcessor latestPositionProcessor;
|
||||||
|
private final UpsertWriter upsertWriter;
|
||||||
|
private final PartitionedReader partitionedReader;
|
||||||
|
private final ApplicationContext applicationContext;
|
||||||
|
private final TaskExecutor batchTaskExecutor;
|
||||||
|
private final TaskExecutor partitionTaskExecutor;
|
||||||
|
|
||||||
|
public LatestPositionStepConfig(
|
||||||
|
JobRepository jobRepository,
|
||||||
|
@Qualifier("queryTransactionManager") PlatformTransactionManager queryTransactionManager,
|
||||||
|
LatestPositionProcessor latestPositionProcessor,
|
||||||
|
UpsertWriter upsertWriter,
|
||||||
|
PartitionedReader partitionedReader,
|
||||||
|
ApplicationContext applicationContext,
|
||||||
|
@Qualifier("batchTaskExecutor") TaskExecutor batchTaskExecutor,
|
||||||
|
@Qualifier("partitionTaskExecutor") TaskExecutor partitionTaskExecutor) {
|
||||||
|
this.jobRepository = jobRepository;
|
||||||
|
this.queryTransactionManager = queryTransactionManager;
|
||||||
|
this.latestPositionProcessor = latestPositionProcessor;
|
||||||
|
this.upsertWriter = upsertWriter;
|
||||||
|
this.partitionedReader = partitionedReader;
|
||||||
|
this.applicationContext = applicationContext;
|
||||||
|
this.batchTaskExecutor = batchTaskExecutor;
|
||||||
|
this.partitionTaskExecutor = partitionTaskExecutor;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step updateLatestPositionStep() {
|
||||||
|
// InMemoryVesselDataReader를 ApplicationContext에서 가져옴
|
||||||
|
InMemoryVesselDataReader inMemoryReader = applicationContext.getBean(InMemoryVesselDataReader.class);
|
||||||
|
|
||||||
|
return new StepBuilder("updateLatestPositionStep", jobRepository)
|
||||||
|
.<VesselData, VesselLatestPosition>chunk(10000, queryTransactionManager)
|
||||||
|
.reader(inMemoryReader) // 메모리 기반 Reader 사용
|
||||||
|
.processor(latestPositionProcessor.processor())
|
||||||
|
.writer(upsertWriter.latestPositionWriter())
|
||||||
|
.faultTolerant()
|
||||||
|
.retryLimit(3)
|
||||||
|
.retry(org.springframework.dao.CannotAcquireLockException.class)
|
||||||
|
.skipLimit(1000)
|
||||||
|
.skip(org.springframework.dao.EmptyResultDataAccessException.class)
|
||||||
|
.skip(Exception.class)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 메모리 기반 Reader 사용으로 제거
|
||||||
|
// @Bean
|
||||||
|
// @StepScope
|
||||||
|
// public ItemReader<VesselData> defaultVesselDataReader() { ... }
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step partitionedLatestPositionStep() {
|
||||||
|
return new StepBuilder("partitionedLatestPositionStep", jobRepository)
|
||||||
|
.partitioner("latestPositionPartitioner", dayPartitioner(null))
|
||||||
|
.partitionHandler(latestPositionPartitionHandler())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public TaskExecutorPartitionHandler latestPositionPartitionHandler() {
|
||||||
|
TaskExecutorPartitionHandler handler = new TaskExecutorPartitionHandler();
|
||||||
|
handler.setTaskExecutor(partitionTaskExecutor);
|
||||||
|
handler.setStep(latestPositionSlaveStep());
|
||||||
|
handler.setGridSize(24);
|
||||||
|
return handler;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step latestPositionSlaveStep() {
|
||||||
|
return new StepBuilder("latestPositionSlaveStep", jobRepository)
|
||||||
|
.<VesselData, VesselLatestPosition>chunk(3000, queryTransactionManager)
|
||||||
|
.reader(slaveVesselDataReader(null, null, null))
|
||||||
|
.processor(slaveLatestPositionProcessor())
|
||||||
|
.writer(upsertWriter.latestPositionWriter())
|
||||||
|
.faultTolerant()
|
||||||
|
.retryPolicy(retryPolicy())
|
||||||
|
.backOffPolicy(exponentialBackOffPolicy())
|
||||||
|
.skipLimit(50)
|
||||||
|
.skip(Exception.class)
|
||||||
|
.noRollback(org.springframework.dao.DuplicateKeyException.class)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemReader<VesselData> slaveVesselDataReader(
|
||||||
|
@Value("#{stepExecutionContext['startTime']}") String startTime,
|
||||||
|
@Value("#{stepExecutionContext['endTime']}") String endTime,
|
||||||
|
@Value("#{stepExecutionContext['partition']}") String partition) {
|
||||||
|
|
||||||
|
// ApplicationContext에서 VesselDataReader를 가져와서 사용
|
||||||
|
VesselDataReader reader = applicationContext.getBean(VesselDataReader.class);
|
||||||
|
|
||||||
|
return reader.vesselLatestPositionReader(
|
||||||
|
LocalDateTime.parse(startTime),
|
||||||
|
LocalDateTime.parse(endTime),
|
||||||
|
partition
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemProcessor<VesselData, VesselLatestPosition> slaveLatestPositionProcessor() {
|
||||||
|
return latestPositionProcessor.processor();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public org.springframework.batch.core.partition.support.Partitioner dayPartitioner(
|
||||||
|
@Value("#{jobParameters['processingDate']}") String processingDateStr) {
|
||||||
|
LocalDate processingDate = processingDateStr != null ? LocalDate.parse(processingDateStr) : null;
|
||||||
|
return partitionedReader.dayPartitioner(processingDate);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public RetryPolicy retryPolicy() {
|
||||||
|
Map<Class<? extends Throwable>, Boolean> retryableExceptions = new HashMap<>();
|
||||||
|
retryableExceptions.put(org.springframework.dao.CannotAcquireLockException.class, true);
|
||||||
|
retryableExceptions.put(org.springframework.dao.DataAccessException.class, true);
|
||||||
|
|
||||||
|
SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(3, retryableExceptions);
|
||||||
|
return retryPolicy;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public BackOffPolicy exponentialBackOffPolicy() {
|
||||||
|
ExponentialBackOffPolicy backOffPolicy = new ExponentialBackOffPolicy();
|
||||||
|
backOffPolicy.setInitialInterval(1000); // 1초
|
||||||
|
backOffPolicy.setMaxInterval(10000); // 최대 10초
|
||||||
|
backOffPolicy.setMultiplier(2.0); // 2배씩 증가
|
||||||
|
return backOffPolicy;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,350 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.batch.processor.AccumulatingTileProcessor;
|
||||||
|
import gc.mda.signal_batch.domain.gis.model.TileStatistics;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import gc.mda.signal_batch.batch.processor.TileAggregationProcessor;
|
||||||
|
import gc.mda.signal_batch.batch.reader.InMemoryVesselDataReader;
|
||||||
|
import gc.mda.signal_batch.batch.reader.PartitionedReader;
|
||||||
|
import gc.mda.signal_batch.batch.reader.VesselDataReader;
|
||||||
|
import gc.mda.signal_batch.batch.writer.OptimizedBulkInsertWriter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Step;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.batch.core.step.builder.StepBuilder;
|
||||||
|
import org.springframework.batch.item.Chunk;
|
||||||
|
import org.springframework.batch.item.ItemWriter;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.batch.item.ItemReader;
|
||||||
|
import org.springframework.batch.item.support.CompositeItemProcessor;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.ApplicationContext;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.core.task.TaskExecutor;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 작업 비활성화
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class TileAggregationStepConfig {
|
||||||
|
|
||||||
|
private final JobRepository jobRepository;
|
||||||
|
private final PlatformTransactionManager queryTransactionManager;
|
||||||
|
private final VesselDataReader vesselDataReader;
|
||||||
|
private final TileAggregationProcessor tileAggregationProcessor;
|
||||||
|
private final AccumulatingTileProcessor accumulatingTileProcessor;
|
||||||
|
private final OptimizedBulkInsertWriter optimizedBulkInsertWriter;
|
||||||
|
private final PartitionedReader partitionedReader;
|
||||||
|
private final ApplicationContext applicationContext;
|
||||||
|
private final TaskExecutor batchTaskExecutor;
|
||||||
|
private final TaskExecutor partitionTaskExecutor;
|
||||||
|
|
||||||
|
// Explicit constructor so the transaction manager and the two TaskExecutors
// can be disambiguated with @Qualifier (multiple candidates in the context).
public TileAggregationStepConfig(
        JobRepository jobRepository,
        @Qualifier("queryTransactionManager") PlatformTransactionManager queryTransactionManager,
        VesselDataReader vesselDataReader,
        TileAggregationProcessor tileAggregationProcessor,
        AccumulatingTileProcessor accumulatingTileProcessor,
        OptimizedBulkInsertWriter optimizedBulkInsertWriter,
        PartitionedReader partitionedReader,
        ApplicationContext applicationContext,
        @Qualifier("batchTaskExecutor") TaskExecutor batchTaskExecutor,
        @Qualifier("partitionTaskExecutor") TaskExecutor partitionTaskExecutor) {
    this.jobRepository = jobRepository;
    this.queryTransactionManager = queryTransactionManager;
    this.vesselDataReader = vesselDataReader;
    this.tileAggregationProcessor = tileAggregationProcessor;
    this.accumulatingTileProcessor = accumulatingTileProcessor;
    this.optimizedBulkInsertWriter = optimizedBulkInsertWriter;
    this.partitionedReader = partitionedReader;
    this.applicationContext = applicationContext;
    this.batchTaskExecutor = batchTaskExecutor;
    this.partitionTaskExecutor = partitionTaskExecutor;
}
|
||||||
|
|
||||||
|
/**
 * Step that folds in-memory vessel positions into per-tile statistics via the
 * accumulating processor.
 * NOTE(review): the writer is AccumulatedTileWriter and the listener is
 * tileAggregationStepListener() — both defined outside this view; presumably
 * the accumulated state is flushed by one of them. TODO confirm.
 */
@Bean
public Step aggregateTileStatisticsStep() {
    // The in-memory reader is looked up from the ApplicationContext
    InMemoryVesselDataReader inMemoryReader = applicationContext.getBean(InMemoryVesselDataReader.class);

    return new StepBuilder("aggregateTileStatisticsStep", jobRepository)
            .<VesselData, TileStatistics>chunk(50000, queryTransactionManager)
            .reader(inMemoryReader) // memory-based reader
            .processor(accumulatingTileProcessor)
            .writer(new AccumulatedTileWriter())
            .listener(tileAggregationStepListener())
            .faultTolerant()
            .skipLimit(1000)
            .skip(Exception.class) // NOTE(review): broad skip — up to 1000 failed items dropped silently
            .build();
}
|
||||||
|
|
||||||
|
/**
 * Lazily-initialized reader over the latest vessel positions in the
 * [startTime, endTime) job-parameter window.
 * <p>
 * The delegate is created and opened on the first {@code read()} call (not at
 * bean construction) and closed when the stream is exhausted, so the underlying
 * resource lives only for the duration of one pass over the data.
 * NOTE(review): open() pulls the ExecutionContext from
 * StepSynchronizationManager, so this reader only works inside an active step.
 */
@Bean
@StepScope
public ItemReader<VesselData> tileDataReader(
        @Value("#{jobParameters['startTime']}") String startTimeStr,
        @Value("#{jobParameters['endTime']}") String endTimeStr) {
    return new ItemReader<VesselData>() {
        private ItemReader<VesselData> delegate;   // real reader, created lazily
        private boolean initialized = false;       // guards one-time setup

        @Override
        public VesselData read() throws Exception {
            if (!initialized) {
                // Job parameters may be absent; a null bound is passed through.
                LocalDateTime startTime = startTimeStr != null ? LocalDateTime.parse(startTimeStr) : null;
                LocalDateTime endTime = endTimeStr != null ? LocalDateTime.parse(endTimeStr) : null;
                log.info("Creating tileDataReader with startTime: {}, endTime: {}", startTime, endTime);

                // Close any previous delegate (defensive; re-entry after reset)
                if (delegate != null) {
                    try {
                        ((org.springframework.batch.item.ItemStream) delegate).close();
                    } catch (Exception e) {
                        log.debug("Failed to close previous reader: {}", e.getMessage());
                    }
                }

                // Latest positions only
                delegate = vesselDataReader.vesselLatestPositionReader(startTime, endTime, null);
                ((org.springframework.batch.item.ItemStream) delegate).open(
                        org.springframework.batch.core.scope.context.StepSynchronizationManager
                                .getContext().getStepExecution().getExecutionContext());
                initialized = true;
            }

            VesselData data = delegate.read();

            // Close the delegate once it signals end-of-data, and reset the
            // lazy-init flag so a subsequent read() would rebuild it.
            if (data == null && delegate != null) {
                try {
                    ((org.springframework.batch.item.ItemStream) delegate).close();
                    delegate = null;
                    initialized = false;
                } catch (Exception e) {
                    log.debug("Failed to close reader on completion: {}", e.getMessage());
                }
            }

            return data;
        }
    };
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step partitionedTileAggregationStep() {
|
||||||
|
return new StepBuilder("partitionedTileAggregationStep", jobRepository)
|
||||||
|
.partitioner("tileAggregationPartitioner", partitionedReader.dayPartitioner(null))
|
||||||
|
.partitionHandler(tileAggregationPartitionHandler())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public TaskExecutorPartitionHandler tileAggregationPartitionHandler() {
|
||||||
|
TaskExecutorPartitionHandler handler = new TaskExecutorPartitionHandler();
|
||||||
|
handler.setTaskExecutor(partitionTaskExecutor);
|
||||||
|
handler.setStep(tileAggregationSlaveStep());
|
||||||
|
handler.setGridSize(24);
|
||||||
|
return handler;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Step tileAggregationSlaveStep() {
|
||||||
|
return new StepBuilder("tileAggregationSlaveStep", jobRepository)
|
||||||
|
.<List<VesselData>, List<TileStatistics>>chunk(50, queryTransactionManager)
|
||||||
|
.reader(slaveTileBatchVesselDataReader(null, null, null))
|
||||||
|
.processor(slaveTileProcessor(null, null))
|
||||||
|
.writer(optimizedBulkInsertWriter.tileStatisticsBulkWriter())
|
||||||
|
.faultTolerant()
|
||||||
|
.skipLimit(100)
|
||||||
|
.skip(Exception.class)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemReader<List<VesselData>> tileBatchVesselDataReader(
|
||||||
|
@Value("#{jobParameters['startTime']}") String startTimeStr,
|
||||||
|
@Value("#{jobParameters['endTime']}") String endTimeStr) {
|
||||||
|
LocalDateTime startTime = startTimeStr != null ? LocalDateTime.parse(startTimeStr) : null;
|
||||||
|
LocalDateTime endTime = endTimeStr != null ? LocalDateTime.parse(endTimeStr) : null;
|
||||||
|
return new ItemReader<List<VesselData>>() {
|
||||||
|
private ItemReader<VesselData> delegate = vesselDataReader.vesselDataPagingReader(startTime, endTime, null);
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<VesselData> read() throws Exception {
|
||||||
|
List<VesselData> batch = new java.util.ArrayList<>();
|
||||||
|
|
||||||
|
for (int i = 0; i < 1000; i++) {
|
||||||
|
VesselData item = delegate.read();
|
||||||
|
if (item == null) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
batch.add(item);
|
||||||
|
}
|
||||||
|
|
||||||
|
return batch.isEmpty() ? null : batch;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemReader<List<VesselData>> slaveTileBatchVesselDataReader(
|
||||||
|
@Value("#{stepExecutionContext['startTime']}") String startTime,
|
||||||
|
@Value("#{stepExecutionContext['endTime']}") String endTime,
|
||||||
|
@Value("#{stepExecutionContext['partition']}") String partition) {
|
||||||
|
|
||||||
|
return new ItemReader<List<VesselData>>() {
|
||||||
|
private ItemReader<VesselData> delegate = vesselDataReader.vesselDataPagingReader(
|
||||||
|
startTime != null ? LocalDateTime.parse(startTime) : null,
|
||||||
|
endTime != null ? LocalDateTime.parse(endTime) : null,
|
||||||
|
partition
|
||||||
|
);
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<VesselData> read() throws Exception {
|
||||||
|
List<VesselData> batch = new java.util.ArrayList<>();
|
||||||
|
|
||||||
|
for (int i = 0; i < 1000; i++) {
|
||||||
|
VesselData item = delegate.read();
|
||||||
|
if (item == null) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
batch.add(item);
|
||||||
|
}
|
||||||
|
|
||||||
|
return batch.isEmpty() ? null : batch;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemProcessor<List<VesselData>, List<TileStatistics>> slaveTileProcessor(
|
||||||
|
@Value("#{jobParameters['tileLevel']}") Integer tileLevel,
|
||||||
|
@Value("#{jobParameters['timeBucketMinutes']}") Integer timeBucketMinutes) {
|
||||||
|
|
||||||
|
final int bucketMinutes = (timeBucketMinutes != null) ? timeBucketMinutes : 5;
|
||||||
|
|
||||||
|
// 여러 레벨 처리를 위한 복합 프로세서
|
||||||
|
if (tileLevel == null) {
|
||||||
|
CompositeItemProcessor<List<VesselData>, List<TileStatistics>> compositeProcessor =
|
||||||
|
new CompositeItemProcessor<>();
|
||||||
|
|
||||||
|
compositeProcessor.setDelegates(Arrays.asList(
|
||||||
|
tileAggregationProcessor.batchProcessor(0, bucketMinutes),
|
||||||
|
tileAggregationProcessor.batchProcessor(1, bucketMinutes),
|
||||||
|
tileAggregationProcessor.batchProcessor(2, bucketMinutes)
|
||||||
|
));
|
||||||
|
|
||||||
|
return compositeProcessor;
|
||||||
|
} else {
|
||||||
|
return tileAggregationProcessor.batchProcessor(tileLevel, bucketMinutes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public ItemProcessor<VesselData, List<TileStatistics>> batchTileProcessor(
|
||||||
|
@Value("#{jobParameters['tileLevel']}") Integer tileLevel,
|
||||||
|
@Value("#{jobParameters['timeBucketMinutes']}") Integer timeBucketMinutes) {
|
||||||
|
|
||||||
|
final int level = (tileLevel != null) ? tileLevel : 1;
|
||||||
|
final int bucketMinutes = (timeBucketMinutes != null) ? timeBucketMinutes : 5;
|
||||||
|
|
||||||
|
return new ItemProcessor<VesselData, List<TileStatistics>>() {
|
||||||
|
private final List<VesselData> buffer = new ArrayList<>(1000);
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<TileStatistics> process(VesselData item) throws Exception {
|
||||||
|
if (item == null || !item.isValidPosition()) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
buffer.add(item);
|
||||||
|
|
||||||
|
// 버퍼가 차면 처리
|
||||||
|
if (buffer.size() >= 1000) {
|
||||||
|
List<TileStatistics> result = tileAggregationProcessor
|
||||||
|
.batchProcessor(level, bucketMinutes)
|
||||||
|
.process(new ArrayList<>(buffer));
|
||||||
|
buffer.clear();
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 누적된 결과를 한 번에 처리하는 Writer
|
||||||
|
*/
|
||||||
|
private class AccumulatedTileWriter implements ItemWriter<TileStatistics> {
|
||||||
|
@Override
|
||||||
|
public void write(Chunk<? extends TileStatistics> chunk) throws Exception {
|
||||||
|
// 대부분의 아이템은 null일 것임 (processor에서 null 반환)
|
||||||
|
// 실제 데이터는 Step 종료 시 처리됨
|
||||||
|
log.debug("AccumulatedTileWriter called with {} items", chunk.size());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Step 종료 후 누적된 데이터를 처리하는 리스너
|
||||||
|
*/
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public org.springframework.batch.core.StepExecutionListener tileAggregationStepListener() {
|
||||||
|
return new org.springframework.batch.core.StepExecutionListener() {
|
||||||
|
@Override
|
||||||
|
public void beforeStep(org.springframework.batch.core.StepExecution stepExecution) {
|
||||||
|
// beforeStep에서는 특별한 처리 없음
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public org.springframework.batch.core.ExitStatus afterStep(org.springframework.batch.core.StepExecution stepExecution) {
|
||||||
|
log.info("[TileAggregationStepListener] afterStep called");
|
||||||
|
|
||||||
|
try {
|
||||||
|
// AccumulatingTileProcessor에서 직접 결과 가져오기
|
||||||
|
List<TileStatistics> accumulatedTiles = accumulatingTileProcessor.getAccumulatedResults();
|
||||||
|
log.info("[TileAggregationStepListener] Retrieved {} tiles from processor",
|
||||||
|
accumulatedTiles != null ? accumulatedTiles.size() : 0);
|
||||||
|
|
||||||
|
if (accumulatedTiles != null && !accumulatedTiles.isEmpty()) {
|
||||||
|
log.info("Writing {} accumulated tiles to database", accumulatedTiles.size());
|
||||||
|
|
||||||
|
// Bulk Writer를 사용하여 한 번에 저장
|
||||||
|
ItemWriter<List<TileStatistics>> writer = optimizedBulkInsertWriter.tileStatisticsBulkWriter();
|
||||||
|
Chunk<List<TileStatistics>> chunk = new Chunk<>();
|
||||||
|
chunk.add(accumulatedTiles);
|
||||||
|
writer.write(chunk);
|
||||||
|
|
||||||
|
log.info("Successfully wrote all accumulated tiles");
|
||||||
|
stepExecution.setWriteCount(accumulatedTiles.size());
|
||||||
|
} else {
|
||||||
|
log.warn("[TileAggregationStepListener] No tiles to write!");
|
||||||
|
}
|
||||||
|
|
||||||
|
return stepExecution.getExitStatus();
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to write accumulated tiles", e);
|
||||||
|
return org.springframework.batch.core.ExitStatus.FAILED;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,78 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.util.SharedDataJobListener;
|
||||||
|
import gc.mda.signal_batch.global.util.VesselDataHolder;
|
||||||
|
import gc.mda.signal_batch.batch.listener.JobCompletionListener;
|
||||||
|
import gc.mda.signal_batch.batch.listener.PerformanceOptimizationListener;
|
||||||
|
import gc.mda.signal_batch.batch.reader.InMemoryVesselDataReader;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Job;
|
||||||
|
import org.springframework.batch.core.JobParametersValidator;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.job.DefaultJobParametersValidator;
|
||||||
|
import org.springframework.batch.core.job.builder.JobBuilder;
|
||||||
|
import org.springframework.batch.core.launch.support.RunIdIncrementer;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 작업 비활성화
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class VesselAggregationJobConfig {
|
||||||
|
|
||||||
|
private final JobRepository jobRepository;
|
||||||
|
private final LatestPositionStepConfig latestPositionStepConfig;
|
||||||
|
private final TileAggregationStepConfig tileAggregationStepConfig;
|
||||||
|
private final AreaStatisticsStepConfig areaStatisticsStepConfig;
|
||||||
|
private final JobCompletionListener jobCompletionListener;
|
||||||
|
private final SharedDataJobListener sharedDataJobListener;
|
||||||
|
private final VesselDataHolder vesselDataHolder;
|
||||||
|
private final PerformanceOptimizationListener performanceOptimizationListener;
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Job vesselAggregationJob() {
|
||||||
|
return new JobBuilder("vesselAggregationJob", jobRepository)
|
||||||
|
.incrementer(new RunIdIncrementer())
|
||||||
|
.validator(jobParametersValidator())
|
||||||
|
.listener(jobCompletionListener)
|
||||||
|
.listener(sharedDataJobListener) // 데이터 로드 리스너 추가
|
||||||
|
.listener(performanceOptimizationListener) // 성능 최적화 리스너 추가
|
||||||
|
.start(latestPositionStepConfig.updateLatestPositionStep())
|
||||||
|
.next(tileAggregationStepConfig.aggregateTileStatisticsStep())
|
||||||
|
.next(areaStatisticsStepConfig.aggregateAreaStatisticsStep())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@StepScope
|
||||||
|
public InMemoryVesselDataReader inMemoryVesselDataReader() {
|
||||||
|
return new InMemoryVesselDataReader(vesselDataHolder);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public Job vesselDailyPositionJob() {
|
||||||
|
return new JobBuilder("vesselDailyPositionJob", jobRepository)
|
||||||
|
.incrementer(new RunIdIncrementer())
|
||||||
|
.listener(jobCompletionListener)
|
||||||
|
.start(latestPositionStepConfig.partitionedLatestPositionStep())
|
||||||
|
.next(tileAggregationStepConfig.partitionedTileAggregationStep())
|
||||||
|
.next(areaStatisticsStepConfig.partitionedAreaStatisticsStep())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JobParametersValidator jobParametersValidator() {
|
||||||
|
DefaultJobParametersValidator validator = new DefaultJobParametersValidator();
|
||||||
|
validator.setRequiredKeys(new String[]{"startTime", "endTime"});
|
||||||
|
validator.setOptionalKeys(new String[]{"executionTime", "processingDate",
|
||||||
|
"tileLevel", "partitionCount"});
|
||||||
|
return validator;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,261 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.*;
|
||||||
|
import org.springframework.batch.core.launch.JobLauncher;
|
||||||
|
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.scheduling.annotation.Scheduled;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 스케줄러 비활성화
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class VesselBatchScheduler {
|
||||||
|
|
||||||
|
private volatile boolean trackAggregationRunning = false;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
@Qualifier("asyncJobLauncher")
|
||||||
|
private JobLauncher jobLauncher;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
@Qualifier("vesselAggregationJob")
|
||||||
|
private Job vesselAggregationJob;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
@Qualifier("vesselTrackAggregationJob")
|
||||||
|
private Job vesselTrackAggregationJob;
|
||||||
|
|
||||||
|
@Autowired(required = false)
|
||||||
|
@Qualifier("hourlyAggregationJob")
|
||||||
|
private Job hourlyAggregationJob;
|
||||||
|
|
||||||
|
@Autowired(required = false)
|
||||||
|
@Qualifier("dailyAggregationJob")
|
||||||
|
private Job dailyAggregationJob;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.scheduler.enabled:true}")
|
||||||
|
private boolean schedulerEnabled;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.scheduler.incremental.delay-minutes:2}")
|
||||||
|
private int incrementalDelayMinutes;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.abnormal-detection.enabled:true}")
|
||||||
|
private boolean abnormalDetectionEnabled;
|
||||||
|
/**
|
||||||
|
* 5분 단위 증분 처리 (3분 지연으로 데이터 수집 대기)
|
||||||
|
* 매 5분마다 실행 (0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55분)
|
||||||
|
*/
|
||||||
|
@Scheduled(cron = "0 3,8,13,18,23,28,33,38,43,48,53,58 * * * *")
|
||||||
|
public void runIncrementalAggregation() {
|
||||||
|
if (!schedulerEnabled) {
|
||||||
|
log.debug("Scheduler is disabled");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 3분 전 데이터를 처리 (데이터 수집 지연 고려)
|
||||||
|
LocalDateTime now = LocalDateTime.now();
|
||||||
|
LocalDateTime endTime = now.minusMinutes(incrementalDelayMinutes);
|
||||||
|
LocalDateTime startTime = endTime.minusMinutes(5);
|
||||||
|
|
||||||
|
log.info("Starting incremental aggregation for period: {} to {}", startTime, endTime);
|
||||||
|
|
||||||
|
JobParameters params = new JobParametersBuilder()
|
||||||
|
.addString("startTime", startTime.withNano(0).toString())
|
||||||
|
.addString("endTime", endTime.withNano(0).toString())
|
||||||
|
.addString("jobType", "INCREMENTAL")
|
||||||
|
.addString("timeBucketMinutes", "5") // 5분 단위 집계
|
||||||
|
// executionTime 제거 - startTime/endTime만으로 고유성 보장
|
||||||
|
.toJobParameters();
|
||||||
|
|
||||||
|
JobExecution execution = jobLauncher.run(vesselAggregationJob, params);
|
||||||
|
|
||||||
|
log.info("Incremental aggregation started with execution ID: {}", execution.getId());
|
||||||
|
|
||||||
|
} catch (JobExecutionAlreadyRunningException e) {
|
||||||
|
log.warn("Previous incremental job is still running, skipping this execution");
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to start incremental aggregation", e);
|
||||||
|
// 중복 키 오류인 경우 경고로만 처리
|
||||||
|
if (e.getMessage().contains("중복된 키") || e.getMessage().contains("duplicate key")) {
|
||||||
|
log.warn("Duplicate key detected, job may have already processed this time bucket");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//
|
||||||
|
/**
|
||||||
|
* 5분 단위 궤적 집계 처리 (4분 지연으로 위치 집계 이후 실행)
|
||||||
|
* 매 5분마다 실행 (4, 9, 14, 19, 24, 29, 34, 39, 44, 49, 54, 59분)
|
||||||
|
*/
|
||||||
|
@Scheduled(cron = "0 4,9,14,19,24,29,34,39,44,49,54,59 * * * *")
|
||||||
|
public void runTrackAggregation() {
|
||||||
|
log.info("=== runTrackAggregation() called by thread: {} ===", Thread.currentThread().getName());
|
||||||
|
|
||||||
|
if (!schedulerEnabled) {
|
||||||
|
log.debug("Scheduler is disabled");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 실행 가드: 이미 실행 중이면 스킵
|
||||||
|
if (trackAggregationRunning) {
|
||||||
|
log.warn("Track aggregation is already running, skipping this execution - thread: {}", Thread.currentThread().getName());
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
trackAggregationRunning = true;
|
||||||
|
try {
|
||||||
|
// 4분 전 데이터를 처리 (위치 집계 완료 후)
|
||||||
|
LocalDateTime now = LocalDateTime.now();
|
||||||
|
LocalDateTime endTime = now.minusMinutes(incrementalDelayMinutes + 1); // 3+1=4분 지연
|
||||||
|
LocalDateTime startTime = endTime.minusMinutes(5);
|
||||||
|
|
||||||
|
// 5분 버킷 계산
|
||||||
|
LocalDateTime timeBucket = startTime
|
||||||
|
.withSecond(0)
|
||||||
|
.withNano(0)
|
||||||
|
.minusMinutes(startTime.getMinute() % 5);
|
||||||
|
|
||||||
|
log.info("Starting track aggregation for period: {} to {} (bucket: {})",
|
||||||
|
startTime, endTime, timeBucket);
|
||||||
|
|
||||||
|
// Timestamp 형식에 맞게 포매팅
|
||||||
|
DateTimeFormatter timestampFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
JobParameters params = new JobParametersBuilder()
|
||||||
|
.addString("startTime", startTime.withNano(0).toString())
|
||||||
|
.addString("endTime", endTime.withNano(0).toString())
|
||||||
|
.addString("timeBucket", timeBucket.format(timestampFormatter))
|
||||||
|
.addString("jobType", "TRACK_INCREMENTAL")
|
||||||
|
// executionTime, nanoTime 제거 - timeBucket만으로 고유성 보장
|
||||||
|
.toJobParameters();
|
||||||
|
|
||||||
|
log.info("=== ABOUT TO CALL jobLauncher.run() - Thread: {} ===", Thread.currentThread().getName());
|
||||||
|
JobExecution execution = jobLauncher.run(vesselTrackAggregationJob, params);
|
||||||
|
log.info("=== COMPLETED jobLauncher.run() - Thread: {} - Execution ID: {} ===",
|
||||||
|
Thread.currentThread().getName(), execution.getId());
|
||||||
|
|
||||||
|
log.info("Track aggregation started with execution ID: {}", execution.getId());
|
||||||
|
|
||||||
|
} catch (JobExecutionAlreadyRunningException e) {
|
||||||
|
log.warn("Previous track job is still running, skipping this execution");
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to start track aggregation", e);
|
||||||
|
// 중복 키 오류인 경우 경고로만 처리
|
||||||
|
if (e.getMessage().contains("중복된 키") || e.getMessage().contains("duplicate key")) {
|
||||||
|
log.warn("Duplicate key detected, job may have already processed this time bucket");
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
trackAggregationRunning = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 향상된 1시간 집계 스케줄 - 매시 10분
|
||||||
|
* 비정상 궤적 검출 기능 포함
|
||||||
|
*/
|
||||||
|
@Scheduled(cron = "0 10 * * * *")
|
||||||
|
public void runHourlyAggregation() {
|
||||||
|
if (!schedulerEnabled || hourlyAggregationJob == null) {
|
||||||
|
log.debug("Hourly aggregation job is not available");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
LocalDateTime now = LocalDateTime.now();
|
||||||
|
LocalDateTime startTime = now.minusHours(1).withMinute(0).withSecond(0).withNano(0);
|
||||||
|
LocalDateTime endTime = startTime.plusHours(1);
|
||||||
|
|
||||||
|
JobParameters params = new JobParametersBuilder()
|
||||||
|
.addString("startTime", startTime.toString())
|
||||||
|
.addString("endTime", endTime.toString())
|
||||||
|
.addString("timeBucket", "hourly")
|
||||||
|
.addString("executionTime", now.toString())
|
||||||
|
.addString("enableAbnormalDetection", String.valueOf(abnormalDetectionEnabled))
|
||||||
|
.toJobParameters();
|
||||||
|
|
||||||
|
JobExecution execution = jobLauncher.run(hourlyAggregationJob, params);
|
||||||
|
log.info("Started enhanced hourly aggregation job: {} for period {} to {} (abnormal detection: {})",
|
||||||
|
execution.getId(), startTime, endTime, abnormalDetectionEnabled);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to start enhanced hourly aggregation job", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 향상된 1일 집계 스케줄 - 매일 01:00
|
||||||
|
* 비정상 궤적 검출 기능 포함
|
||||||
|
*/
|
||||||
|
@Scheduled(cron = "0 0 1 * * *")
|
||||||
|
public void runDailyAggregation() {
|
||||||
|
if (!schedulerEnabled || dailyAggregationJob == null) {
|
||||||
|
log.debug("Enhanced daily aggregation job is not available");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
LocalDateTime now = LocalDateTime.now();
|
||||||
|
LocalDateTime startTime = now.minusDays(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
|
||||||
|
LocalDateTime endTime = startTime.plusDays(1);
|
||||||
|
|
||||||
|
JobParameters params = new JobParametersBuilder()
|
||||||
|
.addString("startTime", startTime.toString())
|
||||||
|
.addString("endTime", endTime.toString())
|
||||||
|
.addString("timeBucket", "daily")
|
||||||
|
.addString("executionTime", now.toString())
|
||||||
|
.addString("enableAbnormalDetection", String.valueOf(abnormalDetectionEnabled))
|
||||||
|
.toJobParameters();
|
||||||
|
|
||||||
|
JobExecution execution = jobLauncher.run(dailyAggregationJob, params);
|
||||||
|
log.info("Started enhanced daily aggregation job: {} for date {} (abnormal detection: {})",
|
||||||
|
execution.getId(), startTime.toLocalDate(), abnormalDetectionEnabled);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to start enhanced daily aggregation job", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 비정상 궤적 통계 집계 (매일 02:00)
|
||||||
|
*/
|
||||||
|
@Scheduled(cron = "0 0 2 * * *")
|
||||||
|
public void updateAbnormalTrackStatistics() {
|
||||||
|
if (!schedulerEnabled || !abnormalDetectionEnabled) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
log.info("Updating abnormal track statistics...");
|
||||||
|
// TODO: 통계 업데이트 로직 구현
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to update abnormal track statistics", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* 데이터 지연 모니터링 및 캐치업 작업 (10분마다)
|
||||||
|
*/
|
||||||
|
@Scheduled(fixedDelay = 600000, initialDelay = 60000)
|
||||||
|
public void monitorAndCatchUp() {
|
||||||
|
if (!schedulerEnabled) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// TODO: 데이터 지연 확인 로직 구현
|
||||||
|
// 지연이 10분 이상이면 캐치업 작업 실행
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to monitor data delay", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,192 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.RecentVesselPositionDto;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.service.VesselLatestPositionCache;
|
||||||
|
import gc.mda.signal_batch.global.util.ShipKindCodeConverter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.jdbc.core.RowMapper;
|
||||||
|
import org.springframework.scheduling.annotation.Scheduled;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박 최신 위치 캐시 갱신 스케줄러
|
||||||
|
*
|
||||||
|
* 실행 주기: 1분마다 (매분 0초)
|
||||||
|
* 데이터 소스: Collect DB (sig_test 테이블)
|
||||||
|
* 처리 방식: 읽기 전용 (DB에 쓰기 없음, 캐시만 업데이트)
|
||||||
|
*
|
||||||
|
* 동작 흐름:
|
||||||
|
* 1. 매분 0초에 실행
|
||||||
|
* 2. 최근 2분치 데이터를 DB에서 조회 (수집 지연 고려)
|
||||||
|
* 3. DISTINCT ON으로 선박별 최신 위치만 추출
|
||||||
|
* 4. 캐시에 업데이트
|
||||||
|
*
|
||||||
|
* 기존 배치와의 관계:
|
||||||
|
* - 기존 5분 배치는 그대로 유지 (DB 저장)
|
||||||
|
* - 이 스케줄러는 캐시만 관리 (읽기 전용)
|
||||||
|
* - 충돌 없음
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.cache.latest-position.enabled", havingValue = "true", matchIfMissing = false)
|
||||||
|
public class VesselPositionCacheRefreshScheduler {
|
||||||
|
|
||||||
|
@Qualifier("collectJdbcTemplate")
|
||||||
|
private final JdbcTemplate collectJdbcTemplate;
|
||||||
|
|
||||||
|
private final VesselLatestPositionCache cache;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.cache.latest-position.refresh-interval-minutes:2}")
|
||||||
|
private int refreshIntervalMinutes;
|
||||||
|
|
||||||
|
private volatile boolean isRunning = false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 1분마다 캐시 갱신
|
||||||
|
* 매분 0초에 실행 (예: 10:00:00, 10:01:00, 10:02:00...)
|
||||||
|
*/
|
||||||
|
@Scheduled(cron = "0 * * * * *")
|
||||||
|
public void refreshCache() {
|
||||||
|
// 동시 실행 방지
|
||||||
|
if (isRunning) {
|
||||||
|
log.warn("Previous cache refresh is still running, skipping this execution");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
isRunning = true;
|
||||||
|
long startTime = System.currentTimeMillis();
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 최근 N분치 데이터 조회 (수집 지연 고려)
|
||||||
|
List<RecentVesselPositionDto> positions = fetchLatestPositions();
|
||||||
|
|
||||||
|
if (positions.isEmpty()) {
|
||||||
|
log.warn("No vessel positions found in last {} minutes", refreshIntervalMinutes);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 캐시 업데이트
|
||||||
|
cache.putAll(positions);
|
||||||
|
|
||||||
|
long duration = System.currentTimeMillis() - startTime;
|
||||||
|
log.info("Cache refresh completed in {}ms (fetched {} positions from DB)",
|
||||||
|
duration, positions.size());
|
||||||
|
|
||||||
|
// 캐시 통계 로깅 (5분마다만)
|
||||||
|
if (LocalDateTime.now().getMinute() % 5 == 0) {
|
||||||
|
logCacheStats();
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to refresh cache", e);
|
||||||
|
} finally {
|
||||||
|
isRunning = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DB에서 최신 위치 데이터 조회
|
||||||
|
*/
|
||||||
|
private List<RecentVesselPositionDto> fetchLatestPositions() {
|
||||||
|
LocalDateTime endTime = LocalDateTime.now();
|
||||||
|
LocalDateTime startTime = endTime.minusMinutes(refreshIntervalMinutes);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT DISTINCT ON (sig_src_cd, target_id)
|
||||||
|
sig_src_cd,
|
||||||
|
target_id,
|
||||||
|
lon,
|
||||||
|
lat,
|
||||||
|
sog,
|
||||||
|
cog,
|
||||||
|
ship_nm,
|
||||||
|
ship_ty,
|
||||||
|
message_time as last_update
|
||||||
|
FROM signal.sig_test
|
||||||
|
WHERE message_time >= ? AND message_time < ?
|
||||||
|
AND sig_src_cd NOT IN ('000004', '000005')
|
||||||
|
AND length(target_id) > 5
|
||||||
|
AND lat BETWEEN -90 AND 90
|
||||||
|
AND lon BETWEEN -180 AND 180
|
||||||
|
ORDER BY sig_src_cd, target_id, message_time DESC
|
||||||
|
""";
|
||||||
|
|
||||||
|
try {
|
||||||
|
return collectJdbcTemplate.query(sql,
|
||||||
|
new Object[]{Timestamp.valueOf(startTime), Timestamp.valueOf(endTime)},
|
||||||
|
new VesselPositionRowMapper());
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to fetch positions from DB", e);
|
||||||
|
return List.of();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 통계 로깅
|
||||||
|
*/
|
||||||
|
private void logCacheStats() {
|
||||||
|
try {
|
||||||
|
VesselLatestPositionCache.CacheStats stats = cache.getStats();
|
||||||
|
log.info("Cache Stats - Size: {}, HitRate: {:.2f}%, MissRate: {:.2f}%, Hits: {}, Misses: {}",
|
||||||
|
stats.currentSize(),
|
||||||
|
stats.hitRate(),
|
||||||
|
stats.missRate(),
|
||||||
|
stats.hitCount(),
|
||||||
|
stats.missCount());
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to get cache stats", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* RowMapper 구현
|
||||||
|
*/
|
||||||
|
private static class VesselPositionRowMapper implements RowMapper<RecentVesselPositionDto> {
|
||||||
|
@Override
|
||||||
|
public RecentVesselPositionDto mapRow(ResultSet rs, int rowNum) throws SQLException {
|
||||||
|
String sigSrcCd = rs.getString("sig_src_cd");
|
||||||
|
String targetId = rs.getString("target_id");
|
||||||
|
String shipTy = rs.getString("ship_ty");
|
||||||
|
|
||||||
|
// shipKindCode 계산
|
||||||
|
String shipKindCode = ShipKindCodeConverter.getShipKindCode(sigSrcCd, shipTy);
|
||||||
|
|
||||||
|
// nationalCode 계산
|
||||||
|
String nationalCode;
|
||||||
|
if ("000001".equals(sigSrcCd) && targetId != null && targetId.length() >= 3) {
|
||||||
|
nationalCode = targetId.substring(0, 3);
|
||||||
|
} else {
|
||||||
|
nationalCode = "440"; // 기본값
|
||||||
|
}
|
||||||
|
|
||||||
|
return RecentVesselPositionDto.builder()
|
||||||
|
.sigSrcCd(sigSrcCd)
|
||||||
|
.targetId(targetId)
|
||||||
|
.lon(rs.getDouble("lon"))
|
||||||
|
.lat(rs.getDouble("lat"))
|
||||||
|
.sog(rs.getBigDecimal("sog"))
|
||||||
|
.cog(rs.getBigDecimal("cog"))
|
||||||
|
.shipNm(rs.getString("ship_nm"))
|
||||||
|
.shipTy(shipTy)
|
||||||
|
.shipKindCode(shipKindCode)
|
||||||
|
.nationalCode(nationalCode)
|
||||||
|
.lastUpdate(rs.getTimestamp("last_update") != null ?
|
||||||
|
rs.getTimestamp("last_update").toLocalDateTime() : null)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,53 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.util.VesselTrackDataJobListener;
|
||||||
|
import gc.mda.signal_batch.batch.listener.JobCompletionListener;
|
||||||
|
import gc.mda.signal_batch.batch.listener.PerformanceOptimizationListener;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Job;
|
||||||
|
import org.springframework.batch.core.JobParametersValidator;
|
||||||
|
import org.springframework.batch.core.job.DefaultJobParametersValidator;
|
||||||
|
import org.springframework.batch.core.job.builder.JobBuilder;
|
||||||
|
import org.springframework.batch.core.launch.support.RunIdIncrementer;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
|
||||||
|
/**
 * Spring Batch job definition for the 5-minute vessel-track aggregation pipeline.
 *
 * <p>The job runs three steps in sequence: raw track building, grid (haegu)
 * summary, and area summary. It is disabled under the "query" profile and can
 * also be switched off via {@code vessel.batch.scheduler.enabled=false}.</p>
 */
@Slf4j
@Configuration
@Profile("!query") // batch jobs are disabled under the "query" profile
@RequiredArgsConstructor
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class VesselTrackAggregationJobConfig {

    private final JobRepository jobRepository;
    private final VesselTrackStepConfig vesselTrackStepConfig;
    private final JobCompletionListener jobCompletionListener;
    private final VesselTrackDataJobListener vesselTrackDataJobListener;
    private final PerformanceOptimizationListener performanceOptimizationListener;

    /**
     * The aggregation job: vesselTrackStep -> gridTrackSummaryStep -> areaTrackSummaryStep.
     * A {@link RunIdIncrementer} allows re-launching with otherwise identical parameters.
     */
    @Bean
    public Job vesselTrackAggregationJob() {
        return new JobBuilder("vesselTrackAggregationJob", jobRepository)
                .incrementer(new RunIdIncrementer())
                .validator(trackJobParametersValidator())
                .listener(jobCompletionListener)
                .listener(vesselTrackDataJobListener)
                .listener(performanceOptimizationListener) // performance-optimization listener
                .start(vesselTrackStepConfig.vesselTrackStep())
                .next(vesselTrackStepConfig.gridTrackSummaryStep())
                .next(vesselTrackStepConfig.areaTrackSummaryStep())
                .build();
    }

    /**
     * Parameter validator: startTime, endTime and timeBucket are mandatory;
     * executionTime and processingDate are optional.
     */
    @Bean
    public JobParametersValidator trackJobParametersValidator() {
        DefaultJobParametersValidator validator = new DefaultJobParametersValidator();
        validator.setRequiredKeys(new String[]{"startTime", "endTime", "timeBucket"});
        validator.setOptionalKeys(new String[]{"executionTime", "processingDate"});
        return validator;
    }
}
|
||||||
@ -0,0 +1,405 @@
|
|||||||
|
package gc.mda.signal_batch.batch.job;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
|
||||||
|
import gc.mda.signal_batch.batch.processor.VesselTrackProcessor;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalDetectionResult;
|
||||||
|
import gc.mda.signal_batch.batch.reader.InMemoryVesselTrackDataReader;
|
||||||
|
import gc.mda.signal_batch.global.util.VesselTrackDataHolder;
|
||||||
|
import gc.mda.signal_batch.global.util.TrackClippingUtils;
|
||||||
|
import gc.mda.signal_batch.batch.writer.VesselTrackBulkWriter;
|
||||||
|
import gc.mda.signal_batch.batch.writer.AbnormalTrackWriter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.Step;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.batch.core.step.builder.StepBuilder;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.batch.item.ItemWriter;
|
||||||
|
import org.springframework.batch.item.support.CompositeItemWriter;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import org.springframework.batch.item.Chunk;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
import jakarta.annotation.PostConstruct;
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * Step definitions for the 5-minute vessel-track aggregation job.
 *
 * <p>Builds the chunk-oriented track step (reader/processor/composite writer),
 * the abnormal-track side channel, and the two tasklet summary steps that
 * aggregate into {@code t_grid_tracks_summary} and {@code t_area_tracks_summary}.
 * All SQL targets PostgreSQL (jsonb, ON CONFLICT).</p>
 */
@Slf4j
@Configuration
@Profile("!query") // batch jobs are disabled under the "query" profile
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class VesselTrackStepConfig {

    private final JobRepository jobRepository;
    private final PlatformTransactionManager transactionManager;
    private final DataSource queryDataSource;
    private final VesselTrackProcessor vesselTrackProcessor;
    private final VesselTrackDataHolder vesselTrackDataHolder;
    private final VesselTrackBulkWriter vesselTrackBulkWriter;
    private final TrackClippingUtils trackClippingUtils;
    // NOTE(review): injected but apparently unused in this class — confirm before removing.
    private final AbnormalTrackDetector abnormalTrackDetector;
    private final AbnormalTrackWriter abnormalTrackWriter;

    /**
     * Explicit constructor so the {@code queryDataSource} bean can be selected
     * with {@code @Qualifier} among multiple DataSource beans.
     */
    public VesselTrackStepConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            @Qualifier("queryDataSource") DataSource queryDataSource,
            VesselTrackProcessor vesselTrackProcessor,
            VesselTrackDataHolder vesselTrackDataHolder,
            VesselTrackBulkWriter vesselTrackBulkWriter,
            TrackClippingUtils trackClippingUtils,
            AbnormalTrackDetector abnormalTrackDetector,
            AbnormalTrackWriter abnormalTrackWriter) {
        this.jobRepository = jobRepository;
        this.transactionManager = transactionManager;
        this.queryDataSource = queryDataSource;
        this.vesselTrackProcessor = vesselTrackProcessor;
        this.vesselTrackDataHolder = vesselTrackDataHolder;
        this.vesselTrackBulkWriter = vesselTrackBulkWriter;
        this.trackClippingUtils = trackClippingUtils;
        this.abnormalTrackDetector = abnormalTrackDetector;
        this.abnormalTrackWriter = abnormalTrackWriter;
    }

    // Chunk size for the track step; defaults to 1000 when the property is absent.
    @Value("${vessel.batch.chunk-size:1000}")
    private int chunkSize;

    /** Sets the 5-minute job's name on the abnormal-track writer once at startup. */
    @PostConstruct
    public void init() {
        // Explicitly set the job name for the 5-minute job.
        abnormalTrackWriter.setJobName("vesselTrackAggregationJob");
        log.info("AbnormalTrackWriter initialized with job name: vesselTrackAggregationJob");
    }

    /**
     * Chunk-oriented step: reads pre-loaded vessel data from memory, processes it
     * into tracks (with abnormal filtering), and fans out to three table writers.
     */
    @Bean
    public Step vesselTrackStep() {
        return new StepBuilder("vesselTrackStep", jobRepository)
                .<List<VesselData>, List<VesselTrack>>chunk(chunkSize, transactionManager)
                .reader(trackDataReader())
                .processor(trackProcessorWithSimpleFilter())
                .writer(compositeTrackWriter())
                .build();
    }

    /** Reader over the in-memory data holder, paging by {@code chunkSize}. */
    @Bean
    @StepScope
    public InMemoryVesselTrackDataReader trackDataReader() {
        return new InMemoryVesselTrackDataReader(vesselTrackDataHolder, chunkSize);
    }

    /**
     * Processor that builds tracks and filters out obviously abnormal ones.
     * Abnormal tracks are persisted via {@link #saveAbnormalTrack(VesselTrack)}
     * and excluded from the downstream writers. Returns {@code null} (skip)
     * when nothing survives filtering.
     */
    @Bean
    @StepScope
    public ItemProcessor<List<VesselData>, List<VesselTrack>> trackProcessorWithSimpleFilter() {
        return items -> {
            // 1. Base processing.
            List<VesselTrack> tracks = vesselTrackProcessor.process(items);
            if (tracks == null || tracks.isEmpty()) {
                return null;
            }

            // 2. Hardened abnormal-track filtering.
            List<VesselTrack> filteredTracks = new ArrayList<>();
            for (VesselTrack track : tracks) {
                boolean isAbnormal = false;

                // Distinguish vessel vs aircraft ("000019" = aircraft source).
                boolean isAircraft = "000019".equals(track.getSigSrcCd());
                double speedLimit = isAircraft ? 300.0 : 100.0; // aircraft 300, vessel 100 (knots)
                double distanceLimit = isAircraft ? 30.0 : 10.0; // aircraft 30 nm, vessel 10 nm

                // Average-speed check.
                if (track.getAvgSpeed() != null && track.getAvgSpeed().doubleValue() >= speedLimit) {
                    isAbnormal = true;
                }

                // 5-minute travel-distance check.
                if (track.getDistanceNm() != null && track.getDistanceNm().doubleValue() >= distanceLimit) {
                    isAbnormal = true;
                }

                if (isAbnormal) {
                    log.warn("5분 비정상 궤적 감지: vessel={}, avg_speed={}, distance={}",
                            track.getVesselKey(), track.getAvgSpeed(), track.getDistanceNm());
                    saveAbnormalTrack(track);
                } else {
                    filteredTracks.add(track);
                }
            }

            return filteredTracks.isEmpty() ? null : filteredTracks;
        };
    }

    /**
     * Persists a single abnormal 5-minute track through the abnormal-track writer.
     * Failures are logged and swallowed so the main step is never aborted by
     * the side channel.
     */
    private void saveAbnormalTrack(VesselTrack track) {
        try {
            // Set the job name (idempotent; also done in init()).
            abnormalTrackWriter.setJobName("vesselTrackAggregationJob");

            List<AbnormalTrackDetector.AbnormalSegment> segments = new ArrayList<>();
            Map<String, Object> details = new HashMap<>();
            details.put("avgSpeed", track.getAvgSpeed());
            details.put("distanceNm", track.getDistanceNm());
            details.put("timeBucket", track.getTimeBucket());

            // Distinguish vessel vs aircraft (same thresholds as the processor).
            boolean isAircraft = "000019".equals(track.getSigSrcCd());
            double speedLimit = isAircraft ? 300.0 : 100.0;
            double distanceLimit = isAircraft ? 30.0 : 10.0;

            // Decide the abnormality type; speed takes precedence over distance.
            String abnormalType = "abnormal_5min";
            double actualValue = 0.0;
            double threshold = 0.0;
            String description = "";

            if (track.getAvgSpeed() != null && track.getAvgSpeed().doubleValue() >= speedLimit) {
                abnormalType = "extreme_avg_speed_5min";
                actualValue = track.getAvgSpeed().doubleValue();
                threshold = speedLimit;
                description = String.format("5분 비정상 평균속도: %.1f knots", actualValue);
            } else if (track.getDistanceNm() != null && track.getDistanceNm().doubleValue() >= distanceLimit) {
                abnormalType = "extreme_distance_5min";
                actualValue = track.getDistanceNm().doubleValue();
                threshold = distanceLimit;
                description = String.format("5분 비정상 이동거리: %.1f nm", actualValue);
            }

            segments.add(AbnormalTrackDetector.AbnormalSegment.builder()
                    .type(abnormalType)
                    .startIndex(0)
                    .endIndex(track.getPointCount() - 1)
                    .actualValue(actualValue)
                    .threshold(threshold)
                    .description(description)
                    .details(details)
                    .build());

            AbnormalDetectionResult result = AbnormalTrackDetector.AbnormalDetectionResult.builder()
                    .originalTrack(track)
                    .correctedTrack(null)
                    .abnormalSegments(segments)
                    .hasAbnormalities(true)
                    .build();

            abnormalTrackWriter.write(new Chunk<>(Arrays.asList(result)));
        } catch (Exception e) {
            log.error("Failed to save 5min abnormal track", e);
        }
    }

    // CompositeItemWriter: writes each chunk to three tables at once.
    @Bean
    @StepScope
    public ItemWriter<List<VesselTrack>> compositeTrackWriter() {
        CompositeItemWriter<List<VesselTrack>> compositeWriter = new CompositeItemWriter<>();
        compositeWriter.setDelegates(Arrays.asList(
                vesselTrackWriter(),
                gridTrackWriter(),
                areaTrackWriter()
        ));
        return compositeWriter;
    }

    /** Raw track writer — delegates to the bulk writer bean. */
    @Bean
    @StepScope
    public ItemWriter<List<VesselTrack>> vesselTrackWriter() {
        return vesselTrackBulkWriter;
    }

    /**
     * Writes tracks clipped to fishing-grid (haegu) cells into
     * {@code t_grid_vessel_tracks}. Duplicate keys are ignored (DO NOTHING).
     * NOTE(review): a new JdbcTemplate is built per chunk — consider reusing one.
     */
    @Bean
    @StepScope
    public ItemWriter<List<VesselTrack>> gridTrackWriter() {
        return chunk -> {
            // Split each track by grid (haegu) cell.
            List<VesselTrack> allClippedTracks = new ArrayList<>();

            for (List<VesselTrack> trackList : chunk.getItems()) {
                for (VesselTrack track : trackList) {
                    List<VesselTrack> clippedTracks = trackClippingUtils.clipTracksByHaegu(track);
                    allClippedTracks.addAll(clippedTracks);
                }
            }

            if (allClippedTracks.isEmpty()) {
                log.debug("No tracks to write to grid table after clipping");
                return;
            }

            String sql = """
                INSERT INTO signal.t_grid_vessel_tracks (
                    haegu_no, sig_src_cd, target_id, time_bucket,
                    distance_nm, avg_speed, point_count, entry_time, exit_time
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT (haegu_no, sig_src_cd, target_id, time_bucket) DO NOTHING
                """;

            List<Object[]> args = allClippedTracks.stream()
                    .map(track -> new Object[] {
                            track.getHaeguNo(),
                            track.getSigSrcCd(),
                            track.getTargetId(),
                            Timestamp.valueOf(track.getTimeBucket()),
                            track.getDistanceNm(),
                            track.getAvgSpeed(),
                            track.getPointCount(),
                            track.getEntryTime() != null ? Timestamp.valueOf(track.getEntryTime()) : null,
                            track.getExitTime() != null ? Timestamp.valueOf(track.getExitTime()) : null
                    })
                    .collect(Collectors.toList());

            int[] results = new JdbcTemplate(queryDataSource).batchUpdate(sql, args);
            log.info("Inserted {} clipped records to t_grid_vessel_tracks", results.length);
        };
    }

    /**
     * Writes tracks clipped to configured areas into {@code t_area_vessel_tracks}.
     * The metrics column is currently an empty jsonb object ("{}").
     */
    @Bean
    @StepScope
    public ItemWriter<List<VesselTrack>> areaTrackWriter() {
        return chunk -> {
            // Split each track by area.
            List<VesselTrack> allClippedTracks = new ArrayList<>();

            for (List<VesselTrack> trackList : chunk.getItems()) {
                for (VesselTrack track : trackList) {
                    List<VesselTrack> clippedTracks = trackClippingUtils.clipTracksByArea(track);
                    allClippedTracks.addAll(clippedTracks);
                }
            }

            if (allClippedTracks.isEmpty()) {
                log.debug("No tracks to write to area table after clipping");
                return;
            }

            String sql = """
                INSERT INTO signal.t_area_vessel_tracks (
                    area_id, sig_src_cd, target_id, time_bucket,
                    distance_nm, avg_speed, point_count, metrics
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?::jsonb)
                ON CONFLICT (area_id, sig_src_cd, target_id, time_bucket) DO NOTHING
                """;

            List<Object[]> args = allClippedTracks.stream()
                    .map(track -> new Object[] {
                            track.getAreaId(),
                            track.getSigSrcCd(),
                            track.getTargetId(),
                            Timestamp.valueOf(track.getTimeBucket()),
                            track.getDistanceNm(),
                            track.getAvgSpeed(),
                            track.getPointCount(),
                            "{}"
                    })
                    .collect(Collectors.toList());

            int[] results = new JdbcTemplate(queryDataSource).batchUpdate(sql, args);
            log.info("Inserted {} clipped records to t_area_vessel_tracks", results.length);
        };
    }

    // Aggregation steps (unchanged from the original pipeline).

    /**
     * Tasklet step: upserts per-grid summaries for the given {@code timeBucket}
     * job parameter into {@code t_grid_tracks_summary}.
     */
    @Bean
    public Step gridTrackSummaryStep() {
        return new StepBuilder("gridTrackSummaryStep", jobRepository)
                .tasklet((contribution, chunkContext) -> {
                    JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);

                    String sql = """
                        INSERT INTO signal.t_grid_tracks_summary
                        (haegu_no, time_bucket, total_vessels, total_distance_nm, avg_speed, vessel_list)
                        SELECT
                            haegu_no,
                            time_bucket,
                            COUNT(DISTINCT CONCAT(sig_src_cd, '_', target_id)) as total_vessels,
                            SUM(distance_nm) as total_distance_nm,
                            AVG(avg_speed) as avg_speed,
                            jsonb_agg(jsonb_build_object(
                                'sig_src_cd', sig_src_cd,
                                'target_id', target_id,
                                'distance_nm', distance_nm,
                                'avg_speed', avg_speed
                            )) as vessel_list
                        FROM signal.t_grid_vessel_tracks
                        WHERE time_bucket = ?
                        GROUP BY haegu_no, time_bucket
                        ON CONFLICT (haegu_no, time_bucket)
                        DO UPDATE SET
                            total_vessels = EXCLUDED.total_vessels,
                            total_distance_nm = EXCLUDED.total_distance_nm,
                            avg_speed = EXCLUDED.avg_speed,
                            vessel_list = EXCLUDED.vessel_list
                        """;

                    // timeBucket arrives as a "yyyy-MM-dd HH:mm:ss" string job parameter.
                    String timeBucketStr = (String) chunkContext.getStepContext()
                            .getJobParameters().get("timeBucket");
                    Timestamp timeBucket = Timestamp.valueOf(timeBucketStr);

                    int updated = jdbcTemplate.update(sql, timeBucket);
                    log.info("Updated {} grid track summaries for time_bucket: {}", updated, timeBucket);

                    return org.springframework.batch.repeat.RepeatStatus.FINISHED;
                }, transactionManager)
                .build();
    }

    /**
     * Tasklet step: upserts per-area summaries for the given {@code timeBucket}
     * job parameter into {@code t_area_tracks_summary}.
     */
    @Bean
    public Step areaTrackSummaryStep() {
        return new StepBuilder("areaTrackSummaryStep", jobRepository)
                .tasklet((contribution, chunkContext) -> {
                    JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);

                    String sql = """
                        INSERT INTO signal.t_area_tracks_summary
                        (area_id, time_bucket, total_vessels, total_distance_nm, avg_speed, vessel_list)
                        SELECT
                            area_id,
                            time_bucket,
                            COUNT(DISTINCT CONCAT(sig_src_cd, '_', target_id)) as total_vessels,
                            SUM(distance_nm) as total_distance_nm,
                            AVG(avg_speed) as avg_speed,
                            jsonb_agg(jsonb_build_object(
                                'sig_src_cd', sig_src_cd,
                                'target_id', target_id,
                                'distance_nm', distance_nm,
                                'avg_speed', avg_speed
                            )) as vessel_list
                        FROM signal.t_area_vessel_tracks
                        WHERE time_bucket = ?
                        GROUP BY area_id, time_bucket
                        ON CONFLICT (area_id, time_bucket)
                        DO UPDATE SET
                            total_vessels = EXCLUDED.total_vessels,
                            total_distance_nm = EXCLUDED.total_distance_nm,
                            avg_speed = EXCLUDED.avg_speed,
                            vessel_list = EXCLUDED.vessel_list
                        """;

                    String timeBucketStr = (String) chunkContext.getStepContext()
                            .getJobParameters().get("timeBucket");
                    Timestamp timeBucket = Timestamp.valueOf(timeBucketStr);

                    int updated = jdbcTemplate.update(sql, timeBucket);
                    log.info("Updated {} area track summaries for time_bucket: {}", updated, timeBucket);

                    return org.springframework.batch.repeat.RepeatStatus.FINISHED;
                }, transactionManager)
                .build();
    }
}
|
||||||
@ -0,0 +1,138 @@
|
|||||||
|
package gc.mda.signal_batch.batch.listener;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.util.BatchUtils;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.JobExecution;
|
||||||
|
import org.springframework.batch.core.JobExecutionListener;
|
||||||
|
import org.springframework.batch.core.StepExecution;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class JobCompletionListener implements JobExecutionListener {
|
||||||
|
|
||||||
|
@Qualifier("queryJdbcTemplate")
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
private final BatchUtils batchUtils;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void beforeJob(JobExecution jobExecution) {
|
||||||
|
log.info("=== Starting Job: {} ===", jobExecution.getJobInstance().getJobName());
|
||||||
|
log.info("Job Parameters: {}", jobExecution.getJobParameters());
|
||||||
|
|
||||||
|
// 메모리 상태 로깅
|
||||||
|
batchUtils.logMemoryUsage("Job Start");
|
||||||
|
|
||||||
|
// 이전 실행 정리 (필요한 경우)
|
||||||
|
// cleanupPreviousRun(jobExecution);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void afterJob(JobExecution jobExecution) {
|
||||||
|
log.info("=== Job Completed: {} ===", jobExecution.getJobInstance().getJobName());
|
||||||
|
log.info("Status: {}", jobExecution.getStatus());
|
||||||
|
|
||||||
|
// 실행 요약
|
||||||
|
String summary = batchUtils.getJobExecutionSummary(jobExecution);
|
||||||
|
log.info("Execution Summary:\n{}", summary);
|
||||||
|
|
||||||
|
// 성능 메트릭 저장
|
||||||
|
savePerformanceMetrics(jobExecution);
|
||||||
|
|
||||||
|
// 메모리 상태 로깅
|
||||||
|
batchUtils.logMemoryUsage("Job End");
|
||||||
|
|
||||||
|
// 실패 시 알림
|
||||||
|
if (jobExecution.getStatus().isUnsuccessful()) {
|
||||||
|
sendFailureNotification(jobExecution);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
private void cleanupPreviousRun(JobExecution jobExecution) {
|
||||||
|
// 임시 테이블 정리 등
|
||||||
|
try {
|
||||||
|
queryJdbcTemplate.execute("TRUNCATE TABLE IF EXISTS temp_vessel_processing");
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to cleanup previous run: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void savePerformanceMetrics(JobExecution jobExecution) {
|
||||||
|
try {
|
||||||
|
LocalDateTime startTime = jobExecution.getStartTime();
|
||||||
|
LocalDateTime endTime = jobExecution.getEndTime();
|
||||||
|
|
||||||
|
// null 체크 추가
|
||||||
|
if (startTime == null || endTime == null) {
|
||||||
|
log.warn("Cannot save performance metrics - start or end time is null");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// LocalDateTime으로 직접 Duration 계산
|
||||||
|
Duration duration = Duration.between(startTime, endTime);
|
||||||
|
|
||||||
|
long totalRead = 0;
|
||||||
|
long totalWrite = 0;
|
||||||
|
|
||||||
|
for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
|
||||||
|
totalRead += stepExecution.getReadCount();
|
||||||
|
totalWrite += stepExecution.getWriteCount();
|
||||||
|
}
|
||||||
|
|
||||||
|
double throughput = batchUtils.calculateThroughput(totalRead, duration);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_batch_performance_metrics (
|
||||||
|
job_name, execution_id, start_time, end_time,
|
||||||
|
duration_seconds, total_read, total_write,
|
||||||
|
throughput_per_sec, status, created_at
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""";
|
||||||
|
|
||||||
|
queryJdbcTemplate.update(sql,
|
||||||
|
jobExecution.getJobInstance().getJobName(),
|
||||||
|
jobExecution.getId(),
|
||||||
|
startTime,
|
||||||
|
endTime,
|
||||||
|
duration.getSeconds(),
|
||||||
|
totalRead,
|
||||||
|
totalWrite,
|
||||||
|
throughput,
|
||||||
|
jobExecution.getStatus().toString(),
|
||||||
|
LocalDateTime.now()
|
||||||
|
);
|
||||||
|
|
||||||
|
log.info("Performance metrics saved - Duration: {} seconds, Throughput: {} records/sec",
|
||||||
|
duration.getSeconds(), throughput);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to save performance metrics", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void sendFailureNotification(JobExecution jobExecution) {
|
||||||
|
// 실패 알림 로직 (이메일, Slack 등)
|
||||||
|
log.error("Job {} failed with status: {}",
|
||||||
|
jobExecution.getJobInstance().getJobName(),
|
||||||
|
jobExecution.getStatus()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (jobExecution.getAllFailureExceptions() != null && !jobExecution.getAllFailureExceptions().isEmpty()) {
|
||||||
|
jobExecution.getAllFailureExceptions().forEach(e ->
|
||||||
|
log.error("Failure reason: ", e)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,163 @@
|
|||||||
|
package gc.mda.signal_batch.batch.listener;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.monitoring.health.BatchMetricsCollector;
|
||||||
|
import gc.mda.signal_batch.monitoring.performance.BatchProcessingOptimizer;
|
||||||
|
import gc.mda.signal_batch.monitoring.performance.PerformanceOptimizationManager;
|
||||||
|
import gc.mda.signal_batch.monitoring.performance.QueryPerformanceOptimizer;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.*;
|
||||||
|
import org.springframework.batch.core.annotation.AfterChunk;
|
||||||
|
import org.springframework.batch.core.annotation.BeforeChunk;
|
||||||
|
import org.springframework.batch.core.scope.context.ChunkContext;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 성능 최적화 리스너
|
||||||
|
* 배치 작업 실행 중 성능 최적화를 자동으로 수행
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class PerformanceOptimizationListener implements JobExecutionListener, StepExecutionListener, ChunkListener {
|
||||||
|
|
||||||
|
private final PerformanceOptimizationManager optimizationManager;
|
||||||
|
private final BatchProcessingOptimizer batchOptimizer;
|
||||||
|
private final QueryPerformanceOptimizer queryOptimizer;
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
private final BatchMetricsCollector metricsCollector;
|
||||||
|
|
||||||
|
private final ConcurrentHashMap<String, LocalDateTime> executionStartTimes = new ConcurrentHashMap<>();
|
||||||
|
private final ConcurrentHashMap<String, Integer> currentChunkSizes = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void beforeJob(JobExecution jobExecution) {
|
||||||
|
String jobName = jobExecution.getJobInstance().getJobName();
|
||||||
|
log.info("Starting performance optimization for job: {}", jobName);
|
||||||
|
|
||||||
|
// 메모리 상태 확인 및 최적화
|
||||||
|
if (optimizationManager.getStatus().getMemoryUsage().getPercentage() > 70) {
|
||||||
|
log.warn("High memory usage detected before job start, performing optimization");
|
||||||
|
optimizationManager.performMemoryOptimization();
|
||||||
|
}
|
||||||
|
|
||||||
|
executionStartTimes.put(jobName, LocalDateTime.now());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void afterJob(JobExecution jobExecution) {
|
||||||
|
String jobName = jobExecution.getJobInstance().getJobName();
|
||||||
|
LocalDateTime startTime = executionStartTimes.remove(jobName);
|
||||||
|
|
||||||
|
if (startTime != null) {
|
||||||
|
Duration duration = Duration.between(startTime, LocalDateTime.now());
|
||||||
|
log.info("Job {} completed in {} seconds", jobName, duration.getSeconds());
|
||||||
|
|
||||||
|
// 성능 리포트 생성
|
||||||
|
if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
|
||||||
|
String report = optimizationManager.generatePerformanceReport();
|
||||||
|
log.info("Performance Report for job {}: {}", jobName, report);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void beforeStep(StepExecution stepExecution) {
|
||||||
|
String stepName = stepExecution.getStepName();
|
||||||
|
executionStartTimes.put(stepName, LocalDateTime.now());
|
||||||
|
|
||||||
|
// 초기 청크 크기 설정
|
||||||
|
Long chunkSizeParam = stepExecution.getJobParameters().getLong("chunkSize", 5000L);
|
||||||
|
int chunkSize = chunkSizeParam != null ? chunkSizeParam.intValue() : 5000;
|
||||||
|
currentChunkSizes.put(stepName, chunkSize);
|
||||||
|
|
||||||
|
log.debug("Starting step {} with chunk size: {}", stepName, chunkSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
public ExitStatus afterStep(StepExecution stepExecution) {
    // Measure how long the step ran, feed that into the chunk-size optimizer, and
    // record the step's duration as a query-performance sample. Always returns the
    // step's own exit status unchanged.
    String stepName = stepExecution.getStepName();
    LocalDateTime startTime = executionStartTimes.remove(stepName);

    if (startTime != null) {
        Duration duration = Duration.between(startTime, LocalDateTime.now());
        // NOTE(review): getReadCount() returns a primitive in current Spring Batch,
        // so this null check is effectively always true after autoboxing — harmless.
        Long readCountParam = stepExecution.getReadCount();
        int readCount = readCountParam != null ? readCountParam.intValue() : 0;

        // Dynamic chunk-size optimization: only meaningful when we know the chunk
        // size used for this run and at least one item was read.
        Integer currentChunkSize = currentChunkSizes.get(stepName);
        if (currentChunkSize != null && readCount > 0) {
            int optimalChunkSize = batchOptimizer.calculateOptimalChunkSize(
                stepName,
                currentChunkSize,
                duration.toMillis(),
                readCount
            );

            if (optimalChunkSize != currentChunkSize) {
                log.info("Recommended chunk size for step {} changed from {} to {}",
                    stepName, currentChunkSize, optimalChunkSize);
                // Persist the recommendation for the next execution; the value is
                // only stored, not applied to the current run.
                stepExecution.getExecutionContext().putInt("optimalChunkSize", optimalChunkSize);
            }
        }

        // Record the whole-step duration as a query-performance data point.
        queryOptimizer.recordQueryPerformance(stepName, duration.toMillis());
    }

    return stepExecution.getExitStatus();
}
|
||||||
|
|
||||||
|
@BeforeChunk
public void beforeChunk(ChunkContext context) {
    // Stamp the chunk's start time into the chunk context so afterChunk can time it,
    // and warn when the optimization manager reports a cache hit rate below 50%.
    String stepName = context.getStepContext().getStepName();
    context.setAttribute("chunkStartTime", System.currentTimeMillis());

    // Cache hit-rate monitoring (threshold: 50%).
    if (optimizationManager.getStatus().getCacheHitRate() < 50) {
        log.warn("Low cache hit rate detected in step: {}", stepName);
    }
}
|
||||||
|
|
||||||
|
@AfterChunk
public void afterChunk(ChunkContext context) {
    // Time the chunk using the "chunkStartTime" attribute set in beforeChunk.
    // Slow chunks are logged; under memory pressure an optimization pass is triggered.
    String stepName = context.getStepContext().getStepName();
    Long startTime = (Long) context.getAttribute("chunkStartTime");

    if (startTime != null) {
        long duration = System.currentTimeMillis() - startTime;

        // Warn when a single chunk takes longer than 10 seconds.
        if (duration > 10000) {
            log.warn("Chunk processing in step {} took {} ms", stepName, duration);

            // Check memory pressure; above 85% usage, force a memory optimization pass.
            if (optimizationManager.getStatus().getMemoryUsage().getPercentage() > 85) {
                log.error("Critical memory usage during chunk processing, triggering optimization");
                optimizationManager.performMemoryOptimization();
            }
        }
    }
}
|
||||||
|
|
||||||
|
@Override
public void afterChunkError(ChunkContext context) {
    // On chunk failure, halve the tracked chunk size for this step (but never below
    // the 1000-item floor) so subsequent processing retries with smaller batches.
    String stepName = context.getStepContext().getStepName();
    log.error("Chunk error in step: {}", stepName);

    // Automatic chunk-size reduction on error.
    Integer currentChunkSize = currentChunkSizes.get(stepName);
    if (currentChunkSize != null && currentChunkSize > 1000) {
        int reducedSize = (int) (currentChunkSize * 0.5);
        currentChunkSizes.put(stepName, reducedSize);
        log.warn("Reducing chunk size for step {} to {} due to error", stepName, reducedSize);
    }
}
|
||||||
|
}
|
||||||
@ -0,0 +1,409 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
 * Abnormal track detector (final improved version).
 * <p>
 * Detects only clearly abnormal trajectories caused by device malfunction or
 * network problems, using deliberately lenient thresholds. Handles data gaps
 * and single-point tracks explicitly so they are not flagged as abnormal.
 */
@Slf4j
@Component
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class AbnormalTrackDetector {

    // Implied speed (knots) over a large data gap above which a transition is still
    // flagged even though gaps are otherwise tolerated. Configurable; default 1000.
    @Value("${batch.track.abnormal-detection.extreme-speed-threshold:1000}")
    private double extremeSpeedThreshold;

    // Physical limits (set very generously); currently unused reference values.
    @SuppressWarnings("unused")
    private static final double VESSEL_PHYSICAL_LIMIT_KNOTS = 100.0; // vessel physical limit
    @SuppressWarnings("unused")
    private static final double AIRCRAFT_PHYSICAL_LIMIT_KNOTS = 600.0; // aircraft physical limit

    // Thresholds for detecting only blatantly abnormal values.
    private static final double VESSEL_ABNORMAL_SPEED_KNOTS = 500.0; // vessel abnormal speed (very lenient)
    private static final double AIRCRAFT_ABNORMAL_SPEED_KNOTS = 800.0; // aircraft abnormal speed

    // Distance-per-time thresholds (square-root scaling applied).
    private static final double BASE_DISTANCE_5MIN_NM = 20.0; // baseline distance per 5 minutes (doubled)
    private static final double MIN_MOVEMENT_NM = 0.01; // minimum movement (stationary判定)
    private static final double STATIONARY_SPEED_KNOTS = 0.5; // stationary speed threshold

    // Data-gap tolerance.
    private static final long MAX_NORMAL_GAP_MINUTES = 120; // up to 2 hours treated as a normal gap
    private static final long MIN_GAP_FOR_RELAXED_CHECK = 30; // gaps >= 30 min get a relaxed check

    private static final double EARTH_RADIUS_NM = 3440.065; // Earth radius in nautical miles (Haversine)
    private static final String AIRCRAFT_SIG_SRC_CD = "000019"; // signal source code identifying aircraft

    /** One detected abnormality within a track. */
    @Data
    @Builder
    public static class AbnormalSegment {
        private String type;          // abnormality category, e.g. "extreme_speed"
        private int startIndex;
        private int endIndex;
        private double actualValue;   // measured value that triggered detection
        private double threshold;     // threshold it exceeded
        private String description;
        private Map<String, Object> details;
    }

    /** Result of a detection pass: original track, possibly-null corrected track, and findings. */
    @Data
    @Builder
    public static class AbnormalDetectionResult {
        private VesselTrack originalTrack;
        // Null when the track should be excluded entirely; otherwise the (unchanged) track.
        private VesselTrack correctedTrack;
        private List<AbnormalSegment> abnormalSegments;
        private boolean hasAbnormalities;

        // Overrides the Lombok getter: derives the flag from the segment list.
        public boolean hasAbnormalities() {
            return abnormalSegments != null && !abnormalSegments.isEmpty();
        }
    }

    /**
     * Abnormality detection for 5-minute data — detects only obvious abnormalities.
     *
     * @param track         current 5-minute aggregated track
     * @param previousTrack preceding track for transition checks; may be null
     * @return detection result; correctedTrack is null only for clear exclusions
     */
    public AbnormalDetectionResult detectAbnormalTrack(VesselTrack track, VesselTrack previousTrack) {
        List<AbnormalSegment> abnormalSegments = new ArrayList<>();

        // Skip detection entirely for stationary/low-speed vessels.
        if (isStationary(track)) {
            log.debug("정박/저속 선박으로 판단, 비정상 검출 스킵 - vessel: {}, distance: {}nm, avgSpeed: {}kts",
                track.getVesselKey(), track.getDistanceNm(), track.getAvgSpeed());
            return buildNormalResult(track);
        }

        // 1. Aggregated-metric checks (very lenient thresholds).
        abnormalSegments.addAll(checkAggregatedMetricsLenient(track));

        // 2. Transition check against the previous track (only when one exists and it moved).
        if (previousTrack != null && !isStationary(previousTrack)) {
            abnormalSegments.addAll(checkBucketTransitionLenient(previousTrack, track));
        }

        // No findings -> normal result.
        if (abnormalSegments.isEmpty()) {
            return buildNormalResult(track);
        }

        // Exclude only clearly abnormal tracks; otherwise keep the track as-is.
        VesselTrack correctedTrack = shouldExcludeTrack(abnormalSegments) ? null : track;

        log.info("명백한 비정상 궤적 검출 - vessel: {}, type: {}, value: {}",
            track.getVesselKey(),
            abnormalSegments.get(0).getType(),
            abnormalSegments.get(0).getActualValue());

        return AbnormalDetectionResult.builder()
            .originalTrack(track)
            .correctedTrack(correctedTrack)
            .abnormalSegments(abnormalSegments)
            .hasAbnormalities(true)
            .build();
    }

    /**
     * For hourly/daily aggregation — checks only bucket-to-bucket transitions,
     * very leniently (5-minute data has already been validated upstream).
     */
    public AbnormalDetectionResult detectBucketTransitionOnly(VesselTrack track, VesselTrack previousTrack) {
        // Only the bucket transition is checked; no previous track means nothing to compare.
        if (previousTrack == null) {
            return buildNormalResult(track);
        }

        List<AbnormalSegment> abnormalSegments = checkBucketTransitionVeryLenient(previousTrack, track);

        if (abnormalSegments.isEmpty()) {
            return buildNormalResult(track);
        }

        // Hourly/daily exclusion distinguishes vessels from aircraft.
        boolean isAircraft = AIRCRAFT_SIG_SRC_CD.equals(track.getSigSrcCd());
        double speedLimit = isAircraft ? 300.0 : 100.0; // aircraft 300, vessel 100

        boolean shouldExclude = abnormalSegments.stream()
            .anyMatch(seg -> seg.getActualValue() > speedLimit);

        VesselTrack correctedTrack = shouldExclude ? null : track;

        if (shouldExclude) {
            log.info("Hourly/Daily 명백한 비정상 전환 - vessel: {}, speed: {} knots",
                track.getVesselKey(), abnormalSegments.get(0).getActualValue());
        }

        return AbnormalDetectionResult.builder()
            .originalTrack(track)
            .correctedTrack(correctedTrack)
            .abnormalSegments(abnormalSegments)
            .hasAbnormalities(!abnormalSegments.isEmpty())
            .build();
    }

    /**
     * Stationary / low-speed判定: either negligible total movement or a very low
     * average speed. Returns false when either metric is missing.
     */
    private boolean isStationary(VesselTrack track) {
        if (track.getDistanceNm() == null || track.getAvgSpeed() == null) {
            return false;
        }

        // Tiny total distance OR low average speed counts as stationary.
        return track.getDistanceNm().doubleValue() < MIN_MOVEMENT_NM ||
            track.getAvgSpeed().doubleValue() < STATIONARY_SPEED_KNOTS;
    }

    /**
     * True when the track consists of a single point: identical start/end
     * position AND identical timestamp.
     */
    private boolean isSinglePointTrack(VesselTrack track) {
        if (track.getStartPosition() == null || track.getEndPosition() == null) {
            return false;
        }

        // Same position and same time at both ends -> single point.
        return track.getStartPosition().getLat().equals(track.getEndPosition().getLat()) &&
            track.getStartPosition().getLon().equals(track.getEndPosition().getLon()) &&
            track.getStartPosition().getTime().equals(track.getEndPosition().getTime());
    }

    /**
     * Lenient aggregated-metric check (5-minute data): flags only an extreme
     * average speed or an extreme 5-minute travel distance (> 100 nm).
     */
    private List<AbnormalSegment> checkAggregatedMetricsLenient(VesselTrack track) {
        List<AbnormalSegment> abnormalSegments = new ArrayList<>();

        boolean isAircraft = AIRCRAFT_SIG_SRC_CD.equals(track.getSigSrcCd());
        double speedLimit = isAircraft ? AIRCRAFT_ABNORMAL_SPEED_KNOTS : VESSEL_ABNORMAL_SPEED_KNOTS;

        // Flag only a clearly abnormal average speed.
        if (track.getAvgSpeed() != null && track.getAvgSpeed().doubleValue() > speedLimit) {
            abnormalSegments.add(AbnormalSegment.builder()
                .type("extreme_speed")
                .actualValue(track.getAvgSpeed().doubleValue())
                .threshold(speedLimit)
                .description(String.format("극단적 비정상 속도: %.1f knots",
                    track.getAvgSpeed().doubleValue()))
                .build());
        }

        // Extreme movement within 5 minutes (e.g. over 100 nm).
        if (track.getDistanceNm() != null && track.getDistanceNm().doubleValue() > 100.0) {
            abnormalSegments.add(AbnormalSegment.builder()
                .type("extreme_distance")
                .actualValue(track.getDistanceNm().doubleValue())
                .threshold(100.0)
                .description(String.format("5분간 극단적 이동: %.1f nm",
                    track.getDistanceNm().doubleValue()))
                .build());
        }

        return abnormalSegments;
    }

    /**
     * Lenient bucket-transition check (5-minute data). Skips single-point tracks
     * and tolerates 30 min – 2 h data gaps entirely; otherwise flags a transition
     * only when BOTH the implied speed and the (sqrt-scaled) distance are extreme.
     */
    private List<AbnormalSegment> checkBucketTransitionLenient(VesselTrack previousTrack, VesselTrack currentTrack) {
        List<AbnormalSegment> abnormalSegments = new ArrayList<>();

        if (previousTrack.getEndPosition() == null || currentTrack.getStartPosition() == null) {
            return abnormalSegments;
        }

        // Skip the check when either side is a single-point track.
        if (isSinglePointTrack(previousTrack) || isSinglePointTrack(currentTrack)) {
            log.debug("단일 포인트 트랙 감지, bucket 전환 검사 스킵");
            return abnormalSegments;
        }

        double distance = calculateDistance(
            previousTrack.getEndPosition().getLat(),
            previousTrack.getEndPosition().getLon(),
            currentTrack.getStartPosition().getLat(),
            currentTrack.getStartPosition().getLon()
        );

        long durationMinutes = java.time.Duration.between(
            previousTrack.getEndPosition().getTime(),
            currentTrack.getStartPosition().getTime()
        ).toMinutes();

        // Non-positive gap (out-of-order or identical timestamps) -> nothing to check.
        if (durationMinutes <= 0) return abnormalSegments;

        // Apply a relaxed policy when the data gap is large.
        boolean hasLargeGap = durationMinutes > MIN_GAP_FOR_RELAXED_CHECK;
        boolean isNormalGap = durationMinutes <= MAX_NORMAL_GAP_MINUTES;

        if (hasLargeGap && isNormalGap) {
            log.debug("데이터 gap {} 분 감지, 완화된 검사 적용", durationMinutes);
            // Gaps of 30 minutes to 2 hours are accepted without further checks.
            return abnormalSegments;
        }

        double impliedSpeed = (distance * 60.0) / durationMinutes;

        // Square-root scaling of the distance threshold with elapsed time.
        double timeScale = Math.sqrt(durationMinutes / 5.0);
        double distanceThreshold = BASE_DISTANCE_5MIN_NM * timeScale * 3.0; // 3x headroom

        boolean isAircraft = AIRCRAFT_SIG_SRC_CD.equals(currentTrack.getSigSrcCd());
        double speedLimit = isAircraft ? AIRCRAFT_ABNORMAL_SPEED_KNOTS : VESSEL_ABNORMAL_SPEED_KNOTS;

        // Flag only when clearly abnormal (speed AND distance both exceeded).
        if (impliedSpeed > speedLimit && distance > distanceThreshold) {
            Map<String, Object> details = new HashMap<>();
            details.put("distance", distance);
            details.put("duration", durationMinutes);
            details.put("impliedSpeed", impliedSpeed);
            details.put("distanceThreshold", distanceThreshold);
            details.put("hasGap", hasLargeGap);

            abnormalSegments.add(AbnormalSegment.builder()
                .type("extreme_transition")
                .actualValue(impliedSpeed)
                .threshold(speedLimit)
                .description(String.format("극단적 bucket 전환: %.1f knots", impliedSpeed))
                .details(details)
                .build());
        }

        return abnormalSegments;
    }

    /**
     * Very lenient bucket-transition check (hourly/daily aggregation).
     * Large gaps are tolerated unless the implied speed exceeds the configured
     * extreme-speed threshold; otherwise a physically impossible transition is
     * flagged using per-type speed limits (aircraft 300 kts, vessel 100 kts).
     */
    private List<AbnormalSegment> checkBucketTransitionVeryLenient(VesselTrack previousTrack, VesselTrack currentTrack) {
        List<AbnormalSegment> abnormalSegments = new ArrayList<>();

        if (previousTrack.getEndPosition() == null || currentTrack.getStartPosition() == null) {
            return abnormalSegments;
        }

        // Skip the check when either side is a single-point track.
        if (isSinglePointTrack(previousTrack) || isSinglePointTrack(currentTrack)) {
            return abnormalSegments;
        }

        // Distance between the aggregated end/start positions.
        double distance = calculateDistance(
            previousTrack.getEndPosition().getLat(),
            previousTrack.getEndPosition().getLon(),
            currentTrack.getStartPosition().getLat(),
            currentTrack.getStartPosition().getLon()
        );

        // Elapsed time derived from the position timestamps.
        long durationMinutes = java.time.Duration.between(
            previousTrack.getEndPosition().getTime(),
            currentTrack.getStartPosition().getTime()
        ).toMinutes();

        if (durationMinutes <= 0) return abnormalSegments;

        // Hourly/daily data can contain large gaps — be very lenient, but still
        // flag physically impossible speeds across the gap.
        if (durationMinutes > MAX_NORMAL_GAP_MINUTES) {
            log.debug("Hourly/Daily 큰 gap {} 분 감지", durationMinutes);

            double impliedSpeed = (distance * 60.0) / durationMinutes;
            if (impliedSpeed > extremeSpeedThreshold) {
                Map<String, Object> details = new HashMap<>();
                details.put("distance", distance);
                details.put("duration", durationMinutes);
                details.put("impliedSpeed", impliedSpeed);
                details.put("largeGap", true);

                abnormalSegments.add(AbnormalSegment.builder()
                    .type("extreme_large_gap_transition")
                    .actualValue(impliedSpeed)
                    .threshold(extremeSpeedThreshold)
                    .description(String.format("Large gap extreme transition: %.1f knots over %d hours",
                        impliedSpeed, durationMinutes / 60))
                    .details(details)
                    .build());

                log.info("Large gap extreme transition detected - vessel: {}, speed: {} knots, gap: {} hours",
                    currentTrack.getVesselKey(), Math.round(impliedSpeed), durationMinutes / 60);
            }

            return abnormalSegments;
        }

        double impliedSpeed = (distance * 60.0) / durationMinutes;

        // Hourly/daily distinguishes vessels from aircraft.
        boolean isAircraft = AIRCRAFT_SIG_SRC_CD.equals(currentTrack.getSigSrcCd());
        double speedLimit = isAircraft ? 300.0 : 100.0;

        if (impliedSpeed > speedLimit) {
            Map<String, Object> details = new HashMap<>();
            details.put("distance", distance);
            details.put("duration", durationMinutes);
            details.put("impliedSpeed", impliedSpeed);
            details.put("prevEndTime", previousTrack.getEndPosition().getTime());
            details.put("currStartTime", currentTrack.getStartPosition().getTime());

            abnormalSegments.add(AbnormalSegment.builder()
                .type("impossible_transition")
                .actualValue(impliedSpeed)
                .threshold(speedLimit)
                .description(String.format("물리적 불가능 전환: %.1f knots", impliedSpeed))
                .details(details)
                .build());
        }

        return abnormalSegments;
    }

    /** Builds a "no abnormality" result: corrected track equals the original. */
    private AbnormalDetectionResult buildNormalResult(VesselTrack track) {
        return AbnormalDetectionResult.builder()
            .originalTrack(track)
            .correctedTrack(track)
            .abnormalSegments(new ArrayList<>())
            .hasAbnormalities(false)
            .build();
    }

    /** A track is excluded only for "extreme_*" findings or an impossible transition. */
    private boolean shouldExcludeTrack(List<AbnormalSegment> abnormalSegments) {
        // Exclude only extreme_* type abnormalities (plus impossible transitions).
        return abnormalSegments.stream()
            .anyMatch(seg -> seg.getType().startsWith("extreme_") ||
                seg.getType().equals("impossible_transition"));
    }

    /**
     * Haversine great-circle distance in nautical miles between two lat/lon
     * pairs given in degrees.
     */
    private double calculateDistance(double lat1, double lon1, double lat2, double lon2) {
        double dLat = Math.toRadians(lat2 - lat1);
        double dLon = Math.toRadians(lon2 - lon1);

        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
            Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
            Math.sin(dLon / 2) * Math.sin(dLon / 2);

        double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));

        return EARTH_RADIUS_NM * c;
    }
}
|
||||||
@ -0,0 +1,190 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.batch.processor.AreaStatisticsProcessor.AreaStatistics;
import gc.mda.signal_batch.batch.processor.AreaStatisticsProcessor.VesselMovement;
import gc.mda.signal_batch.domain.vessel.model.VesselData;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.AfterStep;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Area Statistics를 위한 누적 프로세서
|
||||||
|
* 전체 데이터를 메모리에 누적한 후 Step 종료 시 한 번에 집계
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@StepScope
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class AccumulatingAreaProcessor implements ItemProcessor<VesselData, AreaStatistics> {
|
||||||
|
|
||||||
|
private final AreaStatisticsProcessor areaStatisticsProcessor;
|
||||||
|
|
||||||
|
@Value("#{jobParameters['timeBucketMinutes']}")
|
||||||
|
private Integer timeBucketMinutes;
|
||||||
|
|
||||||
|
// area_id + time_bucket별 선박 데이터 누적
|
||||||
|
private final Map<String, List<VesselData>> dataAccumulator = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
// 처리 통계
|
||||||
|
private long processedCount = 0;
|
||||||
|
private long skippedCount = 0;
|
||||||
|
|
||||||
|
@BeforeStep
|
||||||
|
public void beforeStep(StepExecution stepExecution) {
|
||||||
|
int bucketMinutes = (timeBucketMinutes != null) ? timeBucketMinutes : 5;
|
||||||
|
log.info("AccumulatingAreaProcessor initialized with timeBucket: {} minutes", bucketMinutes);
|
||||||
|
dataAccumulator.clear();
|
||||||
|
processedCount = 0;
|
||||||
|
skippedCount = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public AreaStatistics process(VesselData item) throws Exception {
|
||||||
|
if (!item.isValidPosition()) {
|
||||||
|
skippedCount++;
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 메모리에서 속한 구역 찾기
|
||||||
|
List<String> areaIds = areaStatisticsProcessor.findAreasForPointInMemory(
|
||||||
|
item.getLat(), item.getLon()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (areaIds.isEmpty()) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// time bucket 계산
|
||||||
|
int bucketSize = timeBucketMinutes != null ? timeBucketMinutes : 5;
|
||||||
|
LocalDateTime bucket = item.getMessageTime()
|
||||||
|
.truncatedTo(ChronoUnit.MINUTES)
|
||||||
|
.withMinute((item.getMessageTime().getMinute() / bucketSize) * bucketSize);
|
||||||
|
|
||||||
|
// 각 area에 대해 데이터 누적
|
||||||
|
for (String areaId : areaIds) {
|
||||||
|
String key = areaId + "||" + bucket.toString(); // 구분자 변경
|
||||||
|
dataAccumulator.computeIfAbsent(key, k -> new ArrayList<>()).add(item);
|
||||||
|
}
|
||||||
|
|
||||||
|
processedCount++;
|
||||||
|
|
||||||
|
// null 반환으로 개별 출력 방지
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@AfterStep
|
||||||
|
public void afterStep(StepExecution stepExecution) {
|
||||||
|
log.info("Processing accumulated data for {} area-timebucket combinations",
|
||||||
|
dataAccumulator.size());
|
||||||
|
log.info("Processed: {}, Skipped: {}", processedCount, skippedCount);
|
||||||
|
|
||||||
|
if (dataAccumulator.isEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 누적된 데이터를 기반으로 통계 계산
|
||||||
|
List<AreaStatistics> allStatistics = new ArrayList<>();
|
||||||
|
|
||||||
|
dataAccumulator.forEach((key, vessels) -> {
|
||||||
|
String[] parts = key.split("\\|\\|", 2); // || 구분자 사용
|
||||||
|
if (parts.length != 2) {
|
||||||
|
log.error("Invalid key format: {}", key);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
String areaId = parts[0];
|
||||||
|
LocalDateTime timeBucket = LocalDateTime.parse(parts[1]);
|
||||||
|
|
||||||
|
AreaStatistics stats = new AreaStatistics(areaId, timeBucket);
|
||||||
|
Map<String, VesselMovement> vesselMovements = new HashMap<>();
|
||||||
|
|
||||||
|
// 각 선박별로 movement 정보 계산
|
||||||
|
Map<String, List<VesselData>> vesselGroups = new HashMap<>();
|
||||||
|
for (VesselData vessel : vessels) {
|
||||||
|
vesselGroups.computeIfAbsent(vessel.getVesselKey(), k -> new ArrayList<>())
|
||||||
|
.add(vessel);
|
||||||
|
}
|
||||||
|
|
||||||
|
vesselGroups.forEach((vesselKey, vesselDataList) -> {
|
||||||
|
// 시간순 정렬
|
||||||
|
vesselDataList.sort(Comparator.comparing(VesselData::getMessageTime));
|
||||||
|
|
||||||
|
VesselMovement movement = new VesselMovement();
|
||||||
|
movement.setVesselKey(vesselKey);
|
||||||
|
movement.setEnterTime(vesselDataList.get(0).getMessageTime());
|
||||||
|
movement.setExitTime(vesselDataList.get(vesselDataList.size() - 1).getMessageTime());
|
||||||
|
movement.setPointCount(vesselDataList.size());
|
||||||
|
|
||||||
|
// 평균 속도 계산
|
||||||
|
double totalSpeed = 0;
|
||||||
|
int speedCount = 0;
|
||||||
|
for (VesselData vd : vesselDataList) {
|
||||||
|
if (vd.getSog() != null) {
|
||||||
|
totalSpeed += vd.getSog().doubleValue();
|
||||||
|
speedCount++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (speedCount > 0) {
|
||||||
|
movement.setAvgSpeed(BigDecimal.valueOf(totalSpeed / speedCount)
|
||||||
|
.setScale(2, BigDecimal.ROUND_HALF_UP));
|
||||||
|
} else {
|
||||||
|
movement.setAvgSpeed(BigDecimal.ZERO);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 정류/통과 구분 (10분 이상 체류 시 정류)
|
||||||
|
long stayMinutes = ChronoUnit.MINUTES.between(
|
||||||
|
movement.getEnterTime(), movement.getExitTime()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (stayMinutes > 10) {
|
||||||
|
stats.getStationaryVessels().put(vesselKey, movement);
|
||||||
|
} else {
|
||||||
|
stats.getTransitVessels().put(vesselKey, movement);
|
||||||
|
}
|
||||||
|
|
||||||
|
vesselMovements.put(vesselKey, movement);
|
||||||
|
});
|
||||||
|
|
||||||
|
// 통계 최종 계산
|
||||||
|
stats.setVesselCount(vesselMovements.size());
|
||||||
|
stats.setInCount(vesselMovements.size()); // 진입 선박 수
|
||||||
|
stats.setOutCount(0); // 추후 로직 개선 필요
|
||||||
|
|
||||||
|
// 전체 평균 속도
|
||||||
|
List<BigDecimal> allSpeeds = new ArrayList<>();
|
||||||
|
vesselMovements.values().stream()
|
||||||
|
.map(VesselMovement::getAvgSpeed)
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.forEach(allSpeeds::add);
|
||||||
|
|
||||||
|
if (!allSpeeds.isEmpty()) {
|
||||||
|
BigDecimal totalSpeed = allSpeeds.stream()
|
||||||
|
.reduce(BigDecimal.ZERO, BigDecimal::add);
|
||||||
|
stats.setAvgSog(totalSpeed.divide(
|
||||||
|
BigDecimal.valueOf(allSpeeds.size()), 2, BigDecimal.ROUND_HALF_UP));
|
||||||
|
} else {
|
||||||
|
stats.setAvgSog(BigDecimal.ZERO);
|
||||||
|
}
|
||||||
|
|
||||||
|
allStatistics.add(stats);
|
||||||
|
});
|
||||||
|
|
||||||
|
// StepExecution context에 결과 저장
|
||||||
|
stepExecution.getExecutionContext().put("areaStatistics", allStatistics);
|
||||||
|
log.info("Calculated statistics for {} areas", allStatistics.size());
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,206 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.gis.model.TileStatistics;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import gc.mda.signal_batch.global.util.HaeguGeoUtils;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.StepExecution;
|
||||||
|
import org.springframework.batch.core.annotation.AfterStep;
|
||||||
|
import org.springframework.batch.core.annotation.BeforeStep;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.temporal.ChronoUnit;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 전체 데이터를 누적하여 집계하는 프로세서
|
||||||
|
* Step 실행 중 모든 데이터를 메모리에 누적하고, Step 완료 시 한 번에 출력
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@StepScope
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class AccumulatingTileProcessor implements ItemProcessor<VesselData, TileStatistics> {
|
||||||
|
|
||||||
|
private final HaeguGeoUtils geoUtils;
|
||||||
|
|
||||||
|
@Value("#{jobParameters['tileLevel']}")
|
||||||
|
private Integer tileLevel;
|
||||||
|
|
||||||
|
@Value("#{jobParameters['timeBucketMinutes']}")
|
||||||
|
private Integer timeBucketMinutes;
|
||||||
|
|
||||||
|
// 전체 집계를 위한 누적 맵
|
||||||
|
private final Map<String, TileStatistics> accumulator = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
// 처리된 레코드 수 추적
|
||||||
|
private long processedCount = 0;
|
||||||
|
private long skippedCount = 0;
|
||||||
|
|
||||||
|
@BeforeStep
public void beforeStep(StepExecution stepExecution) {
    // Resolve effective defaults for logging: tile level 1 and 5-minute buckets
    // when the corresponding job parameters are absent.
    int level = (tileLevel != null) ? tileLevel : 1;
    int bucketMinutes = (timeBucketMinutes != null) ? timeBucketMinutes : 5;

    log.info("Starting AccumulatingTileProcessor - tileLevel: {}, timeBucket: {} minutes",
        level, bucketMinutes);

    // Reset state for this step execution.
    accumulator.clear();
    processedCount = 0;
    skippedCount = 0;
}
|
||||||
|
|
||||||
|
@Override
public TileStatistics process(VesselData item) throws Exception {
    // Accumulate one vessel record into the level-0 (and optionally level-1) tile
    // aggregates. Always returns null: per-item output is suppressed; the actual
    // write happens after the step from the accumulated map.
    if (item == null || !item.isValidPosition()) {
        skippedCount++;
        return null;
    }

    processedCount++;

    int level = (tileLevel != null) ? tileLevel : 1;
    int bucketMinutes = (timeBucketMinutes != null) ? timeBucketMinutes : 5;

    // Floor the message time to the configured bucket size.
    LocalDateTime bucket = item.getMessageTime()
        .truncatedTo(ChronoUnit.MINUTES)
        .withMinute((item.getMessageTime().getMinute() / bucketMinutes) * bucketMinutes);

    // Level 0 (large fishing zone / 대해구): always processed since level >= 0 for any
    // non-negative configured level.
    if (level >= 0) {
        processLevel0(item, bucket);
    }

    // Level 1 (small fishing zone / 소해구): only when the configured level includes it.
    if (level >= 1) {
        processLevel1(item, bucket);
    }

    // Progress log every 10,000 records.
    if (processedCount % 10000 == 0) {
        log.debug("Processed {} records, accumulated {} tiles",
            processedCount, accumulator.size());
    }

    // Null return — actual output is produced in the AfterStep phase.
    return null;
}
|
||||||
|
|
||||||
|
// Accumulates one record into its level-0 (large zone) tile for the given time bucket.
// Creates the TileStatistics entry on first sight of a (tileId, level, bucket) key,
// then folds the record in via addVesselData. compute(...) keeps the update atomic
// on the ConcurrentHashMap.
private void processLevel0(VesselData item, LocalDateTime bucket) {
    HaeguGeoUtils.HaeguTileInfo level0Info = geoUtils.getHaeguTileInfo(
        item.getLat(), item.getLon(), 0
    );

    // Null means the point falls outside any level-0 tile — silently skipped.
    if (level0Info != null) {
        String key = generateKey(level0Info.tileId, 0, bucket);

        accumulator.compute(key, (k, existing) -> {
            if (existing == null) {
                existing = TileStatistics.builder()
                    .tileId(level0Info.tileId)
                    .tileLevel(0)
                    .timeBucket(bucket)
                    .uniqueVessels(new HashMap<>())
                    .totalPoints(0L)
                    .avgSog(BigDecimal.ZERO)
                    .maxSog(BigDecimal.ZERO)
                    .build();
            }
            existing.addVesselData(item);
            return existing;
        });
    }
}
|
||||||
|
|
||||||
|
private void processLevel1(VesselData item, LocalDateTime bucket) {
|
||||||
|
HaeguGeoUtils.HaeguTileInfo level1Info = geoUtils.getHaeguTileInfo(
|
||||||
|
item.getLat(), item.getLon(), 1
|
||||||
|
);
|
||||||
|
|
||||||
|
if (level1Info != null && level1Info.sohaeguNo != null) {
|
||||||
|
String key = generateKey(level1Info.tileId, 1, bucket);
|
||||||
|
|
||||||
|
accumulator.compute(key, (k, existing) -> {
|
||||||
|
if (existing == null) {
|
||||||
|
existing = TileStatistics.builder()
|
||||||
|
.tileId(level1Info.tileId)
|
||||||
|
.tileLevel(1)
|
||||||
|
.timeBucket(bucket)
|
||||||
|
.uniqueVessels(new HashMap<>())
|
||||||
|
.totalPoints(0L)
|
||||||
|
.avgSog(BigDecimal.ZERO)
|
||||||
|
.maxSog(BigDecimal.ZERO)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
existing.addVesselData(item);
|
||||||
|
return existing;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private String generateKey(String tileId, int tileLevel, LocalDateTime timeBucket) {
|
||||||
|
return String.format("%s|%d|%s", tileId, tileLevel, timeBucket);
|
||||||
|
}
|
||||||
|
|
||||||
|
@AfterStep
|
||||||
|
public void afterStep(StepExecution stepExecution) {
|
||||||
|
log.info("AccumulatingTileProcessor completed - processed: {}, skipped: {}, tiles: {}",
|
||||||
|
processedCount, skippedCount, accumulator.size());
|
||||||
|
|
||||||
|
// 밀도 계산
|
||||||
|
accumulator.values().forEach(this::calculateDensity);
|
||||||
|
|
||||||
|
// 메트릭 저장
|
||||||
|
stepExecution.getExecutionContext().putLong("totalProcessed", processedCount);
|
||||||
|
stepExecution.getExecutionContext().putLong("totalSkipped", skippedCount);
|
||||||
|
stepExecution.getExecutionContext().putInt("totalTiles", accumulator.size());
|
||||||
|
|
||||||
|
// 이 위치에서 바로 DB에 저장하면 안됨 - StepListener에서 처리해야 함
|
||||||
|
log.info("Accumulated {} tiles ready for writing", accumulator.size());
|
||||||
|
}
|
||||||
|
|
||||||
|
private void calculateDensity(TileStatistics stats) {
|
||||||
|
if (stats.getVesselCount() == null || stats.getVesselCount() == 0) {
|
||||||
|
stats.setVesselDensity(BigDecimal.ZERO);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
double tileArea = geoUtils.getTileArea(stats.getTileId());
|
||||||
|
|
||||||
|
if (tileArea > 0) {
|
||||||
|
BigDecimal density = BigDecimal.valueOf(stats.getVesselCount())
|
||||||
|
.divide(BigDecimal.valueOf(tileArea), 6, BigDecimal.ROUND_HALF_UP);
|
||||||
|
stats.setVesselDensity(density);
|
||||||
|
} else {
|
||||||
|
stats.setVesselDensity(BigDecimal.ZERO);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 누적된 결과 반환 (테스트용)
|
||||||
|
*/
|
||||||
|
public List<TileStatistics> getAccumulatedResults() {
|
||||||
|
log.info("[AccumulatingTileProcessor] getAccumulatedResults called - size: {}", accumulator.size());
|
||||||
|
return new ArrayList<>(accumulator.values());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 누적 데이터 초기화
|
||||||
|
*/
|
||||||
|
public void clear() {
|
||||||
|
accumulator.clear();
|
||||||
|
processedCount = 0;
|
||||||
|
skippedCount = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,333 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import gc.mda.signal_batch.global.util.DataSourceLogger;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.locationtech.jts.geom.*;
|
||||||
|
import org.locationtech.jts.io.WKTReader;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import jakarta.annotation.PostConstruct;
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.temporal.ChronoUnit;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class AreaStatisticsProcessor {
|
||||||
|
|
||||||
|
@Qualifier("queryJdbcTemplate")
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
@Qualifier("queryDataSource")
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
|
||||||
|
// 메모리에 구역 정보 캐싱
|
||||||
|
private final Map<String, AreaInfo> areaCache = new ConcurrentHashMap<>();
|
||||||
|
private final List<AreaInfo> areaList = new ArrayList<>();
|
||||||
|
|
||||||
|
// JTS 객체들
|
||||||
|
private final GeometryFactory geometryFactory = new GeometryFactory(new PrecisionModel(), 4326);
|
||||||
|
private final WKTReader wktReader = new WKTReader(geometryFactory);
|
||||||
|
|
||||||
|
@PostConstruct
|
||||||
|
public void init() {
|
||||||
|
log.info("========== AreaStatisticsProcessor Initialization ==========");
|
||||||
|
DataSourceLogger.logJdbcTemplateInfo("AreaStatisticsProcessor", queryJdbcTemplate);
|
||||||
|
|
||||||
|
// t_areas 테이블 존재 확인
|
||||||
|
boolean tableExists = DataSourceLogger.checkTableExists(
|
||||||
|
"AreaStatisticsProcessor", queryJdbcTemplate, "signal", "t_areas"
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!tableExists) {
|
||||||
|
log.error("CRITICAL: Table signal.t_areas does not exist in query database!");
|
||||||
|
log.error("Please run: scripts/sql/create-query-db-schema.sql on the query database");
|
||||||
|
} else {
|
||||||
|
// 초기화 시 구역 정보 로드
|
||||||
|
loadAreas();
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("========== End of Initialization ==========");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class AreaInfo {
|
||||||
|
private String areaId;
|
||||||
|
private String areaName;
|
||||||
|
private String areaType;
|
||||||
|
private String geomWkt;
|
||||||
|
private Geometry geometry; // JTS Geometry 객체
|
||||||
|
private Envelope envelope; // Bounding Box for quick filtering
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class AreaStatistics implements java.io.Serializable {
|
||||||
|
private String areaId;
|
||||||
|
private LocalDateTime timeBucket;
|
||||||
|
private Integer vesselCount;
|
||||||
|
private Integer inCount;
|
||||||
|
private Integer outCount;
|
||||||
|
private Map<String, VesselMovement> transitVessels;
|
||||||
|
private Map<String, VesselMovement> stationaryVessels;
|
||||||
|
private BigDecimal avgSog;
|
||||||
|
private LocalDateTime createdAt;
|
||||||
|
|
||||||
|
public AreaStatistics(String areaId, LocalDateTime timeBucket) {
|
||||||
|
this.areaId = areaId;
|
||||||
|
this.timeBucket = timeBucket;
|
||||||
|
this.vesselCount = 0;
|
||||||
|
this.inCount = 0;
|
||||||
|
this.outCount = 0;
|
||||||
|
this.transitVessels = new HashMap<>();
|
||||||
|
this.stationaryVessels = new HashMap<>();
|
||||||
|
this.avgSog = BigDecimal.ZERO;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class VesselMovement implements java.io.Serializable {
|
||||||
|
private String vesselKey;
|
||||||
|
private LocalDateTime enterTime;
|
||||||
|
private LocalDateTime exitTime;
|
||||||
|
private BigDecimal avgSpeed;
|
||||||
|
private Integer pointCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
@StepScope
|
||||||
|
public ItemProcessor<List<VesselData>, List<AreaStatistics>> batchProcessor() {
|
||||||
|
return batchProcessor(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
@StepScope
|
||||||
|
public ItemProcessor<List<VesselData>, List<AreaStatistics>> batchProcessor(
|
||||||
|
@Value("#{jobParameters['timeBucketMinutes']}") Integer bucketMinutes) {
|
||||||
|
|
||||||
|
return items -> {
|
||||||
|
// 구역 정보가 없으면 빈 결과 반환
|
||||||
|
if (areaList.isEmpty()) {
|
||||||
|
log.warn("No areas loaded, skipping area statistics processing");
|
||||||
|
return new ArrayList<>();
|
||||||
|
}
|
||||||
|
|
||||||
|
Map<String, AreaStatistics> statsMap = new HashMap<>();
|
||||||
|
Map<String, Map<String, VesselMovement>> vesselTracker = new HashMap<>();
|
||||||
|
|
||||||
|
for (VesselData item : items) {
|
||||||
|
if (!item.isValidPosition()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 메모리에서 속한 구역 찾기 (DB 쿼리 없음!)
|
||||||
|
List<String> areaIds = findAreasForPointInMemory(item.getLat(), item.getLon());
|
||||||
|
|
||||||
|
int bucketSize = bucketMinutes != null ? bucketMinutes : 5; // 5분 단위로 변경
|
||||||
|
LocalDateTime bucket = item.getMessageTime()
|
||||||
|
.truncatedTo(ChronoUnit.MINUTES)
|
||||||
|
.withMinute((item.getMessageTime().getMinute() / bucketSize) * bucketSize);
|
||||||
|
|
||||||
|
for (String areaId : areaIds) {
|
||||||
|
String statsKey = areaId + "_" + bucket.toString();
|
||||||
|
AreaStatistics stats = statsMap.computeIfAbsent(statsKey,
|
||||||
|
k -> new AreaStatistics(areaId, bucket)
|
||||||
|
);
|
||||||
|
|
||||||
|
// 선박 이동 추적
|
||||||
|
String vesselKey = item.getVesselKey();
|
||||||
|
Map<String, VesselMovement> areaVessels = vesselTracker.computeIfAbsent(
|
||||||
|
areaId, k -> new HashMap<>()
|
||||||
|
);
|
||||||
|
|
||||||
|
VesselMovement movement = areaVessels.computeIfAbsent(vesselKey,
|
||||||
|
k -> {
|
||||||
|
VesselMovement vm = new VesselMovement();
|
||||||
|
vm.setVesselKey(vesselKey);
|
||||||
|
vm.setEnterTime(item.getMessageTime());
|
||||||
|
vm.setPointCount(0);
|
||||||
|
vm.setAvgSpeed(BigDecimal.ZERO);
|
||||||
|
stats.setInCount(stats.getInCount() + 1);
|
||||||
|
return vm;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
movement.setExitTime(item.getMessageTime());
|
||||||
|
movement.setPointCount(movement.getPointCount() + 1);
|
||||||
|
|
||||||
|
// 평균 속도 계산
|
||||||
|
if (item.getSog() != null) {
|
||||||
|
BigDecimal currentTotal = movement.getAvgSpeed()
|
||||||
|
.multiply(BigDecimal.valueOf(movement.getPointCount() - 1));
|
||||||
|
movement.setAvgSpeed(
|
||||||
|
currentTotal.add(item.getSog())
|
||||||
|
.divide(BigDecimal.valueOf(movement.getPointCount()), 2, BigDecimal.ROUND_HALF_UP)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 정류/통과 구분 (10분 이상 체류 시 정류)
|
||||||
|
long stayMinutes = ChronoUnit.MINUTES.between(
|
||||||
|
movement.getEnterTime(), movement.getExitTime()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (stayMinutes > 10) {
|
||||||
|
stats.getStationaryVessels().put(vesselKey, movement);
|
||||||
|
} else {
|
||||||
|
stats.getTransitVessels().put(vesselKey, movement);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 통계 최종 계산
|
||||||
|
statsMap.values().forEach(stats -> {
|
||||||
|
stats.setVesselCount(
|
||||||
|
stats.getTransitVessels().size() + stats.getStationaryVessels().size()
|
||||||
|
);
|
||||||
|
|
||||||
|
// 평균 속도 계산
|
||||||
|
List<BigDecimal> allSpeeds = new ArrayList<>();
|
||||||
|
stats.getTransitVessels().values().stream()
|
||||||
|
.map(VesselMovement::getAvgSpeed)
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.forEach(allSpeeds::add);
|
||||||
|
stats.getStationaryVessels().values().stream()
|
||||||
|
.map(VesselMovement::getAvgSpeed)
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.forEach(allSpeeds::add);
|
||||||
|
|
||||||
|
if (!allSpeeds.isEmpty()) {
|
||||||
|
BigDecimal totalSpeed = allSpeeds.stream()
|
||||||
|
.reduce(BigDecimal.ZERO, BigDecimal::add);
|
||||||
|
stats.setAvgSog(
|
||||||
|
totalSpeed.divide(BigDecimal.valueOf(allSpeeds.size()), 2, BigDecimal.ROUND_HALF_UP)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return new ArrayList<>(statsMap.values());
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private void loadAreas() {
|
||||||
|
log.info("Loading areas from query database...");
|
||||||
|
DataSourceLogger.logJdbcTemplateInfo("AreaStatisticsProcessor.loadAreas", queryJdbcTemplate);
|
||||||
|
|
||||||
|
String sql = "SELECT area_id, area_name, area_type, public.ST_AsText(area_geom) as geom_wkt FROM signal.t_areas";
|
||||||
|
|
||||||
|
try {
|
||||||
|
boolean exists = DataSourceLogger.checkTableExists(
|
||||||
|
"AreaStatisticsProcessor.loadAreas", queryJdbcTemplate, "signal", "t_areas"
|
||||||
|
);
|
||||||
|
|
||||||
|
if (exists) {
|
||||||
|
List<AreaInfo> areas = queryJdbcTemplate.query(sql, (rs, rowNum) -> {
|
||||||
|
AreaInfo area = new AreaInfo();
|
||||||
|
area.setAreaId(rs.getString("area_id"));
|
||||||
|
area.setAreaName(rs.getString("area_name"));
|
||||||
|
area.setAreaType(rs.getString("area_type"));
|
||||||
|
area.setGeomWkt(rs.getString("geom_wkt"));
|
||||||
|
|
||||||
|
// WKT를 JTS Geometry로 변환
|
||||||
|
try {
|
||||||
|
Geometry geom = wktReader.read(area.getGeomWkt());
|
||||||
|
area.setGeometry(geom);
|
||||||
|
area.setEnvelope(geom.getEnvelopeInternal());
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to parse WKT for area {}: {}", area.getAreaId(), e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
return area;
|
||||||
|
});
|
||||||
|
|
||||||
|
areas.forEach(area -> {
|
||||||
|
areaCache.put(area.getAreaId(), area);
|
||||||
|
areaList.add(area);
|
||||||
|
});
|
||||||
|
|
||||||
|
log.info("Successfully loaded {} areas into memory cache", areas.size());
|
||||||
|
log.info("Area types: {}", areas.stream()
|
||||||
|
.collect(java.util.stream.Collectors.groupingBy(
|
||||||
|
AreaInfo::getAreaType,
|
||||||
|
java.util.stream.Collectors.counting()
|
||||||
|
)));
|
||||||
|
} else {
|
||||||
|
log.error("Cannot load areas - table signal.t_areas does not exist!");
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to load areas", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 메모리에서 포인트가 속한 구역 찾기 (DB 쿼리 없음!)
|
||||||
|
*/
|
||||||
|
public List<String> findAreasForPointInMemory(double lat, double lon) {
|
||||||
|
|
||||||
|
// JTS Point 생성
|
||||||
|
Point point = geometryFactory.createPoint(new Coordinate(lon, lat));
|
||||||
|
|
||||||
|
return areaList.parallelStream()
|
||||||
|
.filter(area -> area.getGeometry() != null)
|
||||||
|
.filter(area -> area.getEnvelope().contains(lon, lat))
|
||||||
|
.filter(area -> {
|
||||||
|
try {
|
||||||
|
return area.getGeometry().contains(point);
|
||||||
|
} catch (Exception e) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.map(AreaInfo::getAreaId)
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
// List<String> areaIds = new ArrayList<>();
|
||||||
|
// // 모든 구역에 대해 contains 검사
|
||||||
|
// for (AreaInfo area : areaList) {
|
||||||
|
// if (area.getGeometry() == null) {
|
||||||
|
// continue;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// // 1. Envelope(Bounding Box)로 빠른 필터링
|
||||||
|
// if (!area.getEnvelope().contains(lon, lat)) {
|
||||||
|
// continue;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// // 2. 정확한 contains 검사
|
||||||
|
// try {
|
||||||
|
// if (area.getGeometry().contains(point)) {
|
||||||
|
// areaIds.add(area.getAreaId());
|
||||||
|
// }
|
||||||
|
// } catch (Exception e) {
|
||||||
|
// log.debug("Error checking contains for area {}: {}", area.getAreaId(), e.getMessage());
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// return areaIds;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 상태 조회 (디버깅/모니터링용)
|
||||||
|
*/
|
||||||
|
public Map<String, Object> getCacheStats() {
|
||||||
|
Map<String, Object> stats = new HashMap<>();
|
||||||
|
stats.put("loadedAreas", areaList.size());
|
||||||
|
stats.put("areaTypes", areaList.stream()
|
||||||
|
.collect(java.util.stream.Collectors.groupingBy(
|
||||||
|
AreaInfo::getAreaType,
|
||||||
|
java.util.stream.Collectors.counting()
|
||||||
|
)));
|
||||||
|
return stats;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,138 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalDetectionResult;
|
||||||
|
import gc.mda.signal_batch.global.util.LineStringMUtils;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 시간별/일별 궤적 프로세서 기본 클래스 - 비정상 궤적 검출 기능 포함
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public abstract class BaseTrackProcessorWithAbnormalDetection implements ItemProcessor<VesselTrack.VesselKey, AbnormalDetectionResult> {
|
||||||
|
|
||||||
|
protected final ItemProcessor<VesselTrack.VesselKey, VesselTrack> trackProcessor;
|
||||||
|
protected final AbnormalTrackDetector abnormalTrackDetector;
|
||||||
|
protected final DataSource queryDataSource;
|
||||||
|
|
||||||
|
private static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public AbnormalDetectionResult process(VesselTrack.VesselKey vesselKey) throws Exception {
|
||||||
|
// 기존 프로세서로 궤적 생성
|
||||||
|
VesselTrack track = trackProcessor.process(vesselKey);
|
||||||
|
|
||||||
|
if (track == null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 이전 bucket의 마지막 궤적 조회
|
||||||
|
VesselTrack previousTrack = getPreviousBucketLastTrack(vesselKey);
|
||||||
|
|
||||||
|
// Bucket 간 연결점만 검사 (하위 데이터는 이미 검증됨)
|
||||||
|
AbnormalDetectionResult result = abnormalTrackDetector.detectBucketTransitionOnly(track, previousTrack);
|
||||||
|
|
||||||
|
if (result.hasAbnormalities()) {
|
||||||
|
log.debug("Abnormal track detected for vessel {}/{} at {}: {}",
|
||||||
|
track.getSigSrcCd(), track.getTargetId(), track.getTimeBucket(),
|
||||||
|
result.getAbnormalSegments().size());
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 이전 버킷의 마지막 궤적 조회
|
||||||
|
*/
|
||||||
|
protected VesselTrack getPreviousBucketLastTrack(VesselTrack.VesselKey vesselKey) {
|
||||||
|
try {
|
||||||
|
String sql = """
|
||||||
|
SELECT sig_src_cd, target_id, time_bucket,
|
||||||
|
end_position,
|
||||||
|
public.ST_AsText(public.ST_LineSubstring(track_geom, 0.9, 1.0)) as last_segment
|
||||||
|
FROM %s
|
||||||
|
WHERE sig_src_cd = ?
|
||||||
|
AND target_id = ?
|
||||||
|
AND time_bucket >= ?
|
||||||
|
AND time_bucket < ?
|
||||||
|
ORDER BY time_bucket DESC
|
||||||
|
LIMIT 1
|
||||||
|
""".formatted(getPreviousTrackTableName());
|
||||||
|
|
||||||
|
LocalDateTime currentBucket = getNormalizedBucket(vesselKey.getTimeBucket());
|
||||||
|
LocalDateTime previousBucket = getPreviousBucket(currentBucket);
|
||||||
|
|
||||||
|
// Convert to java.sql.Timestamp for proper PostgreSQL type handling
|
||||||
|
Timestamp previousBucketTimestamp = Timestamp.valueOf(previousBucket);
|
||||||
|
Timestamp currentBucketTimestamp = Timestamp.valueOf(currentBucket);
|
||||||
|
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
return jdbcTemplate.queryForObject(sql,
|
||||||
|
(rs, rowNum) -> {
|
||||||
|
return VesselTrack.builder()
|
||||||
|
.sigSrcCd(rs.getString("sig_src_cd"))
|
||||||
|
.targetId(rs.getString("target_id"))
|
||||||
|
.timeBucket(rs.getTimestamp("time_bucket").toLocalDateTime())
|
||||||
|
.trackGeom(rs.getString("last_segment"))
|
||||||
|
.endPosition(parseEndPosition(rs.getString("end_position")))
|
||||||
|
.build();
|
||||||
|
},
|
||||||
|
vesselKey.getSigSrcCd(), vesselKey.getTargetId(), previousBucketTimestamp, currentBucketTimestamp
|
||||||
|
);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.debug("No previous bucket track found for vessel {}", vesselKey);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* JSON 형식의 end_position 파싱
|
||||||
|
*/
|
||||||
|
protected VesselTrack.TrackPosition parseEndPosition(String json) {
|
||||||
|
if (json == null) return null;
|
||||||
|
try {
|
||||||
|
String lat = LineStringMUtils.extractJsonValue(json, "lat");
|
||||||
|
String lon = LineStringMUtils.extractJsonValue(json, "lon");
|
||||||
|
String time = LineStringMUtils.extractJsonValue(json, "time");
|
||||||
|
String sog = LineStringMUtils.extractJsonValue(json, "sog");
|
||||||
|
|
||||||
|
return VesselTrack.TrackPosition.builder()
|
||||||
|
.lat(lat != null ? Double.parseDouble(lat) : null)
|
||||||
|
.lon(lon != null ? Double.parseDouble(lon) : null)
|
||||||
|
.time(time != null ? LocalDateTime.parse(time, TIMESTAMP_FORMATTER) : null)
|
||||||
|
.sog(sog != null ? new BigDecimal(sog) : null)
|
||||||
|
.build();
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to parse end position: {}", json, e);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 이전 트랙을 조회할 테이블명 반환 (하위 클래스에서 구현)
|
||||||
|
*/
|
||||||
|
protected abstract String getPreviousTrackTableName();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 정규화된 버킷 시간 반환 (하위 클래스에서 구현)
|
||||||
|
*/
|
||||||
|
protected abstract LocalDateTime getNormalizedBucket(LocalDateTime timeBucket);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 이전 버킷 시간 계산 (하위 클래스에서 구현)
|
||||||
|
*/
|
||||||
|
protected abstract LocalDateTime getPreviousBucket(LocalDateTime currentBucket);
|
||||||
|
}
|
||||||
@ -0,0 +1,210 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.item.ItemProcessor;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import gc.mda.signal_batch.global.util.LineStringMUtils;
|
||||||
|
import gc.mda.signal_batch.global.util.TrackSimplificationUtils;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class DailyTrackProcessor implements ItemProcessor<VesselTrack.VesselKey, VesselTrack> {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
private final JdbcTemplate jdbcTemplate;
|
||||||
|
private static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public VesselTrack process(VesselTrack.VesselKey vesselKey) throws Exception {
|
||||||
|
LocalDateTime dayBucket = vesselKey.getTimeBucket()
|
||||||
|
.withHour(0)
|
||||||
|
.withMinute(0)
|
||||||
|
.withSecond(0)
|
||||||
|
.withNano(0);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
WITH ordered_tracks AS (
|
||||||
|
SELECT *
|
||||||
|
FROM signal.t_vessel_tracks_hourly
|
||||||
|
WHERE sig_src_cd = ?
|
||||||
|
AND target_id = ?
|
||||||
|
AND time_bucket >= ?
|
||||||
|
AND time_bucket < ?
|
||||||
|
AND track_geom IS NOT NULL
|
||||||
|
AND public.ST_NPoints(track_geom) > 0
|
||||||
|
ORDER BY time_bucket
|
||||||
|
),
|
||||||
|
merged_coords AS (
|
||||||
|
SELECT
|
||||||
|
sig_src_cd,
|
||||||
|
target_id,
|
||||||
|
string_agg(
|
||||||
|
substring(public.ST_AsText(track_geom) from 'M \\((.+)\\)'),
|
||||||
|
','
|
||||||
|
ORDER BY time_bucket
|
||||||
|
) FILTER (WHERE track_geom IS NOT NULL) as all_coords
|
||||||
|
FROM ordered_tracks
|
||||||
|
GROUP BY sig_src_cd, target_id
|
||||||
|
),
|
||||||
|
merged_tracks AS (
|
||||||
|
SELECT
|
||||||
|
mc.sig_src_cd,
|
||||||
|
mc.target_id,
|
||||||
|
TO_TIMESTAMP(?, 'YYYY-MM-DD HH24:MI:SS') as time_bucket,
|
||||||
|
public.ST_GeomFromText('LINESTRING M(' || mc.all_coords || ')') as merged_geom,
|
||||||
|
(SELECT MAX(max_speed) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as max_speed,
|
||||||
|
(SELECT SUM(point_count) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as total_points,
|
||||||
|
(SELECT MIN(time_bucket) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as start_time,
|
||||||
|
(SELECT MAX(time_bucket) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as end_time,
|
||||||
|
(SELECT start_position FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id ORDER BY time_bucket LIMIT 1) as start_pos,
|
||||||
|
(SELECT end_position FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id ORDER BY time_bucket DESC LIMIT 1) as end_pos
|
||||||
|
FROM merged_coords mc
|
||||||
|
),
|
||||||
|
calculated_tracks AS (
|
||||||
|
SELECT
|
||||||
|
*,
|
||||||
|
public.ST_Length(merged_geom::geography) / 1852.0 as total_distance,
|
||||||
|
CASE
|
||||||
|
WHEN public.ST_NPoints(merged_geom) > 0 THEN
|
||||||
|
public.ST_M(public.ST_PointN(merged_geom, public.ST_NPoints(merged_geom))) -
|
||||||
|
public.ST_M(public.ST_PointN(merged_geom, 1))
|
||||||
|
ELSE
|
||||||
|
EXTRACT(EPOCH FROM
|
||||||
|
TO_TIMESTAMP(end_pos->>'time', 'YYYY-MM-DD HH24:MI:SS') - TO_TIMESTAMP(start_pos->>'time', 'YYYY-MM-DD HH24:MI:SS')
|
||||||
|
)
|
||||||
|
END as time_diff_seconds
|
||||||
|
FROM merged_tracks
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
sig_src_cd,
|
||||||
|
target_id,
|
||||||
|
time_bucket,
|
||||||
|
merged_geom,
|
||||||
|
total_distance,
|
||||||
|
CASE
|
||||||
|
WHEN time_diff_seconds > 0 THEN
|
||||||
|
CAST(LEAST((total_distance / (time_diff_seconds / 3600.0)), 9999.99) AS numeric(6,2))
|
||||||
|
ELSE 0
|
||||||
|
END as avg_speed,
|
||||||
|
max_speed,
|
||||||
|
total_points,
|
||||||
|
start_time,
|
||||||
|
end_time,
|
||||||
|
start_pos,
|
||||||
|
end_pos,
|
||||||
|
public.ST_AsText(merged_geom) as geom_text
|
||||||
|
FROM calculated_tracks
|
||||||
|
""";
|
||||||
|
|
||||||
|
LocalDateTime startTime = dayBucket;
|
||||||
|
LocalDateTime endTime = dayBucket.plusDays(1);
|
||||||
|
|
||||||
|
// Convert to java.sql.Timestamp for proper PostgreSQL type handling
|
||||||
|
Timestamp startTimestamp = Timestamp.valueOf(startTime);
|
||||||
|
Timestamp endTimestamp = Timestamp.valueOf(endTime);
|
||||||
|
Timestamp dayBucketTimestamp = Timestamp.valueOf(dayBucket);
|
||||||
|
|
||||||
|
log.debug("DailyTrackProcessor params - sig_src_cd: {}, target_id: {}, startTime: {}, endTime: {}, dayBucket: {}",
|
||||||
|
vesselKey.getSigSrcCd(), vesselKey.getTargetId(), startTimestamp, endTimestamp, dayBucketTimestamp);
|
||||||
|
|
||||||
|
try {
|
||||||
|
return jdbcTemplate.queryForObject(sql,
|
||||||
|
(rs, rowNum) -> {
|
||||||
|
try {
|
||||||
|
return buildDailyTrack(rs, dayBucket);
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw new RuntimeException("Failed to build daily track", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
vesselKey.getSigSrcCd(), vesselKey.getTargetId(),
|
||||||
|
startTimestamp, endTimestamp, dayBucketTimestamp
|
||||||
|
);
|
||||||
|
} catch (org.springframework.dao.EmptyResultDataAccessException e) {
|
||||||
|
log.warn("No hourly data found for vessel {} in time range {}-{}, skipping daily aggregation",
|
||||||
|
vesselKey.getSigSrcCd() + "_" + vesselKey.getTargetId(), startTimestamp, endTimestamp);
|
||||||
|
return null;
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to process daily track for vessel {}: {}",
|
||||||
|
vesselKey.getSigSrcCd() + "_" + vesselKey.getTargetId(), e.getMessage(), e);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private VesselTrack buildDailyTrack(ResultSet rs, LocalDateTime dayBucket) throws Exception {
|
||||||
|
// Start/End position 추출
|
||||||
|
VesselTrack.TrackPosition startPos = null;
|
||||||
|
VesselTrack.TrackPosition endPos = null;
|
||||||
|
|
||||||
|
String startPosJson = rs.getString("start_pos");
|
||||||
|
String endPosJson = rs.getString("end_pos");
|
||||||
|
|
||||||
|
if (startPosJson != null) {
|
||||||
|
startPos = parseTrackPosition(startPosJson);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (endPosJson != null) {
|
||||||
|
endPos = parseTrackPosition(endPosJson);
|
||||||
|
}
|
||||||
|
|
||||||
|
// M값은 이미 SQL에서 재계산됨
|
||||||
|
String dailyLineStringM = rs.getString("geom_text");
|
||||||
|
|
||||||
|
// 일별 궤적 간소화 (20m 이내 생략, 최대 30분 간격)
|
||||||
|
String simplifiedLineStringM = TrackSimplificationUtils.simplifyDailyTrack(dailyLineStringM);
|
||||||
|
|
||||||
|
// 간소화 통계 로깅
|
||||||
|
if (!dailyLineStringM.equals(simplifiedLineStringM)) {
|
||||||
|
TrackSimplificationUtils.SimplificationStats stats =
|
||||||
|
TrackSimplificationUtils.getSimplificationStats(dailyLineStringM, simplifiedLineStringM);
|
||||||
|
log.debug("일별 궤적 간소화 - vessel: {}/{}, 원본: {}포인트, 간소화: {}포인트 ({}% 감소)",
|
||||||
|
rs.getString("sig_src_cd"), rs.getString("target_id"),
|
||||||
|
stats.originalPoints, stats.simplifiedPoints, (int)stats.reductionRate);
|
||||||
|
}
|
||||||
|
|
||||||
|
// track_geom만 사용
|
||||||
|
return VesselTrack.builder()
|
||||||
|
.sigSrcCd(rs.getString("sig_src_cd"))
|
||||||
|
.targetId(rs.getString("target_id"))
|
||||||
|
.timeBucket(dayBucket)
|
||||||
|
.trackGeom(simplifiedLineStringM)
|
||||||
|
.distanceNm(rs.getBigDecimal("total_distance"))
|
||||||
|
.avgSpeed(rs.getBigDecimal("avg_speed"))
|
||||||
|
.maxSpeed(rs.getBigDecimal("max_speed"))
|
||||||
|
.pointCount(rs.getInt("total_points"))
|
||||||
|
.startPosition(startPos)
|
||||||
|
.endPosition(endPos)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
private VesselTrack.TrackPosition parseTrackPosition(String json) {
|
||||||
|
try {
|
||||||
|
String latStr = LineStringMUtils.extractJsonValue(json, "lat");
|
||||||
|
String lonStr = LineStringMUtils.extractJsonValue(json, "lon");
|
||||||
|
String timeStr = LineStringMUtils.extractJsonValue(json, "time");
|
||||||
|
String sogStr = LineStringMUtils.extractJsonValue(json, "sog");
|
||||||
|
|
||||||
|
return VesselTrack.TrackPosition.builder()
|
||||||
|
.lat(latStr != null ? Double.parseDouble(latStr) : null)
|
||||||
|
.lon(lonStr != null ? Double.parseDouble(lonStr) : null)
|
||||||
|
.time(timeStr != null ? LocalDateTime.parse(timeStr, TIMESTAMP_FORMATTER) : null)
|
||||||
|
.sog(sogStr != null ? new BigDecimal(sogStr) : null)
|
||||||
|
.build();
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to parse track position: {}", json, e);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,38 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;

import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;

import javax.sql.DataSource;
import java.time.LocalDateTime;

/**
 * Daily track processor with abnormal-track detection.
 *
 * Thin specialization of {@link BaseTrackProcessorWithAbnormalDetection} that
 * aggregates from the hourly track table using day-sized time buckets.
 */
@Slf4j
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class DailyTrackProcessorWithAbnormalDetection extends BaseTrackProcessorWithAbnormalDetection {

    public DailyTrackProcessorWithAbnormalDetection(
            ItemProcessor<VesselTrack.VesselKey, VesselTrack> dailyTrackProcessor,
            AbnormalTrackDetector abnormalTrackDetector,
            DataSource queryDataSource) {
        super(dailyTrackProcessor, abnormalTrackDetector, queryDataSource);
    }

    /** Daily aggregation reads its previous-period tracks from the hourly table. */
    @Override
    protected String getPreviousTrackTableName() {
        return "signal.t_vessel_tracks_hourly";
    }

    /** Normalize to midnight of the same day (zeroes hour, minute, second, nano). */
    @Override
    protected LocalDateTime getNormalizedBucket(LocalDateTime timeBucket) {
        return timeBucket.toLocalDate().atStartOfDay();
    }

    /** The bucket immediately preceding the current one is exactly one day earlier. */
    @Override
    protected LocalDateTime getPreviousBucket(LocalDateTime currentBucket) {
        return currentBucket.minusDays(1);
    }
}
|
||||||
@ -0,0 +1,207 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;

import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.jdbc.core.JdbcTemplate;

import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import gc.mda.signal_batch.global.util.LineStringMUtils;
import gc.mda.signal_batch.global.util.TrackSimplificationUtils;
import javax.sql.DataSource;

/**
 * Aggregates a vessel's 5-minute track rows into a single hourly track.
 *
 * The heavy lifting is done in one PostGIS query: the 5-minute LINESTRING M
 * geometries in the hour window are concatenated coordinate-wise, re-measured,
 * and the distance/speed statistics are recomputed server-side. The resulting
 * geometry is then simplified in Java before being returned as a VesselTrack.
 */
@Slf4j
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@RequiredArgsConstructor
public class HourlyTrackProcessor implements ItemProcessor<VesselTrack.VesselKey, VesselTrack> {

    // NOTE(review): queryDataSource appears unused in this class body — presumably
    // kept for constructor-injection symmetry with sibling processors; confirm.
    private final DataSource queryDataSource;
    private final JdbcTemplate jdbcTemplate;
    // Format used both for JDBC parameter rendering and for parsing position JSON times.
    private static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Builds the hourly track for one vessel key.
     *
     * @param vesselKey signal source + target id + a time inside the target hour
     * @return the aggregated hourly VesselTrack, or null when there is no 5-minute
     *         data in the window or aggregation fails (item is then skipped)
     */
    @Override
    public VesselTrack process(VesselTrack.VesselKey vesselKey) throws Exception {
        // Truncate the key's time down to the start of its hour.
        LocalDateTime hourBucket = vesselKey.getTimeBucket()
                .withMinute(0)
                .withSecond(0)
                .withNano(0);

        // CTE pipeline:
        //   ordered_tracks   – the vessel's non-empty 5-min rows inside [start, end)
        //   merged_coords    – coordinate text of each LINESTRING M, concatenated in time order
        //   merged_tracks    – rebuilt single LINESTRING M plus min/max/sum roll-ups
        //   calculated_tracks– distance (metres -> nautical miles via /1852) and elapsed
        //                      seconds from the M values (fallback: start/end JSON times)
        String sql = """
            WITH ordered_tracks AS (
                SELECT *
                FROM signal.t_vessel_tracks_5min
                WHERE sig_src_cd = ?
                  AND target_id = ?
                  AND time_bucket >= ?
                  AND time_bucket < ?
                  AND track_geom IS NOT NULL
                  AND public.ST_NPoints(track_geom) > 0
                ORDER BY time_bucket
            ),
            merged_coords AS (
                SELECT
                    sig_src_cd,
                    target_id,
                    string_agg(
                        substring(public.ST_AsText(track_geom) from 'M \\((.+)\\)'),
                        ','
                        ORDER BY time_bucket
                    ) FILTER (WHERE track_geom IS NOT NULL) as all_coords
                FROM ordered_tracks
                GROUP BY sig_src_cd, target_id
            ),
            merged_tracks AS (
                SELECT
                    mc.sig_src_cd,
                    mc.target_id,
                    TO_TIMESTAMP(?, 'YYYY-MM-DD HH24:MI:SS') as time_bucket,
                    public.ST_GeomFromText('LINESTRING M(' || mc.all_coords || ')') as merged_geom,
                    (SELECT MAX(max_speed) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as max_speed,
                    (SELECT SUM(point_count) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as total_points,
                    (SELECT MIN(time_bucket) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as start_time,
                    (SELECT MAX(time_bucket) FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id) as end_time,
                    (SELECT start_position FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id ORDER BY time_bucket LIMIT 1) as start_pos,
                    (SELECT end_position FROM ordered_tracks WHERE sig_src_cd = mc.sig_src_cd AND target_id = mc.target_id ORDER BY time_bucket DESC LIMIT 1) as end_pos
                FROM merged_coords mc
            ),
            calculated_tracks AS (
                SELECT
                    *,
                    public.ST_Length(merged_geom::geography) / 1852.0 as total_distance,
                    CASE
                        WHEN public.ST_NPoints(merged_geom) > 0 THEN
                            public.ST_M(public.ST_PointN(merged_geom, public.ST_NPoints(merged_geom))) -
                            public.ST_M(public.ST_PointN(merged_geom, 1))
                        ELSE
                            EXTRACT(EPOCH FROM
                                TO_TIMESTAMP(end_pos->>'time', 'YYYY-MM-DD HH24:MI:SS') - TO_TIMESTAMP(start_pos->>'time', 'YYYY-MM-DD HH24:MI:SS')
                            )
                    END as time_diff_seconds
                FROM merged_tracks
            )
            SELECT
                sig_src_cd,
                target_id,
                time_bucket,
                merged_geom,
                total_distance,
                CASE
                    WHEN time_diff_seconds > 0 THEN
                        CAST(LEAST((total_distance / (time_diff_seconds / 3600.0)), 9999.99) AS numeric(6,2))
                    ELSE 0
                END as avg_speed,
                max_speed,
                total_points,
                start_time,
                end_time,
                start_pos,
                end_pos,
                public.ST_AsText(merged_geom) as geom_text
            FROM calculated_tracks
            """;

        LocalDateTime startTime = hourBucket;
        LocalDateTime endTime = hourBucket.plusHours(1);

        // Convert to java.sql.Timestamp for proper PostgreSQL type handling
        Timestamp startTimestamp = Timestamp.valueOf(startTime);
        Timestamp endTimestamp = Timestamp.valueOf(endTime);
        Timestamp hourBucketTimestamp = Timestamp.valueOf(hourBucket);

        log.debug("HourlyTrackProcessor params - sig_src_cd: {}, target_id: {}, startTime: {}, endTime: {}, hourBucket: {}",
                vesselKey.getSigSrcCd(), vesselKey.getTargetId(), startTimestamp, endTimestamp, hourBucketTimestamp);

        try {
            // Exactly one aggregated row is expected for the (vessel, hour) pair.
            return jdbcTemplate.queryForObject(sql,
                    (rs, rowNum) -> {
                        try {
                            return buildHourlyTrack(rs, hourBucket);
                        } catch (Exception e) {
                            // RowMapper cannot throw checked exceptions; wrap and rethrow.
                            throw new RuntimeException("Failed to build hourly track", e);
                        }
                    },
                    vesselKey.getSigSrcCd(), vesselKey.getTargetId(),
                    startTimestamp, endTimestamp, hourBucketTimestamp
            );
        } catch (org.springframework.dao.EmptyResultDataAccessException e) {
            // No 5-minute source rows in this hour: not an error — skip the item.
            log.warn("No 5min data found for vessel {} in time range {}-{}, skipping hourly aggregation",
                    vesselKey.getSigSrcCd() + "_" + vesselKey.getTargetId(), startTimestamp, endTimestamp);
            return null;
        } catch (Exception e) {
            // Any other failure is logged and the item is skipped rather than failing the step.
            log.error("Failed to process hourly track for vessel {}: {}",
                    vesselKey.getSigSrcCd() + "_" + vesselKey.getTargetId(), e.getMessage(), e);
            return null;
        }
    }

    /**
     * Maps the single aggregated result row into a VesselTrack, simplifying the
     * merged geometry before it is stored.
     */
    private VesselTrack buildHourlyTrack(ResultSet rs, LocalDateTime hourBucket) throws Exception {
        // Extract start/end positions from their JSON snapshot columns.
        VesselTrack.TrackPosition startPos = null;
        VesselTrack.TrackPosition endPos = null;

        String startPosJson = rs.getString("start_pos");
        String endPosJson = rs.getString("end_pos");

        if (startPosJson != null) {
            startPos = parseTrackPosition(startPosJson);
        }

        if (endPosJson != null) {
            endPos = parseTrackPosition(endPosJson);
        }

        // The M values have already been recomputed by the SQL above.
        String hourlyLineStringM = rs.getString("geom_text");

        // Simplify near-stationary points (drops moves under ~10m, keeps at most
        // 10-minute gaps — per TrackSimplificationUtils.simplifyHourlyTrack contract).
        String simplifiedLineStringM = TrackSimplificationUtils.simplifyHourlyTrack(hourlyLineStringM);

        // Log simplification statistics only when the geometry actually changed.
        if (!hourlyLineStringM.equals(simplifiedLineStringM)) {
            TrackSimplificationUtils.SimplificationStats stats =
                    TrackSimplificationUtils.getSimplificationStats(hourlyLineStringM, simplifiedLineStringM);
            log.debug("시간별 궤적 간소화 - vessel: {}/{}, 원본: {}포인트, 간소화: {}포인트 ({}% 감소)",
                    rs.getString("sig_src_cd"), rs.getString("target_id"),
                    stats.originalPoints, stats.simplifiedPoints, (int)stats.reductionRate);
        }

        // Only track_geom is populated (no per-point list at this aggregation level).
        return VesselTrack.builder()
                .sigSrcCd(rs.getString("sig_src_cd"))
                .targetId(rs.getString("target_id"))
                .timeBucket(hourBucket)
                .trackGeom(simplifiedLineStringM)
                .distanceNm(rs.getBigDecimal("total_distance"))
                .avgSpeed(rs.getBigDecimal("avg_speed"))
                .maxSpeed(rs.getBigDecimal("max_speed"))
                .pointCount(rs.getInt("total_points"))
                .startPosition(startPos)
                .endPosition(endPos)
                .build();
    }

    /**
     * Parses a JSON position snapshot into a TrackPosition; returns null (after
     * logging) if the JSON or any field fails to parse.
     */
    private VesselTrack.TrackPosition parseTrackPosition(String json) {
        try {
            String latStr = LineStringMUtils.extractJsonValue(json, "lat");
            String lonStr = LineStringMUtils.extractJsonValue(json, "lon");
            String timeStr = LineStringMUtils.extractJsonValue(json, "time");
            String sogStr = LineStringMUtils.extractJsonValue(json, "sog");

            return VesselTrack.TrackPosition.builder()
                    .lat(latStr != null ? Double.parseDouble(latStr) : null)
                    .lon(lonStr != null ? Double.parseDouble(lonStr) : null)
                    .time(timeStr != null ? LocalDateTime.parse(timeStr, TIMESTAMP_FORMATTER) : null)
                    .sog(sogStr != null ? new BigDecimal(sogStr) : null)
                    .build();
        } catch (Exception e) {
            log.error("Failed to parse track position: {}", json, e);
            return null;
        }
    }
}
|
||||||
@ -0,0 +1,38 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;

import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;

import javax.sql.DataSource;
import java.time.LocalDateTime;

/**
 * Hourly track processor with abnormal-track detection.
 *
 * Thin specialization of {@link BaseTrackProcessorWithAbnormalDetection} that
 * aggregates from the 5-minute track table using hour-sized time buckets.
 */
@Slf4j
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class HourlyTrackProcessorWithAbnormalDetection extends BaseTrackProcessorWithAbnormalDetection {

    public HourlyTrackProcessorWithAbnormalDetection(
            ItemProcessor<VesselTrack.VesselKey, VesselTrack> hourlyTrackProcessor,
            AbnormalTrackDetector abnormalTrackDetector,
            DataSource queryDataSource) {
        super(hourlyTrackProcessor, abnormalTrackDetector, queryDataSource);
    }

    /** Hourly aggregation reads its previous-period tracks from the 5-minute table. */
    @Override
    protected String getPreviousTrackTableName() {
        return "signal.t_vessel_tracks_5min";
    }

    /** Normalize to the top of the same hour (zeroes minute, second, nano). */
    @Override
    protected LocalDateTime getNormalizedBucket(LocalDateTime timeBucket) {
        return timeBucket.toLocalDate().atTime(timeBucket.getHour(), 0);
    }

    /** The bucket immediately preceding the current one is exactly one hour earlier. */
    @Override
    protected LocalDateTime getPreviousBucket(LocalDateTime currentBucket) {
        return currentBucket.minusHours(1);
    }
}
|
||||||
@ -0,0 +1,60 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;

import gc.mda.signal_batch.global.util.VesselDataHolder;
import gc.mda.signal_batch.domain.vessel.model.VesselData;
import gc.mda.signal_batch.domain.vessel.model.VesselLatestPosition;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;

import java.time.LocalDateTime;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Factory for item processors that reduce raw vessel data down to latest-position
 * records.
 */
@Slf4j
@Component
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class LatestPositionProcessor {

    /**
     * Processor that emits an item only when it is newer than anything previously
     * seen for the same vessel key; invalid positions and stale updates are dropped.
     *
     * NOTE(review): the dedup map is captured by the returned lambda and is never
     * cleared, so it grows with the number of distinct vessel keys for the lifetime
     * of the processor instance — confirm this is acceptable for the step's scale.
     */
    @StepScope
    public ItemProcessor<VesselData, VesselLatestPosition> processor() {
        // Keeps only the most recent position per vessel key.
        ConcurrentHashMap<String, VesselLatestPosition> seenByKey = new ConcurrentHashMap<>();

        return item -> {
            if (!item.isValidPosition()) {
                log.debug("Invalid position for vessel: {}", item.getVesselKey());
                return null;
            }

            String vesselKey = item.getVesselKey();
            VesselLatestPosition candidate = VesselLatestPosition.fromVesselData(item);
            VesselLatestPosition previous = seenByKey.get(vesselKey);

            // Drop the item when we already hold a position at least as recent.
            boolean isNewer = previous == null
                    || candidate.getLastUpdate().isAfter(previous.getLastUpdate());
            if (!isNewer) {
                return null;
            }

            seenByKey.put(vesselKey, candidate);
            return candidate;
        };
    }

    /**
     * Processor that keeps only valid positions observed at or after the given
     * cutoff time; everything else is filtered out (returns null).
     */
    @StepScope
    public ItemProcessor<VesselData, VesselLatestPosition> filteringProcessor(
            LocalDateTime cutoffTime) {

        return item -> {
            // Keep only data at or after the cutoff, with a valid position.
            boolean tooOld = item.getMessageTime().isBefore(cutoffTime);
            if (tooOld || !item.isValidPosition()) {
                return null;
            }
            return VesselLatestPosition.fromVesselData(item);
        };
    }
}
|
||||||
@ -0,0 +1,291 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;

import gc.mda.signal_batch.domain.gis.model.TileStatistics;
import gc.mda.signal_batch.domain.vessel.model.VesselData;
import gc.mda.signal_batch.global.util.HaeguGeoUtils;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.*;

/**
 * Factories for item processors that aggregate vessel positions into per-tile
 * statistics at the large (level 0, "haegu") and small (level 1, "sohaegu")
 * fishing-grid levels.
 *
 * Refactored: the bucket computation and TileStatistics construction that were
 * previously copy-pasted five times now live in shared private helpers; all
 * public/bean method signatures are unchanged.
 */
@Slf4j
@Configuration
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@RequiredArgsConstructor
public class TileAggregationProcessor {

    private final HaeguGeoUtils geoUtils;

    /**
     * Creates a batch processor for the given tile level and time-bucket size.
     *
     * @param tileLevel         highest tile level to aggregate (>=0 adds level 0, >=1 also adds level 1)
     * @param timeBucketMinutes bucket width in minutes used to floor message times
     * @return processor mapping a chunk of VesselData to its tile statistics,
     *         or null for an empty/null chunk
     */
    public ItemProcessor<List<VesselData>, List<TileStatistics>> batchProcessor(
            int tileLevel, int timeBucketMinutes) {

        return items -> {
            if (items == null || items.isEmpty()) {
                return null;
            }

            Map<String, TileStatistics> tileMap = new HashMap<>();

            for (VesselData item : items) {
                if (!item.isValidPosition()) {
                    continue;
                }

                LocalDateTime bucket = toBucket(item.getMessageTime(), timeBucketMinutes);

                // Aggregate into each requested level (0 = haegu, 1 = sohaegu).
                if (tileLevel >= 0) {
                    accumulate(tileMap, item, bucket, 0);
                }
                if (tileLevel >= 1) {
                    accumulate(tileMap, item, bucket, 1);
                }
            }

            // Finalize density for every accumulated tile.
            tileMap.values().forEach(this::calculateDensity);

            return new ArrayList<>(tileMap.values());
        };
    }

    /**
     * Step-scoped bean variant of {@link #batchProcessor(int, int)} that always
     * aggregates both levels; bucket size comes from job parameters (default 5).
     */
    @Bean
    @StepScope
    public ItemProcessor<List<VesselData>, List<TileStatistics>> tileAggregationBatchProcessor(
            @Value("#{jobParameters['timeBucketMinutes']}") Integer timeBucketMinutes) {

        final int bucketMinutes = (timeBucketMinutes != null) ? timeBucketMinutes : 5;

        return items -> {
            if (items == null || items.isEmpty()) {
                return null;
            }

            Map<String, TileStatistics> tileMap = new HashMap<>();

            for (VesselData item : items) {
                if (!item.isValidPosition()) {
                    continue;
                }

                LocalDateTime bucket = toBucket(item.getMessageTime(), bucketMinutes);

                // 1. Large-grid level (0), then 2. small-grid level (1).
                accumulate(tileMap, item, bucket, 0);
                accumulate(tileMap, item, bucket, 1);
            }

            // Finalize density for every accumulated tile.
            tileMap.values().forEach(this::calculateDensity);

            return new ArrayList<>(tileMap.values());
        };
    }

    /**
     * Step-scoped single-item processor. Statistics accumulate across items in a
     * closure-held map and are emitted every 1000 points per tile.
     *
     * NOTE(review): emitted TileStatistics objects remain in the accumulator and
     * keep being mutated afterwards; downstream writers presumably upsert — confirm.
     *
     * @param tileLevel         highest tile level to aggregate (default 1)
     * @param timeBucketMinutes bucket width in minutes (default 5)
     */
    @Bean
    @StepScope
    public ItemProcessor<VesselData, List<TileStatistics>> singleItemProcessor(
            @Value("#{jobParameters['tileLevel']}") Integer tileLevel,
            @Value("#{jobParameters['timeBucketMinutes']}") Integer timeBucketMinutes) {

        final int bucketMinutes = (timeBucketMinutes != null) ? timeBucketMinutes : 5;
        final int maxLevel = (tileLevel != null) ? tileLevel : 1;

        Map<String, TileStatistics> accumulator = new HashMap<>();

        return item -> {
            if (!item.isValidPosition()) {
                return null;
            }

            LocalDateTime bucket = toBucket(item.getMessageTime(), bucketMinutes);

            List<TileStatistics> result = new ArrayList<>();

            for (int level = 0; level <= Math.min(maxLevel, 1); level++) {
                TileStatistics stats = accumulate(accumulator, item, bucket, level);
                // Emit a snapshot every 1000 accumulated points for this tile.
                if (stats != null && stats.getTotalPoints() % 1000 == 0) {
                    calculateDensity(stats);
                    result.add(stats);
                }
            }

            return result.isEmpty() ? null : result;
        };
    }

    /**
     * Floors a message time down to the start of its bucket of the given width.
     */
    private LocalDateTime toBucket(LocalDateTime messageTime, int bucketMinutes) {
        return messageTime
                .truncatedTo(ChronoUnit.MINUTES)
                .withMinute((messageTime.getMinute() / bucketMinutes) * bucketMinutes);
    }

    /**
     * Creates an empty TileStatistics shell for the given tile/level/bucket.
     */
    private TileStatistics newTileStatistics(String tileId, int tileLevel, LocalDateTime bucket) {
        return TileStatistics.builder()
                .tileId(tileId)
                .tileLevel(tileLevel)
                .timeBucket(bucket)
                .uniqueVessels(new HashMap<>())
                .totalPoints(0L)
                .avgSog(BigDecimal.ZERO)
                .maxSog(BigDecimal.ZERO)
                .build();
    }

    /**
     * Resolves the tile for the item at the given level and folds the item into
     * the per-tile statistics map.
     *
     * @return the (possibly new) statistics the item was added to, or null when
     *         the position maps to no tile at that level (level 1 additionally
     *         requires a sohaegu number)
     */
    private TileStatistics accumulate(Map<String, TileStatistics> tileMap, VesselData item,
                                      LocalDateTime bucket, int level) {
        HaeguGeoUtils.HaeguTileInfo info = geoUtils.getHaeguTileInfo(
                item.getLat(), item.getLon(), level
        );

        if (info == null || (level >= 1 && info.sohaeguNo == null)) {
            return null;
        }

        String key = info.tileId + "_" + bucket.toString();
        TileStatistics stats = tileMap.computeIfAbsent(key,
                k -> newTileStatistics(info.tileId, level, bucket));
        stats.addVesselData(item);
        return stats;
    }

    /**
     * Computes vessel density (vessels per km²) for a tile; zero when the tile
     * has no vessels or an unknown/zero area.
     */
    private void calculateDensity(TileStatistics stats) {
        if (stats.getVesselCount() == null || stats.getVesselCount() == 0) {
            stats.setVesselDensity(BigDecimal.ZERO);
            return;
        }

        // Tile area in km² (per HaeguGeoUtils.getTileArea).
        double tileArea = geoUtils.getTileArea(stats.getTileId());

        if (tileArea > 0) {
            // density = vessel count / area
            BigDecimal density = BigDecimal.valueOf(stats.getVesselCount())
                    .divide(BigDecimal.valueOf(tileArea), 6, RoundingMode.HALF_UP);
            stats.setVesselDensity(density);
        } else {
            stats.setVesselDensity(BigDecimal.ZERO);
        }
    }
}
|
||||||
@ -0,0 +1,222 @@
|
|||||||
|
package gc.mda.signal_batch.batch.processor;

import gc.mda.signal_batch.domain.gis.cache.AreaBoundaryCache;
import gc.mda.signal_batch.domain.vessel.model.VesselData;
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
import gc.mda.signal_batch.global.util.HaeguGeoUtils;
import gc.mda.signal_batch.migration.unix_timestamp.strategy.UnixTimestampStrategy;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import org.springframework.beans.factory.annotation.Autowired;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Builds a 5-minute VesselTrack from a list of VesselData already grouped by
 * vessel key, including geometry (LINESTRING M with Unix-timestamp M values),
 * distance, speed statistics, and grid/area enrichment.
 *
 * Fix: replaced the deprecated int rounding constants
 * (BigDecimal.ROUND_HALF_UP, removed in recent JDKs) with RoundingMode.HALF_UP;
 * rounding behavior is identical.
 */
@Slf4j
@Component
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@RequiredArgsConstructor
public class VesselTrackProcessor implements ItemProcessor<List<VesselData>, List<VesselTrack>> {

    private final HaeguGeoUtils haeguGeoUtils;
    private final AreaBoundaryCache areaBoundaryCache; // cached area lookups

    @Autowired
    private UnixTimestampStrategy unixStrategy;

    /**
     * @param items positions for a single vessel (already grouped by vessel key),
     *              in the order provided by the reader
     * @return a single-element list with the built track, or null when the input
     *         is empty or the track is invalid (item is then filtered out)
     */
    @Override
    public List<VesselTrack> process(List<VesselData> items) throws Exception {
        if (items == null || items.isEmpty()) {
            return null;
        }

        // Data is already grouped by vessel key, so build directly.
        VesselTrack track = buildTrack(items);
        if (track != null && track.hasValidTrack()) {
            // Abnormal-track filtering and persistence happen in VesselTrackStepConfig.
            return List.of(track);
        }

        // Log why the item was filtered out.
        if (track == null) {
            log.debug("Track is null for vessel: {}", items.get(0).getVesselKey());
        } else if (!track.hasValidTrack()) {
            log.debug("Invalid track for vessel: {}, points: {}",
                    track.getVesselKey(), track.getTrackPoints() != null ? track.getTrackPoints().size() : 0);
        }

        return null;
    }

    /**
     * Assembles the track: 5-minute bucket, track points, start/end snapshots,
     * distance, speed statistics, geometry, and grid/area enrichment.
     */
    private VesselTrack buildTrack(List<VesselData> vesselDataList) {
        VesselData first = vesselDataList.get(0);
        VesselData last = vesselDataList.get(vesselDataList.size() - 1);

        // Floor the first message time to its 5-minute bucket.
        LocalDateTime timeBucket = first.getMessageTime()
                .withSecond(0)
                .withNano(0)
                .minusMinutes(first.getMessageTime().getMinute() % 5);

        // Build the ordered track-point list.
        List<VesselTrack.TrackPoint> trackPoints = vesselDataList.stream()
                .map(data -> VesselTrack.TrackPoint.builder()
                        .time(data.getMessageTime())
                        .lat(data.getLat())
                        .lon(data.getLon())
                        .sog(data.getSog())
                        .cog(data.getCog())
                        .heading(data.getHeading())
                        .build())
                .collect(Collectors.toList());

        VesselTrack track = VesselTrack.builder()
                .sigSrcCd(first.getSigSrcCd())
                .targetId(first.getTargetId())
                .timeBucket(timeBucket)
                .trackPoints(trackPoints)
                .pointCount(trackPoints.size())
                .startPosition(VesselTrack.TrackPosition.builder()
                        .lat(first.getLat())
                        .lon(first.getLon())
                        .time(first.getMessageTime())
                        .sog(first.getSog())
                        .build())
                .endPosition(VesselTrack.TrackPosition.builder()
                        .lat(last.getLat())
                        .lon(last.getLon())
                        .time(last.getMessageTime())
                        .sog(last.getSog())
                        .build())
                .build();

        // Distance over the track.
        track.setDistanceNm(track.calculateDistance());

        // Speed statistics (max from SOG values, average from distance/duration).
        calculateSpeedStatistics(track, vesselDataList);

        // PostGIS LINESTRING M geometry (Unix-timestamp M values only).
        track.setTrackGeom(unixStrategy.buildLineStringM(trackPoints));

        // Fishing-grid ("haegu") enrichment from the first position.
        addHaeguInfo(track, first);

        // Area enrichment via the boundary cache.
        addAreaInfo(track);

        return track;
    }

    /**
     * Sets max speed (largest valid SOG) and average speed (distance over the
     * M-value-derived duration), both capped at NUMERIC(6,2)'s 9999.99.
     */
    private void calculateSpeedStatistics(VesselTrack track, List<VesselData> vesselDataList) {
        BigDecimal MAX_SPEED_LIMIT = new BigDecimal("9999.99"); // NUMERIC(6,2) cap

        // 1. Max speed: largest non-negative SOG within the cap.
        BigDecimal maxSpeed = vesselDataList.stream()
                .map(VesselData::getSog)
                .filter(Objects::nonNull)
                .filter(speed -> speed.compareTo(BigDecimal.ZERO) >= 0)
                .filter(speed -> speed.compareTo(MAX_SPEED_LIMIT) <= 0)
                .max(BigDecimal::compareTo)
                .orElse(BigDecimal.ZERO);
        track.setMaxSpeed(maxSpeed);

        // 2. Average speed from actual distance and M-value-based elapsed time,
        //    computed only when the vessel moved more than 0.1 nm.
        if (track.getDistanceNm() != null && track.getDistanceNm().compareTo(new BigDecimal("0.1")) > 0) {
            long totalSeconds = calculateDurationFromMValues(track);

            if (totalSeconds > 0) {
                // distance (nm) / time (h) = speed (knots)
                BigDecimal hours = BigDecimal.valueOf(totalSeconds).divide(BigDecimal.valueOf(3600), 4, RoundingMode.HALF_UP);
                BigDecimal avgSpeed = track.getDistanceNm().divide(hours, 2, RoundingMode.HALF_UP);
                // Only the NUMERIC(6,2) cap applies — extreme values can be genuine data.
                track.setAvgSpeed(avgSpeed.min(MAX_SPEED_LIMIT));
            } else {
                // Distance with no elapsed time indicates bad data — report zero.
                track.setAvgSpeed(BigDecimal.ZERO);
            }
        } else {
            // Under 0.1 nm the vessel is treated as anchored.
            track.setAvgSpeed(BigDecimal.ZERO);
        }
    }

    /**
     * Derives elapsed seconds from the geometry's first/last M values
     * (Unix timestamps); falls back to start/end position times when the WKT is
     * missing, not a LINESTRING M, or unparsable.
     */
    private long calculateDurationFromMValues(VesselTrack track) {
        String wkt = track.getTrackGeom();
        if (wkt == null || !wkt.contains("LINESTRING M")) {
            return java.time.Duration.between(
                    track.getStartPosition().getTime(),
                    track.getEndPosition().getTime()
            ).getSeconds();
        }

        try {
            String coords = wkt.substring(wkt.indexOf('(') + 1, wkt.lastIndexOf(')'));
            String[] points = coords.split(",");

            if (points.length == 0) return 0;

            String[] firstPoint = points[0].trim().split(" ");
            String[] lastPoint = points[points.length - 1].trim().split(" ");

            if (firstPoint.length < 3 || lastPoint.length < 3) return 0;

            double firstM = Double.parseDouble(firstPoint[2]);
            double lastM = Double.parseDouble(lastPoint[2]);

            // Unix timestamps: the difference is the elapsed time in seconds.
            return (long)(lastM - firstM);
        } catch (Exception e) {
            log.warn("M값 파싱 실패, 기존 방식 사용: {}", e.getMessage());
            return java.time.Duration.between(
                    track.getStartPosition().getTime(),
                    track.getEndPosition().getTime()
            ).getSeconds();
        }
    }

    /**
     * Sets the large-grid ("haegu") number from the first position, when the
     * position falls inside a known grid cell.
     */
    private void addHaeguInfo(VesselTrack track, VesselData vesselData) {
        HaeguGeoUtils.HaeguTileInfo haeguInfo = haeguGeoUtils.getHaeguTileInfo(vesselData.getLat(), vesselData.getLon(), 0);
        if (haeguInfo != null) {
            track.setHaeguNo(haeguInfo.haeguNo);
        }
    }

    /**
     * Cache-backed area enrichment: sets the start area and entry time, plus an
     * exit time when the track ends in a different area. Lookup failures are
     * logged at debug level and otherwise ignored (best effort).
     */
    private void addAreaInfo(VesselTrack track) {
        try {
            String startAreaId = areaBoundaryCache.findAreaId(
                    track.getStartPosition().getLat(),
                    track.getStartPosition().getLon()
            );
            String endAreaId = areaBoundaryCache.findAreaId(
                    track.getEndPosition().getLat(),
                    track.getEndPosition().getLon()
            );

            if (startAreaId != null) {
                track.setAreaId(startAreaId);
                track.setEntryTime(track.getStartPosition().getTime());

                if (!startAreaId.equals(endAreaId)) {
                    track.setExitTime(track.getEndPosition().getTime());
                }
            }
        } catch (Exception e) {
            log.debug("Failed to add area info for vessel {}: {}", track.getVesselKey(), e.getMessage());
        }
    }

}
|
||||||
@ -0,0 +1,54 @@
|
|||||||
|
package gc.mda.signal_batch.batch.reader;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.util.VesselDataHolder;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.StepExecution;
|
||||||
|
import org.springframework.batch.core.annotation.AfterStep;
|
||||||
|
import org.springframework.batch.core.annotation.BeforeStep;
|
||||||
|
import org.springframework.batch.item.ItemReader;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@Slf4j
|
||||||
|
public class InMemoryVesselDataReader implements ItemReader<VesselData> {
|
||||||
|
|
||||||
|
private final VesselDataHolder dataHolder;
|
||||||
|
private Iterator<VesselData> iterator;
|
||||||
|
private boolean initialized = false;
|
||||||
|
|
||||||
|
@BeforeStep
|
||||||
|
public void beforeStep(StepExecution stepExecution) {
|
||||||
|
List<VesselData> data = dataHolder.getData();
|
||||||
|
this.iterator = data.iterator();
|
||||||
|
this.initialized = true;
|
||||||
|
log.info("Initialized reader with {} items for step: {}",
|
||||||
|
data.size(), stepExecution.getStepName());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public VesselData read() {
|
||||||
|
if (!initialized) {
|
||||||
|
throw new IllegalStateException("Reader not initialized");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (iterator.hasNext()) {
|
||||||
|
return iterator.next();
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@AfterStep
|
||||||
|
public void afterStep(StepExecution stepExecution) {
|
||||||
|
iterator = null;
|
||||||
|
initialized = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,73 @@
|
|||||||
|
package gc.mda.signal_batch.batch.reader;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.util.VesselTrackDataHolder;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.item.ItemReader;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class InMemoryVesselTrackDataReader implements ItemReader<List<VesselData>> {
|
||||||
|
|
||||||
|
private final VesselTrackDataHolder dataHolder;
|
||||||
|
private final int chunkSize;
|
||||||
|
|
||||||
|
private Iterator<Map.Entry<String, List<VesselData>>> groupIterator;
|
||||||
|
private List<List<VesselData>> currentChunk;
|
||||||
|
private Iterator<List<VesselData>> chunkIterator;
|
||||||
|
private boolean initialized = false;
|
||||||
|
|
||||||
|
public void initialize() {
|
||||||
|
|
||||||
|
// 선박별로 그룹화 (sig_src_cd + target_id)
|
||||||
|
Map<String, List<VesselData>> groupedData = dataHolder.getAllVesselData().stream()
|
||||||
|
.collect(Collectors.groupingBy(VesselData::getVesselKey));
|
||||||
|
|
||||||
|
// 각 그룹 내에서 시간순 정렬
|
||||||
|
groupedData.forEach((key, dataList) ->
|
||||||
|
dataList.sort(Comparator.comparing(VesselData::getMessageTime)));
|
||||||
|
|
||||||
|
groupIterator = groupedData.entrySet().iterator();
|
||||||
|
currentChunk = new ArrayList<>();
|
||||||
|
|
||||||
|
log.info("Initialized track reader with {} vessel groups", groupedData.size());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<VesselData> read() {
|
||||||
|
if (!initialized) {
|
||||||
|
initialize();
|
||||||
|
initialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 현재 청크에서 데이터 반환
|
||||||
|
if (chunkIterator != null && chunkIterator.hasNext()) {
|
||||||
|
return chunkIterator.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 새로운 청크 생성
|
||||||
|
currentChunk.clear();
|
||||||
|
int count = 0;
|
||||||
|
|
||||||
|
while (groupIterator.hasNext() && count < chunkSize) {
|
||||||
|
Map.Entry<String, List<VesselData>> entry = groupIterator.next();
|
||||||
|
currentChunk.add(entry.getValue());
|
||||||
|
count++;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (currentChunk.isEmpty()) {
|
||||||
|
return null; // 더 이상 데이터 없음
|
||||||
|
}
|
||||||
|
|
||||||
|
chunkIterator = currentChunk.iterator();
|
||||||
|
return chunkIterator.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
@ -0,0 +1,181 @@
|
|||||||
|
package gc.mda.signal_batch.batch.reader;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.StepScope;
|
||||||
|
import org.springframework.batch.core.partition.support.Partitioner;
|
||||||
|
import org.springframework.batch.item.ExecutionContext;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class PartitionedReader {
|
||||||
|
|
||||||
|
@Qualifier("collectJdbcTemplate")
|
||||||
|
private final JdbcTemplate collectJdbcTemplate;
|
||||||
|
|
||||||
|
@StepScope
|
||||||
|
public Partitioner dayPartitioner(@Value("#{jobParameters['processingDate']}") LocalDate processingDate) {
|
||||||
|
return gridSize -> {
|
||||||
|
Map<String, ExecutionContext> partitions = new HashMap<>();
|
||||||
|
|
||||||
|
// 파티션 존재 확인
|
||||||
|
String partitionName = generatePartitionName(processingDate);
|
||||||
|
|
||||||
|
if (checkPartitionExists(partitionName)) {
|
||||||
|
// 시간대별로 파티션 생성 (gridSize 고려)
|
||||||
|
int hoursPerPartition = 24 / Math.min(gridSize, 24);
|
||||||
|
int actualPartitions = Math.min(gridSize, 24);
|
||||||
|
|
||||||
|
for (int i = 0; i < actualPartitions; i++) {
|
||||||
|
ExecutionContext context = new ExecutionContext();
|
||||||
|
|
||||||
|
int startHour = i * hoursPerPartition;
|
||||||
|
int endHour = (i == actualPartitions - 1) ? 24 : (i + 1) * hoursPerPartition;
|
||||||
|
|
||||||
|
context.put("partition", partitionName);
|
||||||
|
context.put("startTime", processingDate.atTime(startHour, 0));
|
||||||
|
context.put("endTime", processingDate.atTime(endHour, 0));
|
||||||
|
context.put("partitionIndex", i);
|
||||||
|
|
||||||
|
partitions.put("partition-" + i, context);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Created {} partitions for table {}", partitions.size(), partitionName);
|
||||||
|
|
||||||
|
} else {
|
||||||
|
// 파티션이 없는 경우 처리
|
||||||
|
log.warn("Partition {} does not exist. Creating fallback partition.", partitionName);
|
||||||
|
|
||||||
|
// 동적으로 파티션 생성 시도
|
||||||
|
if (createMissingPartition(processingDate)) {
|
||||||
|
// 재귀 호출로 다시 파티셔닝
|
||||||
|
return dayPartitioner(processingDate).partition(gridSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 실패 시 단일 파티션으로 처리
|
||||||
|
ExecutionContext context = new ExecutionContext();
|
||||||
|
context.put("partition", ""); // 전체 테이블에서 날짜 조건으로 읽기
|
||||||
|
context.put("startTime", processingDate.atStartOfDay());
|
||||||
|
context.put("endTime", processingDate.plusDays(1).atStartOfDay());
|
||||||
|
context.put("partitionIndex", 0);
|
||||||
|
partitions.put("partition-fallback", context);
|
||||||
|
}
|
||||||
|
|
||||||
|
return partitions;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 시간 범위 기반 파티셔너
|
||||||
|
*/
|
||||||
|
@StepScope
|
||||||
|
public Partitioner rangePartitioner(
|
||||||
|
@Value("#{jobParameters['startTime']}") LocalDateTime startTime,
|
||||||
|
@Value("#{jobParameters['endTime']}") LocalDateTime endTime,
|
||||||
|
@Value("#{jobParameters['partitionCount']}") Integer partitionCount) {
|
||||||
|
|
||||||
|
return gridSize -> {
|
||||||
|
Map<String, ExecutionContext> partitions = new HashMap<>();
|
||||||
|
|
||||||
|
// 날짜별로 그룹화
|
||||||
|
Map<LocalDate, List<LocalDateTime>> dateGroups = groupByDate(startTime, endTime);
|
||||||
|
|
||||||
|
int partitionIndex = 0;
|
||||||
|
for (Map.Entry<LocalDate, List<LocalDateTime>> entry : dateGroups.entrySet()) {
|
||||||
|
LocalDate date = entry.getKey();
|
||||||
|
String partitionName = findPartitionForDate(date);
|
||||||
|
|
||||||
|
// 각 날짜에 대해 시간 범위 분할
|
||||||
|
LocalDateTime dayStart = entry.getValue().get(0);
|
||||||
|
LocalDateTime dayEnd = entry.getValue().get(1);
|
||||||
|
|
||||||
|
long totalMinutes = java.time.Duration.between(dayStart, dayEnd).toMinutes();
|
||||||
|
int subPartitions = Math.max(1, (int)(totalMinutes / 60)); // 시간 단위로 분할
|
||||||
|
|
||||||
|
for (int i = 0; i < subPartitions; i++) {
|
||||||
|
ExecutionContext context = new ExecutionContext();
|
||||||
|
|
||||||
|
LocalDateTime partStart = dayStart.plusHours(i);
|
||||||
|
LocalDateTime partEnd = (i == subPartitions - 1) ? dayEnd : dayStart.plusHours(i + 1);
|
||||||
|
|
||||||
|
context.put("startTime", partStart);
|
||||||
|
context.put("endTime", partEnd);
|
||||||
|
context.put("partition", partitionName != null ? partitionName : "");
|
||||||
|
context.put("partitionIndex", partitionIndex++);
|
||||||
|
|
||||||
|
partitions.put("range-partition-" + partitionIndex, context);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Created {} range partitions for period {} to {}",
|
||||||
|
partitions.size(), startTime, endTime);
|
||||||
|
|
||||||
|
return partitions;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private String generatePartitionName(LocalDate date) {
|
||||||
|
// YYMMDD 형식으로 변경
|
||||||
|
return "sig_test_" + date.format(DateTimeFormatter.ofPattern("yyMMdd"));
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean checkPartitionExists(String partitionName) {
|
||||||
|
String sql = "SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'signal' AND tablename = ?)";
|
||||||
|
return Boolean.TRUE.equals(collectJdbcTemplate.queryForObject(sql, Boolean.class, partitionName));
|
||||||
|
}
|
||||||
|
|
||||||
|
private String findPartitionForDate(LocalDate date) {
|
||||||
|
String partitionName = generatePartitionName(date);
|
||||||
|
return checkPartitionExists(partitionName) ? partitionName : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean createMissingPartition(LocalDate date) {
|
||||||
|
try {
|
||||||
|
String partitionName = generatePartitionName(date);
|
||||||
|
String sql = String.format("""
|
||||||
|
CREATE TABLE IF NOT EXISTS signal.%s PARTITION OF signal.sig_test
|
||||||
|
FOR VALUES FROM ('%s') TO ('%s')
|
||||||
|
""", partitionName, date, date.plusDays(1));
|
||||||
|
|
||||||
|
collectJdbcTemplate.execute(sql);
|
||||||
|
log.info("Successfully created missing partition: {}", partitionName);
|
||||||
|
return true;
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to create missing partition for date: {}", date, e);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private Map<LocalDate, List<LocalDateTime>> groupByDate(LocalDateTime start, LocalDateTime end) {
|
||||||
|
Map<LocalDate, List<LocalDateTime>> groups = new HashMap<>();
|
||||||
|
|
||||||
|
LocalDate currentDate = start.toLocalDate();
|
||||||
|
while (!currentDate.isAfter(end.toLocalDate())) {
|
||||||
|
LocalDateTime dayStart = currentDate.equals(start.toLocalDate()) ?
|
||||||
|
start : currentDate.atStartOfDay();
|
||||||
|
LocalDateTime dayEnd = currentDate.equals(end.toLocalDate()) ?
|
||||||
|
end : currentDate.plusDays(1).atStartOfDay();
|
||||||
|
|
||||||
|
groups.put(currentDate, List.of(dayStart, dayEnd));
|
||||||
|
currentDate = currentDate.plusDays(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return groups;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,388 @@
|
|||||||
|
package gc.mda.signal_batch.batch.reader;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.item.database.JdbcCursorItemReader;
|
||||||
|
import org.springframework.batch.item.database.JdbcPagingItemReader;
|
||||||
|
import org.springframework.batch.item.database.Order;
|
||||||
|
import org.springframework.batch.item.database.support.PostgresPagingQueryProvider;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.jdbc.core.RowMapper;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import jakarta.annotation.PostConstruct;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.DatabaseMetaData;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
 * Factory for Spring Batch JDBC readers over the partitioned
 * {@code signal.sig_test_*} tables in the "collect" PostgreSQL datasource.
 * Provides a latest-position cursor reader (DISTINCT ON), a full-data cursor
 * reader, and a paging reader, plus partition-name resolution and a shared
 * row mapper.
 */
@Slf4j
@Component
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class VesselDataReader {

    private final DataSource collectDataSource;
    private final JdbcTemplate collectJdbcTemplate;

    // Daily partition tables are suffixed with the date in yyMMdd form.
    private static final DateTimeFormatter PARTITION_FORMATTER = DateTimeFormatter.ofPattern("yyMMdd");

    public VesselDataReader(
            @Qualifier("collectDataSource") DataSource collectDataSource,
            @Qualifier("collectJdbcTemplate") JdbcTemplate collectJdbcTemplate) {
        this.collectDataSource = collectDataSource;
        this.collectJdbcTemplate = collectJdbcTemplate;
    }

    // Log which datasource this reader is wired to, once at startup.
    @PostConstruct
    public void init() {
        logDataSourceInfo();
    }

    /**
     * Optimized reader that fetches only the latest position per vessel,
     * using PostgreSQL DISTINCT ON over (sig_src_cd, target_id).
     *
     * @param partition explicit partition table name, or null/empty to auto-resolve
     */
    public JdbcCursorItemReader<VesselData> vesselLatestPositionReader(
            LocalDateTime startTime,
            LocalDateTime endTime,
            String partition) {

        log.info("Creating optimized latest position reader from {} to {}", startTime, endTime);

        JdbcCursorItemReader<VesselData> reader = new JdbcCursorItemReader<VesselData>() {
            @Override
            protected void openCursor(Connection con) {
                try {
                    // Set search_path so unqualified names resolve to the signal schema.
                    try (var stmt = con.createStatement()) {
                        stmt.execute("SET search_path TO signal, public");
                    }
                } catch (Exception e) {
                    log.error("Error setting search_path in cursor", e);
                    throw new RuntimeException("Failed to set search_path", e);
                }
                super.openCursor(con);
            }
        };

        reader.setDataSource(collectDataSource);
        reader.setName("vesselLatestPositionReader");

        // Performance tuning.
        reader.setFetchSize(10000); // smaller fetch size: only latest positions are read
        reader.setMaxRows(0);
        reader.setQueryTimeout(300);
        reader.setVerifyCursorPosition(false);
        reader.setUseSharedExtendedConnection(false);
        reader.setSaveState(false);

        String tableName = determineTableName(partition, startTime);
        log.info("Using table: {}", tableName);

        // Latest position per vessel via DISTINCT ON; ORDER BY must lead with the
        // DISTINCT ON columns, message_time DESC picks the newest row per vessel.
        String sql = """
            SELECT DISTINCT ON (sig_src_cd, target_id)
                message_time, real_time, sig_src_cd, target_id,
                lat, lon, sog, cog, heading, ship_nm, ship_ty, rot, posacc,
                sensor_id, base_st_id, mode, gps_sttus, battery_sttus,
                vts_cd, mmsi, vpass_id, ship_no
            FROM signal.%s
            WHERE message_time >= ? AND message_time < ?
            AND sig_src_cd != '000005'
            AND lat BETWEEN -90 AND 90
            AND lon BETWEEN -180 AND 180
            ORDER BY sig_src_cd, target_id, message_time DESC
            """.formatted(tableName);

        reader.setSql(sql);

        reader.setPreparedStatementSetter(ps -> {
            ps.setObject(1, Timestamp.valueOf(startTime));
            ps.setObject(2, Timestamp.valueOf(endTime));
        });

        reader.setRowMapper(new OptimizedVesselDataRowMapper());

        // Log the expected record count (best-effort; failures are non-fatal).
        try {
            Integer expectedCount = collectJdbcTemplate.queryForObject(
                """
                SELECT COUNT(*) FROM (
                    SELECT DISTINCT ON (sig_src_cd, target_id) 1
                    FROM signal.%s
                    WHERE message_time >= ? AND message_time < ?
                    AND sig_src_cd != '000005'
                ) t
                """.formatted(tableName),
                Integer.class,
                startTime, endTime
            );
            log.info("Expected record count (latest positions only): {}", expectedCount);
        } catch (Exception e) {
            log.warn("Could not get expected count: {}", e.getMessage());
        }

        return reader;
    }

    /**
     * Full-data cursor reader (every position row) — used when all points are
     * needed, e.g. for tile aggregation.
     */
    public JdbcCursorItemReader<VesselData> vesselDataCursorReader(
            LocalDateTime startTime,
            LocalDateTime endTime,
            String partition) {

        log.info("Creating cursor reader for partition: {} from {} to {}",
                partition, startTime, endTime);

        JdbcCursorItemReader<VesselData> reader = new JdbcCursorItemReader<VesselData>() {
            @Override
            protected void openCursor(Connection con) {
                try {
                    // Set search_path so unqualified names resolve to the signal schema.
                    try (var stmt = con.createStatement()) {
                        stmt.execute("SET search_path TO signal, public");
                    }
                } catch (Exception e) {
                    log.error("Error setting search_path in cursor", e);
                    throw new RuntimeException("Failed to set search_path", e);
                }
                super.openCursor(con);
            }
        };

        reader.setDataSource(collectDataSource);
        reader.setName("vesselDataCursorReader");

        // Larger fetch size and timeout: this reader streams the whole range.
        reader.setFetchSize(50000);
        reader.setMaxRows(0);
        reader.setQueryTimeout(1800);
        reader.setVerifyCursorPosition(false);
        reader.setUseSharedExtendedConnection(false);
        reader.setSaveState(false);

        String tableName = determineTableName(partition, startTime);
        log.info("Determined table name: {} for startTime: {}", tableName, startTime);

        // Full-range query (for tile aggregation and similar consumers).
        // NOTE(review): the PARALLEL hint comment has no effect in stock PostgreSQL.
        StringBuilder sql = new StringBuilder();
        sql.append("SELECT /*+ PARALLEL(8) */ ");
        sql.append("message_time, real_time, sig_src_cd, target_id, ");
        sql.append("lat, lon, sog, cog, heading, ship_nm, ship_ty, rot, posacc, ");
        sql.append("sensor_id, base_st_id, mode, gps_sttus, battery_sttus, ");
        sql.append("vts_cd, mmsi, vpass_id, ship_no ");
        sql.append("FROM signal.").append(tableName).append(" ");
        sql.append("WHERE message_time >= ? AND message_time < ? AND sig_src_cd != '000005' ");
        sql.append("ORDER BY message_time, sig_src_cd, target_id");

        reader.setSql(sql.toString());

        reader.setPreparedStatementSetter(ps -> {
            ps.setTimestamp(1, Timestamp.valueOf(startTime));
            ps.setTimestamp(2, Timestamp.valueOf(endTime));
        });

        reader.setRowMapper(new OptimizedVesselDataRowMapper());

        return reader;
    }

    /**
     * Paging reader for small data sets; pages by (message_time, sig_src_cd,
     * target_id) sort keys.
     */
    public JdbcPagingItemReader<VesselData> vesselDataPagingReader(
            LocalDateTime startTime,
            LocalDateTime endTime,
            String partition) {

        JdbcPagingItemReader<VesselData> reader = new JdbcPagingItemReader<>();
        reader.setDataSource(collectDataSource);
        reader.setPageSize(10000);
        reader.setFetchSize(10000);
        reader.setRowMapper(new OptimizedVesselDataRowMapper());

        String tableName = determineTableName(partition, startTime);

        PostgresPagingQueryProvider queryProvider = new PostgresPagingQueryProvider();
        queryProvider.setSelectClause("SELECT message_time, real_time, sig_src_cd, target_id, " +
                "lat, lon, sog, cog, heading, ship_nm, ship_ty, rot, posacc, " +
                "sensor_id, base_st_id, mode, gps_sttus, battery_sttus, " +
                "vts_cd, mmsi, vpass_id, ship_no ");

        queryProvider.setFromClause("FROM signal." + tableName);
        queryProvider.setWhereClause("WHERE message_time >= :startTime AND message_time < :endTime and sig_src_cd != '000005'");

        // Sort keys must form a unique ordering for stable paging.
        Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("message_time", Order.ASCENDING);
        sortKeys.put("sig_src_cd", Order.ASCENDING);
        sortKeys.put("target_id", Order.ASCENDING);
        queryProvider.setSortKeys(sortKeys);

        reader.setQueryProvider(queryProvider);

        Map<String, Object> parameterValues = new HashMap<>();
        parameterValues.put("startTime", startTime);
        parameterValues.put("endTime", endTime);
        reader.setParameterValues(parameterValues);

        try {
            reader.afterPropertiesSet();
        } catch (Exception e) {
            log.error("Failed to initialize JdbcPagingItemReader", e);
            throw new RuntimeException("Reader initialization failed", e);
        }

        return reader;
    }

    /**
     * Resolves the partition table name: an explicit name wins; otherwise the
     * daily table for startTime (or now) is used when it exists, falling back
     * to the base table "sig_test".
     */
    private String determineTableName(String partition, LocalDateTime startTime) {
        if (partition != null && !partition.isEmpty()) {
            log.debug("Using specified partition: {}", partition);
            return partition;
        }

        LocalDateTime targetTime = startTime != null ? startTime : LocalDateTime.now();
        String partitionSuffix = targetTime.format(PARTITION_FORMATTER);
        String tableName = "sig_test_" + partitionSuffix;

        try {
            Boolean exists = collectJdbcTemplate.queryForObject(
                "SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'signal' AND tablename = ?)",
                Boolean.class,
                tableName
            );

            if (Boolean.TRUE.equals(exists)) {
                log.info("Auto-selected partition table: {}", tableName);
                return tableName;
            } else {
                log.warn("Partition table {} does not exist, using sig_test", tableName);
                return "sig_test";
            }
        } catch (Exception e) {
            log.error("Error checking partition table existence", e);
            return "sig_test";
        }
    }

    /**
     * Maps a result row to VesselData by column index; indexes follow the
     * SELECT column order used by every reader in this class.
     */
    public static class OptimizedVesselDataRowMapper implements RowMapper<VesselData> {
        @Override
        public VesselData mapRow(ResultSet rs, int rowNum) throws SQLException {
            VesselData data = new VesselData();

            Timestamp messageTime = rs.getTimestamp(1);
            if (messageTime != null) {
                data.setMessageTime(messageTime.toLocalDateTime());
            }

            Timestamp realTime = rs.getTimestamp(2);
            if (realTime != null) {
                data.setRealTime(realTime.toLocalDateTime());
            }

            data.setSigSrcCd(rs.getString(3));
            data.setTargetId(rs.getString(4));
            data.setLat(rs.getDouble(5));
            data.setLon(rs.getDouble(6));
            data.setSog(rs.getBigDecimal(7));
            data.setCog(rs.getBigDecimal(8));

            // Columns that may arrive as NUMERIC/string go through the lenient converter.
            data.setHeading(getIntegerFromNumeric(rs, 9));
            data.setShipNm(rs.getString(10));
            data.setShipTy(rs.getString(11));
            data.setRot(getIntegerFromNumeric(rs, 12));
            data.setPosacc(getIntegerFromNumeric(rs, 13));
            data.setSensorId(rs.getString(14));
            data.setBaseStId(rs.getString(15));
            data.setMode(getIntegerFromNumeric(rs, 16));
            data.setGpsSttus(getIntegerFromNumeric(rs, 17));
            data.setBatterySttus(getIntegerFromNumeric(rs, 18));
            data.setVtsCd(rs.getString(19));
            data.setMmsi(rs.getString(20));
            data.setVpassId(rs.getString(21));
            data.setShipNo(rs.getString(22));

            return data;
        }

        /**
         * Lenient Integer conversion: accepts NUMERIC, integer, any Number, or a
         * numeric string; returns null for NULL or unparseable values.
         */
        private Integer getIntegerFromNumeric(ResultSet rs, int columnIndex) throws SQLException {
            Object value = rs.getObject(columnIndex);
            if (value == null || rs.wasNull()) {
                return null;
            }

            if (value instanceof java.math.BigDecimal) {
                return ((java.math.BigDecimal) value).intValue();
            } else if (value instanceof Integer) {
                return (Integer) value;
            } else if (value instanceof Number) {
                return ((Number) value).intValue();
            } else if (value instanceof String) {
                try {
                    return Integer.parseInt((String) value);
                } catch (NumberFormatException e) {
                    return null;
                }
            }

            return null;
        }
    }

    // Best-effort startup logging of the wired datasource.
    private void logDataSourceInfo() {
        try {
            String info = getDataSourceInfo(collectDataSource);
            log.info("VesselDataReader initialized with DataSource: {}", info);
        } catch (Exception e) {
            log.error("Failed to get DataSource info", e);
        }
    }

    /** Describes the datasource (URL/user/db/schema) for diagnostics; never throws. */
    private String getDataSourceInfo(DataSource dataSource) {
        try (Connection conn = dataSource.getConnection()) {
            DatabaseMetaData meta = conn.getMetaData();
            String url = meta.getURL();
            String user = meta.getUserName();
            String db = conn.getCatalog();
            String schema = conn.getSchema();
            return String.format("URL=%s, User=%s, DB=%s, Schema=%s", url, user, db, schema);
        } catch (Exception e) {
            return "Unknown (" + e.getMessage() + ")";
        }
    }

    // Manual connectivity smoke test against a given table; kept for debugging.
    @SuppressWarnings("unused")
    private void testConnection(String tableName) {
        try {
            try (Connection conn = collectDataSource.getConnection()) {
                try (var stmt = conn.createStatement()) {
                    stmt.execute("SET search_path TO signal, public");
                }

                String testSql = "SELECT COUNT(*) FROM signal." + tableName + " LIMIT 1";
                try (var stmt = conn.createStatement();
                     var rs = stmt.executeQuery(testSql)) {
                    if (rs.next()) {
                        log.info("Direct connection test successful, count: {}", rs.getInt(1));
                    }
                }
            }
        } catch (Exception e) {
            log.error("Connection test failed", e);
        }
    }
}
|
||||||
@ -0,0 +1,252 @@
|
|||||||
|
package gc.mda.signal_batch.batch.writer;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalDetectionResult;
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalSegment;
import lombok.extern.slf4j.Slf4j;

import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import java.sql.Timestamp;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Writer that persists abnormal vessel tracks into a dedicated table
 * ({@code signal.t_abnormal_tracks}) and maintains per-type daily statistics
 * ({@code signal.t_abnormal_track_stats}).
 *
 * <p>Only items for which {@link AbnormalDetectionResult#hasAbnormalities()}
 * is true are written; everything else is silently ignored by this writer.
 */
@Slf4j
@Component
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class AbnormalTrackWriter implements ItemWriter<AbnormalDetectionResult> {

    @Autowired
    @Qualifier("queryJdbcTemplate")
    private JdbcTemplate jdbcTemplate;

    // Unix timestamp only - configuration removed
    // Mapper serializes java.time values as ISO-8601 strings, not numeric timestamps.
    private final ObjectMapper objectMapper = new ObjectMapper()
            .registerModule(new JavaTimeModule())
            .disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);

    // Name of the currently running batch job; used to infer the source table.
    private String jobName;

    /**
     * Sets the job name explicitly. Called by CompositeTrackWriter when this
     * writer is used as a delegate (its own @BeforeStep is then skipped).
     *
     * @param jobName batch job name used by {@link #inferSourceTableFromJob()}
     */
    public void setJobName(String jobName) {
        this.jobName = jobName;
        log.debug("AbnormalTrackWriter: Job name set to {}", jobName);
    }

    /**
     * Resolves the job name from the step execution, but only when it was not
     * already injected via {@link #setJobName(String)}.
     *
     * @param stepExecution current step execution supplied by Spring Batch
     */
    @BeforeStep
    public void beforeStep(StepExecution stepExecution) {
        // If CompositeTrackWriter already set the name, keep it.
        if (this.jobName == null) {
            // Only when invoked directly: read the job name from the execution context.
            this.jobName = stepExecution.getJobExecution().getJobInstance().getJobName();
            log.debug("Job name from StepExecution: {}", jobName);
        }
    }

    /**
     * Writes the abnormal subset of the chunk: persists the tracks and then
     * updates the aggregated daily statistics, all in one transaction.
     *
     * @param items chunk of detection results; items without abnormalities are filtered out
     * @throws Exception propagated from the underlying JDBC operations
     */
    @Override
    @Transactional
    public void write(Chunk<? extends AbnormalDetectionResult> items) throws Exception {
        List<AbnormalDetectionResult> abnormalResults = items.getItems().stream()
                .filter(AbnormalDetectionResult::hasAbnormalities)
                .collect(Collectors.toList());

        if (abnormalResults.isEmpty()) {
            return;
        }

        // 1. Persist abnormal tracks
        saveAbnormalTracks(abnormalResults);

        // 2. Update statistics
        updateAbnormalStats(abnormalResults);

        log.info("비정상 궤적 저장 완료: {} 건", abnormalResults.size());
    }

    /**
     * Batch-upserts abnormal tracks into signal.t_abnormal_tracks.
     * Tracks without geometry are skipped with a warning; JSON serialization
     * failures are logged and the affected row is dropped from the batch.
     */
    private void saveAbnormalTracks(List<AbnormalDetectionResult> results) {
        // Only track_geom is used as the geometry column.
        String geomColumn = "track_geom";

        String sql = String.format("""
            INSERT INTO signal.t_abnormal_tracks (
                sig_src_cd, target_id, time_bucket, %s,
                abnormal_type, abnormal_reason, distance_nm, avg_speed,
                max_speed, point_count, source_table
            ) VALUES (?, ?, ?, public.ST_GeomFromText(?::text, 4326), ?, ?::jsonb, ?, ?, ?, ?, ?)
            ON CONFLICT (sig_src_cd, target_id, time_bucket, source_table)
            DO UPDATE SET
                %s = EXCLUDED.%s,
                abnormal_type = EXCLUDED.abnormal_type,
                abnormal_reason = EXCLUDED.abnormal_reason,
                distance_nm = EXCLUDED.distance_nm,
                avg_speed = EXCLUDED.avg_speed,
                max_speed = EXCLUDED.max_speed,
                point_count = EXCLUDED.point_count,
                detected_at = NOW()
            """, geomColumn, geomColumn, geomColumn);

        List<Object[]> batchArgs = new ArrayList<>();

        for (AbnormalDetectionResult result : results) {
            VesselTrack track = result.getOriginalTrack();
            List<AbnormalSegment> segments = result.getAbnormalSegments();

            // Primary abnormality type: the first detected segment's type.
            String mainAbnormalType = segments.get(0).getType();

            // Build the abnormal-reason JSON payload from all segments.
            Map<String, Object> abnormalReason = new HashMap<>();
            abnormalReason.put("segments", segments.stream()
                    .map(seg -> {
                        Map<String, Object> segMap = new HashMap<>();
                        segMap.put("type", seg.getType());
                        segMap.put("description", seg.getDescription());
                        segMap.put("actualValue", seg.getActualValue());
                        segMap.put("threshold", seg.getThreshold());
                        segMap.put("details", seg.getDetails());
                        return segMap;
                    })
                    .collect(Collectors.toList()));

            try {
                String reasonJson = objectMapper.writeValueAsString(abnormalReason);
                // Only track_geom is used (expected as WKT text for ST_GeomFromText).
                String geomWkt = track.getTrackGeom();

                if (geomWkt == null) {
                    log.warn("비정상 궤적에 geometry 데이터 없음: vessel={}", track.getVesselKey());
                    continue;
                }

                // Argument order must match the 11 placeholders in the INSERT above.
                batchArgs.add(new Object[] {
                    track.getSigSrcCd(),
                    track.getTargetId(),
                    Timestamp.valueOf(track.getTimeBucket()),
                    geomWkt,
                    mainAbnormalType,
                    reasonJson,
                    track.getDistanceNm(),
                    track.getAvgSpeed(),
                    track.getMaxSpeed(),
                    track.getPointCount(),
                    inferSourceTableFromJob()
                });
            } catch (Exception e) {
                // Best-effort: a failed row is skipped, the rest of the batch proceeds.
                log.error("비정상 궤적 JSON 변환 실패: vessel={}, error={}",
                        track.getVesselKey(), e.getMessage());
            }
        }

        if (!batchArgs.isEmpty()) {
            jdbcTemplate.batchUpdate(sql, batchArgs);
            log.info("비정상 궤적 DB 저장: {} 건", batchArgs.size());
        }
    }

    /**
     * Aggregates per-type statistics for today's date and upserts them into
     * signal.t_abnormal_track_stats. On conflict the counts are accumulated
     * and the average deviation is recomputed as a weighted mean.
     */
    private void updateAbnormalStats(List<AbnormalDetectionResult> results) {
        LocalDate today = LocalDate.now();

        // Aggregate statistics per abnormal type.
        Map<String, Stats> typeStats = new HashMap<>();

        for (AbnormalDetectionResult result : results) {
            for (AbnormalSegment segment : result.getAbnormalSegments()) {
                typeStats.computeIfAbsent(segment.getType(), k -> new Stats())
                        .add(result.getOriginalTrack(), segment);
            }
        }

        // Upsert into the stats table.
        String sql = """
            INSERT INTO signal.t_abnormal_track_stats (
                stat_date, abnormal_type, vessel_count, track_count,
                total_points, avg_deviation, max_deviation
            ) VALUES (?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT (stat_date, abnormal_type)
            DO UPDATE SET
                vessel_count = t_abnormal_track_stats.vessel_count + EXCLUDED.vessel_count,
                track_count = t_abnormal_track_stats.track_count + EXCLUDED.track_count,
                total_points = t_abnormal_track_stats.total_points + EXCLUDED.total_points,
                avg_deviation = (t_abnormal_track_stats.avg_deviation * t_abnormal_track_stats.track_count +
                                EXCLUDED.avg_deviation * EXCLUDED.track_count) /
                               (t_abnormal_track_stats.track_count + EXCLUDED.track_count),
                max_deviation = GREATEST(t_abnormal_track_stats.max_deviation, EXCLUDED.max_deviation),
                updated_at = NOW()
            """;

        for (Map.Entry<String, Stats> entry : typeStats.entrySet()) {
            Stats stats = entry.getValue();
            jdbcTemplate.update(sql,
                    today,
                    entry.getKey(),
                    stats.vesselIds.size(),
                    stats.trackCount,
                    stats.totalPoints,
                    stats.getAvgDeviation(),
                    stats.maxDeviation
            );
        }

        log.info("비정상 궤적 통계 업데이트: {} 개 유형", typeStats.size());
    }

    /**
     * Determines the source_table value from the current job name.
     * Falls back to the 5-minute table when the job name is unknown.
     *
     * @return one of t_vessel_tracks_daily / t_vessel_tracks_hourly / t_vessel_tracks_5min
     */
    private String inferSourceTableFromJob() {
        if (jobName == null) {
            log.warn("Job name is null, defaulting to 5min table");
            return "t_vessel_tracks_5min";
        }

        // Decision based on job name substring.
        if (jobName.toLowerCase().contains("daily")) {
            return "t_vessel_tracks_daily";
        } else if (jobName.toLowerCase().contains("hourly")) {
            return "t_vessel_tracks_hourly";
        } else {
            return "t_vessel_tracks_5min";
        }
    }

    /**
     * Mutable per-type accumulator for abnormal-track statistics.
     */
    private static class Stats {
        // NOTE(review): vesselIds is a List, so the same vessel is counted once per
        // abnormal segment; if vessel_count is meant to be distinct vessels, a Set
        // may be intended — confirm against the t_abnormal_track_stats semantics.
        private final List<String> vesselIds = new ArrayList<>();
        private int trackCount = 0;
        private int totalPoints = 0;
        // Running sum of (actualValue - threshold) across segments.
        private double sumDeviation = 0;
        private double maxDeviation = 0;

        // Accumulates one track/segment pair into this bucket.
        void add(VesselTrack track, AbnormalSegment segment) {
            vesselIds.add(track.getVesselKey());
            trackCount++;
            totalPoints += track.getPointCount();

            double deviation = segment.getActualValue() - segment.getThreshold();
            sumDeviation += deviation;
            maxDeviation = Math.max(maxDeviation, deviation);
        }

        // Mean deviation; 0 when no tracks were accumulated (avoids division by zero).
        double getAvgDeviation() {
            return trackCount > 0 ? sumDeviation / trackCount : 0;
        }
    }
}
|
||||||
@ -0,0 +1,84 @@
|
|||||||
|
package gc.mda.signal_batch.batch.writer;

import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalDetectionResult;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ItemWriter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;

import java.util.ArrayList;
import java.util.List;


/**
 * Composite writer that splits incoming detection results into normal and
 * abnormal tracks and delegates each group to its dedicated writer.
 *
 * <p>Normal (or corrected) tracks go to {@link VesselTrackBulkWriter}; results
 * carrying abnormalities are forwarded to {@link AbnormalTrackWriter}.
 */
@Slf4j
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@RequiredArgsConstructor
public class CompositeTrackWriter implements ItemWriter<AbnormalDetectionResult> {

    private final VesselTrackBulkWriter vesselTrackBulkWriter;
    private final AbnormalTrackWriter abnormalTrackWriter;
    private final String targetTable;

    /**
     * Propagates the running job's name to the abnormal-track writer so it can
     * infer its source table, before the step starts.
     *
     * @param stepExecution current step execution supplied by Spring Batch
     */
    @BeforeStep
    public void beforeStep(StepExecution stepExecution) {
        final String jobName = stepExecution.getJobExecution().getJobInstance().getJobName();
        abnormalTrackWriter.setJobName(jobName);
        log.debug("CompositeTrackWriter: Job name = {}", jobName);
    }

    /**
     * Partitions the chunk into normal tracks and abnormal results, then hands
     * each non-empty group to the corresponding delegate writer.
     *
     * @param chunk detection results; null entries (processor produced no data) are skipped
     * @throws Exception propagated from the delegate writers
     */
    @Override
    public void write(Chunk<? extends AbnormalDetectionResult> chunk) throws Exception {
        final List<VesselTrack> normal = new ArrayList<>();
        final List<AbnormalDetectionResult> abnormal = new ArrayList<>();

        for (AbnormalDetectionResult detection : chunk) {
            // Processor returns null when it had no data for this item — skip.
            if (detection == null) {
                continue;
            }

            if (!detection.hasAbnormalities()) {
                // Fully normal track: keep the original as-is.
                normal.add(detection.getOriginalTrack());
                continue;
            }

            // Collect the abnormal result for the dedicated writer.
            abnormal.add(detection);

            // A corrected track (abnormal parts removed) is still stored as normal;
            // a null corrected track means the entire track was abnormal.
            final VesselTrack corrected = detection.getCorrectedTrack();
            if (corrected != null) {
                normal.add(corrected);
            } else {
                log.debug("비정상 궤적 전체 제외: vessel={}",
                        detection.getOriginalTrack().getVesselKey());
            }
        }

        if (!normal.isEmpty()) {
            dispatchNormalTracks(normal);
        }

        if (!abnormal.isEmpty()) {
            abnormalTrackWriter.write(new Chunk<>(abnormal));
            log.info("Wrote {} abnormal track results", abnormal.size());
        }
    }

    // Routes normal tracks to the bulk-writer method matching the configured target table.
    private void dispatchNormalTracks(List<VesselTrack> tracks) throws Exception {
        if ("hourly".equals(targetTable)) {
            vesselTrackBulkWriter.writeHourlyTracks(tracks);
        } else if ("daily".equals(targetTable)) {
            vesselTrackBulkWriter.writeDailyTracks(tracks);
        } else {
            throw new IllegalArgumentException("Unknown target table: " + targetTable);
        }
        log.info("Wrote {} normal tracks to {} table", tracks.size(), targetTable);
    }
}
|
||||||
@ -0,0 +1,702 @@
|
|||||||
|
package gc.mda.signal_batch.batch.writer;
|
||||||
|
|
||||||
|
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||||
|
import gc.mda.signal_batch.domain.gis.model.TileStatistics;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AreaStatisticsProcessor;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||||
|
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||||
|
import com.google.common.collect.Lists;
|
||||||
|
import jakarta.annotation.PostConstruct;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.postgresql.copy.CopyManager;
|
||||||
|
import org.postgresql.core.BaseConnection;
|
||||||
|
import org.springframework.batch.item.Chunk;
|
||||||
|
import org.springframework.batch.item.ItemWriter;
|
||||||
|
import org.springframework.beans.factory.DisposableBean;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
import org.springframework.util.StopWatch;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.io.*;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.concurrent.*;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class OptimizedBulkInsertWriter implements DisposableBean {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
public OptimizedBulkInsertWriter(
|
||||||
|
@Qualifier("queryDataSource") DataSource queryDataSource,
|
||||||
|
@Qualifier("queryJdbcTemplate") JdbcTemplate queryJdbcTemplate) {
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
this.queryJdbcTemplate = queryJdbcTemplate;
|
||||||
|
|
||||||
|
System.out.println("========================================");
|
||||||
|
System.out.println("!!! OptimizedBulkInsertWriter initialized !!!");
|
||||||
|
System.out.println("queryDataSource: " + queryDataSource);
|
||||||
|
System.out.println("queryJdbcTemplate DataSource: " + queryJdbcTemplate.getDataSource());
|
||||||
|
System.out.println("========================================");
|
||||||
|
}
|
||||||
|
|
||||||
|
private final ObjectMapper objectMapper = createObjectMapper();
|
||||||
|
|
||||||
|
private static ObjectMapper createObjectMapper() {
|
||||||
|
ObjectMapper mapper = new ObjectMapper();
|
||||||
|
mapper.registerModule(new JavaTimeModule());
|
||||||
|
mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
|
||||||
|
mapper.setDateFormat(new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"));
|
||||||
|
mapper.setTimeZone(java.util.TimeZone.getTimeZone("Asia/Seoul"));
|
||||||
|
return mapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Value("${vessel.batch.bulk-insert.batch-size:50000}")
|
||||||
|
private int batchSize;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.bulk-insert.parallel-threads:4}")
|
||||||
|
private int parallelThreads;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.bulk-insert.use-binary-copy:false}")
|
||||||
|
private boolean useBinaryCopy;
|
||||||
|
|
||||||
|
private volatile ExecutorService executorService;
|
||||||
|
|
||||||
|
|
||||||
|
private static final DateTimeFormatter TIMESTAMP_FORMATTER =
|
||||||
|
DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
@PostConstruct
|
||||||
|
public void init() {
|
||||||
|
initializeExecutorService();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ExecutorService 초기화 또는 재초기화
|
||||||
|
*/
|
||||||
|
private synchronized void initializeExecutorService() {
|
||||||
|
if (executorService == null || executorService.isShutdown() || executorService.isTerminated()) {
|
||||||
|
if (executorService != null && !executorService.isShutdown()) {
|
||||||
|
executorService.shutdown();
|
||||||
|
}
|
||||||
|
|
||||||
|
int threadCount = Math.max(8, Runtime.getRuntime().availableProcessors() * 2);
|
||||||
|
executorService = Executors.newFixedThreadPool(threadCount,
|
||||||
|
new ThreadFactoryBuilder()
|
||||||
|
.setNameFormat("bulk-insert-worker-%d")
|
||||||
|
.setDaemon(true) // 데몬 스레드로 설정하여 JVM 종료 시 자동 정리
|
||||||
|
.build());
|
||||||
|
|
||||||
|
log.info("ExecutorService initialized with {} threads", threadCount);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ExecutorService 상태 확인 및 필요시 재초기화
|
||||||
|
*/
|
||||||
|
private ExecutorService getHealthyExecutorService() {
|
||||||
|
if (executorService == null || executorService.isShutdown() || executorService.isTerminated()) {
|
||||||
|
log.warn("ExecutorService is not healthy, reinitializing...");
|
||||||
|
initializeExecutorService();
|
||||||
|
}
|
||||||
|
return executorService;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* TileStatistics Bulk Writer
|
||||||
|
*/
|
||||||
|
public ItemWriter<List<TileStatistics>> tileStatisticsBulkWriter() {
|
||||||
|
return new ItemWriter<List<TileStatistics>>() {
|
||||||
|
@Override
|
||||||
|
public void write(Chunk<? extends List<TileStatistics>> chunk) throws Exception {
|
||||||
|
List<TileStatistics> allStats = chunk.getItems().stream()
|
||||||
|
.flatMap(List::stream)
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
if (allStats.isEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
StopWatch stopWatch = new StopWatch();
|
||||||
|
stopWatch.start();
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 파티션별로 그룹화
|
||||||
|
Map<LocalDate, List<TileStatistics>> partitionedData =
|
||||||
|
allStats.stream()
|
||||||
|
.collect(Collectors.groupingBy(
|
||||||
|
stat -> stat.getTimeBucket().toLocalDate()
|
||||||
|
));
|
||||||
|
|
||||||
|
// 병렬 처리
|
||||||
|
List<CompletableFuture<BulkInsertResult>> futures = new ArrayList<>();
|
||||||
|
|
||||||
|
for (Map.Entry<LocalDate, List<TileStatistics>> entry : partitionedData.entrySet()) {
|
||||||
|
LocalDate date = entry.getKey();
|
||||||
|
List<TileStatistics> data = entry.getValue();
|
||||||
|
|
||||||
|
// 배치 크기로 분할
|
||||||
|
Lists.partition(data, batchSize).forEach(batch -> {
|
||||||
|
try {
|
||||||
|
ExecutorService healthyExecutor = getHealthyExecutorService();
|
||||||
|
CompletableFuture<BulkInsertResult> future = CompletableFuture.supplyAsync(() ->
|
||||||
|
insertTileStatisticsBatch(date, batch), healthyExecutor
|
||||||
|
);
|
||||||
|
futures.add(future);
|
||||||
|
} catch (RejectedExecutionException e) {
|
||||||
|
log.warn("RejectedExecutionException caught, falling back to synchronous processing");
|
||||||
|
BulkInsertResult result = insertTileStatisticsBatch(date, batch);
|
||||||
|
futures.add(CompletableFuture.completedFuture(result));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// 모든 작업 완료 대기
|
||||||
|
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
|
||||||
|
|
||||||
|
// 결과 집계
|
||||||
|
long totalInserted = futures.stream()
|
||||||
|
.map(CompletableFuture::join)
|
||||||
|
.mapToLong(result -> result.rowsInserted)
|
||||||
|
.sum();
|
||||||
|
|
||||||
|
stopWatch.stop();
|
||||||
|
log.info("Bulk inserted {} tile statistics in {} ms",
|
||||||
|
totalInserted, stopWatch.getTotalTimeMillis());
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
// CompletionException에서 실제 원인 확인
|
||||||
|
Throwable cause = e;
|
||||||
|
if (e instanceof CompletionException && e.getCause() != null) {
|
||||||
|
cause = e.getCause();
|
||||||
|
if (cause instanceof RuntimeException && cause.getCause() != null) {
|
||||||
|
cause = cause.getCause();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 중복 키 오류는 정상적인 상황
|
||||||
|
if (cause.getMessage() != null && cause.getMessage().contains("중복된 키")) {
|
||||||
|
log.debug("Duplicate key errors detected during bulk insert, using fallback UPSERT");
|
||||||
|
} else {
|
||||||
|
log.error("Bulk insert failed, falling back to batch insert", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 새로운 트랜잭션에서 재시도
|
||||||
|
try {
|
||||||
|
fallbackBatchInsert(allStats);
|
||||||
|
} catch (Exception fallbackEx) {
|
||||||
|
log.error("Fallback insert also failed", fallbackEx);
|
||||||
|
throw fallbackEx;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 개별 배치 처리
|
||||||
|
*/
|
||||||
|
private BulkInsertResult insertTileStatisticsBatch(LocalDate date,
|
||||||
|
List<TileStatistics> batch) {
|
||||||
|
|
||||||
|
String tableName = "t_tile_summary_" + date.format(DateTimeFormatter.BASIC_ISO_DATE);
|
||||||
|
|
||||||
|
// 파티션 존재 확인
|
||||||
|
if (!checkTableExists(tableName)) {
|
||||||
|
tableName = "t_tile_summary"; // 기본 테이블 사용
|
||||||
|
}
|
||||||
|
|
||||||
|
try (Connection conn = queryDataSource.getConnection()) {
|
||||||
|
BaseConnection baseConn = conn.unwrap(BaseConnection.class);
|
||||||
|
CopyManager copyManager = new CopyManager(baseConn);
|
||||||
|
|
||||||
|
if (useBinaryCopy) {
|
||||||
|
return binaryCopyInsert(copyManager, tableName, batch);
|
||||||
|
} else {
|
||||||
|
return textCopyInsert(copyManager, tableName, batch);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
if (e.getMessage() != null && e.getMessage().contains("duplicate key")) {
|
||||||
|
// 중복 키는 정상적인 상황이므로 DEBUG 레벨로 기록
|
||||||
|
log.debug("Duplicate entries detected for table {} - switching to UPSERT mode", tableName);
|
||||||
|
// 새로운 트랜잭션에서 UPSERT 실행
|
||||||
|
try {
|
||||||
|
return upsertBatch(tableName, batch);
|
||||||
|
} catch (Exception upsertEx) {
|
||||||
|
log.error("UPSERT also failed for table {}", tableName, upsertEx);
|
||||||
|
throw new RuntimeException("Both COPY and UPSERT failed", upsertEx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
log.error("Failed to insert batch for table {}", tableName, e);
|
||||||
|
throw new RuntimeException("Batch insert failed", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 텍스트 기반 COPY
|
||||||
|
*/
|
||||||
|
private BulkInsertResult textCopyInsert(CopyManager copyManager, String tableName,
|
||||||
|
List<TileStatistics> batch) throws Exception {
|
||||||
|
|
||||||
|
String copySql = String.format("""
|
||||||
|
COPY signal.%s (
|
||||||
|
tile_id, tile_level, time_bucket, vessel_count,
|
||||||
|
unique_vessels, total_points, avg_sog, max_sog,
|
||||||
|
vessel_density, created_at
|
||||||
|
) FROM STDIN
|
||||||
|
""", tableName);
|
||||||
|
|
||||||
|
try (PipedOutputStream pos = new PipedOutputStream();
|
||||||
|
PipedInputStream pis = new PipedInputStream(pos, 1024 * 1024); // 1MB 버퍼
|
||||||
|
PrintWriter writer = new PrintWriter(new BufferedWriter(
|
||||||
|
new OutputStreamWriter(pos, "UTF-8"), 65536))) { // 64KB 버퍼
|
||||||
|
|
||||||
|
// 비동기로 데이터 쓰기
|
||||||
|
CompletableFuture<Void> writerFuture = CompletableFuture.runAsync(() -> {
|
||||||
|
try {
|
||||||
|
for (TileStatistics stat : batch) {
|
||||||
|
writer.println(formatCsvLine(stat));
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
writer.close();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// COPY 실행
|
||||||
|
long rowsInserted = copyManager.copyIn(copySql, pis);
|
||||||
|
|
||||||
|
// Writer 완료 대기
|
||||||
|
writerFuture.join();
|
||||||
|
|
||||||
|
return new BulkInsertResult(rowsInserted, null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 바이너리 기반 COPY (더 빠름)
|
||||||
|
*/
|
||||||
|
private BulkInsertResult binaryCopyInsert(CopyManager copyManager, String tableName,
|
||||||
|
List<TileStatistics> batch) throws Exception {
|
||||||
|
|
||||||
|
String copySql = String.format("""
|
||||||
|
COPY signal.%s (
|
||||||
|
tile_id, tile_level, time_bucket, vessel_count,
|
||||||
|
unique_vessels, total_points, avg_sog, max_sog,
|
||||||
|
vessel_density, created_at
|
||||||
|
) FROM STDIN WITH (FORMAT BINARY)
|
||||||
|
""", tableName);
|
||||||
|
|
||||||
|
try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
|
||||||
|
// PostgreSQL 바이너리 형식 헤더
|
||||||
|
writeBinaryHeader(baos);
|
||||||
|
|
||||||
|
// 데이터 쓰기
|
||||||
|
for (TileStatistics stat : batch) {
|
||||||
|
writeBinaryRow(baos, stat);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 트레일러
|
||||||
|
writeBinaryTrailer(baos);
|
||||||
|
|
||||||
|
// COPY 실행
|
||||||
|
try (ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray())) {
|
||||||
|
long rowsInserted = copyManager.copyIn(copySql, bais);
|
||||||
|
return new BulkInsertResult(rowsInserted, null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* CSV 라인 포맷팅
|
||||||
|
*/
|
||||||
|
private String formatCsvLine(TileStatistics stat) {
|
||||||
|
String json = convertToJson(stat.getUniqueVessels());
|
||||||
|
// TEXT 형식에서는 탭과 줄바꿈만 이스케이프
|
||||||
|
String escapedJson = json.replace("\\", "\\\\")
|
||||||
|
.replace("\t", "\\t")
|
||||||
|
.replace("\n", "\\n")
|
||||||
|
.replace("\r", "\\r");
|
||||||
|
|
||||||
|
return String.format("%s\t%d\t%s\t%d\t%s\t%d\t%s\t%s\t%s\t%s",
|
||||||
|
stat.getTileId(),
|
||||||
|
stat.getTileLevel(),
|
||||||
|
stat.getTimeBucket().format(TIMESTAMP_FORMATTER),
|
||||||
|
stat.getVesselCount(),
|
||||||
|
escapedJson,
|
||||||
|
stat.getTotalPoints(),
|
||||||
|
stat.getAvgSog() != null ? stat.getAvgSog().toString() : "\\N",
|
||||||
|
stat.getMaxSog() != null ? stat.getMaxSog().toString() : "\\N",
|
||||||
|
stat.getVesselDensity() != null ? stat.getVesselDensity().toString() : "\\N",
|
||||||
|
LocalDateTime.now().format(TIMESTAMP_FORMATTER)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* CSV 특수문자 이스케이프
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
private String escapeCsv(String value) {
|
||||||
|
if (value == null) return "NULL";
|
||||||
|
return value.replace("\\", "\\\\")
|
||||||
|
.replace("|", "\\|")
|
||||||
|
.replace("\n", "\\n")
|
||||||
|
.replace("\r", "\\r")
|
||||||
|
.replace("\"", "\\\"");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* JSON 이스케이프
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
private String escapeJson(String json) {
|
||||||
|
if (json == null) return "NULL";
|
||||||
|
return json.replace("\\", "\\\\")
|
||||||
|
.replace("|", "\\|")
|
||||||
|
.replace("\n", "\\n")
|
||||||
|
.replace("\r", "\\r");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 객체를 JSON으로 변환
|
||||||
|
*/
|
||||||
|
private String convertToJson(Object obj) {
|
||||||
|
try {
|
||||||
|
if (obj == null) return "{}";
|
||||||
|
|
||||||
|
// 클래스 레벨의 objectMapper 사용
|
||||||
|
String json = objectMapper.writeValueAsString(obj);
|
||||||
|
|
||||||
|
// JSON 검증 로그
|
||||||
|
if (log.isDebugEnabled()) {
|
||||||
|
log.debug("Generated JSON: {}", json);
|
||||||
|
}
|
||||||
|
|
||||||
|
return json;
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error converting to JSON: {}", obj, e);
|
||||||
|
return "{}";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* UPSERT 배치 처리 (중복키 발생 시)
|
||||||
|
*/
|
||||||
|
private BulkInsertResult upsertBatch(String tableName, List<TileStatistics> batch) {
|
||||||
|
// 항상 tile_level도 포함하여 처리
|
||||||
|
String sql = String.format("""
|
||||||
|
INSERT INTO signal.%s (
|
||||||
|
tile_id, tile_level, time_bucket, vessel_count,
|
||||||
|
unique_vessels, total_points, avg_sog, max_sog,
|
||||||
|
vessel_density, created_at
|
||||||
|
) VALUES (?, ?, ?, ?, ?::jsonb, ?, ?, ?, ?, ?)
|
||||||
|
ON CONFLICT (tile_id, time_bucket, tile_level) DO UPDATE SET
|
||||||
|
vessel_count = EXCLUDED.vessel_count,
|
||||||
|
unique_vessels = EXCLUDED.unique_vessels,
|
||||||
|
total_points = EXCLUDED.total_points,
|
||||||
|
avg_sog = EXCLUDED.avg_sog,
|
||||||
|
max_sog = EXCLUDED.max_sog,
|
||||||
|
vessel_density = EXCLUDED.vessel_density,
|
||||||
|
created_at = EXCLUDED.created_at
|
||||||
|
""", tableName);
|
||||||
|
|
||||||
|
long totalUpdated = 0;
|
||||||
|
|
||||||
|
// 배치 크기로 분할
|
||||||
|
for (List<TileStatistics> partition : Lists.partition(batch, 1000)) {
|
||||||
|
List<Object[]> args = partition.stream()
|
||||||
|
.map(stat -> new Object[] {
|
||||||
|
stat.getTileId(),
|
||||||
|
stat.getTileLevel(),
|
||||||
|
Timestamp.valueOf(stat.getTimeBucket()),
|
||||||
|
stat.getVesselCount(),
|
||||||
|
convertToJson(stat.getUniqueVessels()),
|
||||||
|
stat.getTotalPoints(),
|
||||||
|
stat.getAvgSog(),
|
||||||
|
stat.getMaxSog(),
|
||||||
|
stat.getVesselDensity(),
|
||||||
|
Timestamp.valueOf(LocalDateTime.now())
|
||||||
|
})
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
int[] results = queryJdbcTemplate.batchUpdate(sql, args);
|
||||||
|
|
||||||
|
for (int result : results) {
|
||||||
|
totalUpdated += result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Upserted {} records in table {}", totalUpdated, tableName);
|
||||||
|
return new BulkInsertResult(totalUpdated, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fallback 배치 인서트
|
||||||
|
*/
|
||||||
|
private void fallbackBatchInsert(List<TileStatistics> stats) {
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_tile_summary (
|
||||||
|
tile_id, tile_level, time_bucket, vessel_count,
|
||||||
|
unique_vessels, total_points, avg_sog, max_sog,
|
||||||
|
vessel_density, created_at
|
||||||
|
) VALUES (?, ?, ?, ?, ?::jsonb, ?, ?, ?, ?, ?)
|
||||||
|
ON CONFLICT (tile_id, time_bucket, tile_level) DO UPDATE SET
|
||||||
|
vessel_count = EXCLUDED.vessel_count,
|
||||||
|
unique_vessels = EXCLUDED.unique_vessels,
|
||||||
|
total_points = EXCLUDED.total_points,
|
||||||
|
avg_sog = EXCLUDED.avg_sog,
|
||||||
|
max_sog = EXCLUDED.max_sog,
|
||||||
|
vessel_density = EXCLUDED.vessel_density,
|
||||||
|
created_at = EXCLUDED.created_at
|
||||||
|
""";
|
||||||
|
|
||||||
|
// 배치 크기로 분할
|
||||||
|
Lists.partition(stats, 1000).forEach(batch -> {
|
||||||
|
List<Object[]> args = batch.stream()
|
||||||
|
.map(stat -> new Object[] {
|
||||||
|
stat.getTileId(),
|
||||||
|
stat.getTileLevel(),
|
||||||
|
Timestamp.valueOf(stat.getTimeBucket()),
|
||||||
|
stat.getVesselCount(),
|
||||||
|
convertToJson(stat.getUniqueVessels()),
|
||||||
|
stat.getTotalPoints(),
|
||||||
|
stat.getAvgSog(),
|
||||||
|
stat.getMaxSog(),
|
||||||
|
stat.getVesselDensity(),
|
||||||
|
Timestamp.valueOf(LocalDateTime.now())
|
||||||
|
})
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
queryJdbcTemplate.batchUpdate(sql, args);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AreaStatistics Bulk Writer
|
||||||
|
*/
|
||||||
|
public ItemWriter<List<AreaStatisticsProcessor.AreaStatistics>>
|
||||||
|
areaStatisticsBulkWriter() {
|
||||||
|
|
||||||
|
return new ItemWriter<List<AreaStatisticsProcessor.AreaStatistics>>() {
|
||||||
|
@Override
|
||||||
|
public void write(Chunk<? extends List<AreaStatisticsProcessor.AreaStatistics>> chunk)
|
||||||
|
throws Exception {
|
||||||
|
|
||||||
|
List<AreaStatisticsProcessor.AreaStatistics> allStats =
|
||||||
|
chunk.getItems().stream()
|
||||||
|
.flatMap(List::stream)
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
if (allStats.isEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 배치 크기로 분할하여 병렬 처리
|
||||||
|
Lists.partition(allStats, batchSize)
|
||||||
|
.parallelStream()
|
||||||
|
.forEach(batch -> insertAreaStatisticsBatch(batch));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Inserts one batch of area statistics via PostgreSQL COPY
     * (CSV format, '|' delimiter, literal "NULL" for nulls).
     *
     * NOTE(review): the catch block below logs the failure but performs no
     * fallback despite the comment — rows in a failed batch are silently lost.
     * Confirm whether a JDBC-based fallback (as in fallbackBatchInsert) was
     * intended here.
     */
    private void insertAreaStatisticsBatch(
            List<AreaStatisticsProcessor.AreaStatistics> batch) {

        try (Connection conn = queryDataSource.getConnection()) {
            // COPY requires the PostgreSQL driver's native connection.
            BaseConnection baseConn = conn.unwrap(BaseConnection.class);
            CopyManager copyManager = new CopyManager(baseConn);

            String copySql = """
                COPY signal.t_area_statistics (
                    area_id, time_bucket, vessel_count,
                    in_count, out_count, transit_vessels,
                    stationary_vessels, avg_sog, created_at
                ) FROM STDIN WITH (FORMAT CSV, DELIMITER '|', NULL 'NULL')
                """;

            // Build the entire CSV payload in memory before streaming it.
            StringWriter writer = new StringWriter();
            for (var stat : batch) {
                writer.write(String.format("%s|%s|%d|%d|%d|%s|%s|%s|%s\n",
                    stat.getAreaId(),
                    stat.getTimeBucket().format(TIMESTAMP_FORMATTER),
                    stat.getVesselCount(),
                    stat.getInCount(),
                    stat.getOutCount(),
                    // JSON columns: escaped so embedded delimiters/quotes survive CSV.
                    escapeJson(convertToJson(stat.getTransitVessels())),
                    escapeJson(convertToJson(stat.getStationaryVessels())),
                    stat.getAvgSog() != null ? stat.getAvgSog().toString() : "NULL",
                    LocalDateTime.now().format(TIMESTAMP_FORMATTER)
                ));
            }

            long rowsInserted = copyManager.copyIn(copySql, new StringReader(writer.toString()));
            log.debug("Inserted {} area statistics", rowsInserted);

        } catch (Exception e) {
            log.error("Failed to bulk insert area statistics", e);
            // Fallback handling (TODO: not implemented — failed batches are dropped)
        }
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 테이블 존재 확인
|
||||||
|
*/
|
||||||
|
private boolean checkTableExists(String tableName) {
|
||||||
|
String sql = "SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'signal' AND tablename = ?)";
|
||||||
|
return Boolean.TRUE.equals(queryJdbcTemplate.queryForObject(sql, Boolean.class, tableName));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 바이너리 형식 헬퍼 메소드들
|
||||||
|
*/
|
||||||
|
private void writeBinaryHeader(ByteArrayOutputStream baos) throws IOException {
|
||||||
|
// PostgreSQL 바이너리 COPY 헤더
|
||||||
|
baos.write("PGCOPY\n\377\r\n\0".getBytes("UTF-8"));
|
||||||
|
// 플래그
|
||||||
|
writeInt32(baos, 0);
|
||||||
|
// 헤더 확장 길이
|
||||||
|
writeInt32(baos, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void writeBinaryTrailer(ByteArrayOutputStream baos) throws IOException {
|
||||||
|
// -1 표시 (EOF)
|
||||||
|
writeInt16(baos, -1);
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Writes one tile-statistics row in binary COPY tuple layout:
     * a 16-bit field count followed by 10 length-prefixed field values.
     *
     * NOTE(review): writeString/writeBigDecimal emit text bytes; PostgreSQL
     * binary COPY expects each field in the column type's binary send format.
     * Confirm the target column types accept these payloads.
     */
    private void writeBinaryRow(ByteArrayOutputStream baos,
            TileStatistics stat) throws IOException {
        // Number of fields in this tuple
        writeInt16(baos, 10);

        // Field values, in table column order
        writeString(baos, stat.getTileId());
        writeInt32(baos, stat.getTileLevel());
        writeTimestamp(baos, stat.getTimeBucket());
        writeInt32(baos, stat.getVesselCount());
        writeString(baos, convertToJson(stat.getUniqueVessels()));
        writeInt64(baos, stat.getTotalPoints());
        writeBigDecimal(baos, stat.getAvgSog());
        writeBigDecimal(baos, stat.getMaxSog());
        writeBigDecimal(baos, stat.getVesselDensity());
        writeTimestamp(baos, LocalDateTime.now());
    }
|
||||||
|
|
||||||
|
private void writeInt16(ByteArrayOutputStream baos, int value) throws IOException {
|
||||||
|
baos.write((value >> 8) & 0xFF);
|
||||||
|
baos.write(value & 0xFF);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void writeInt32(ByteArrayOutputStream baos, int value) throws IOException {
|
||||||
|
baos.write((value >> 24) & 0xFF);
|
||||||
|
baos.write((value >> 16) & 0xFF);
|
||||||
|
baos.write((value >> 8) & 0xFF);
|
||||||
|
baos.write(value & 0xFF);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void writeInt64(ByteArrayOutputStream baos, long value) throws IOException {
|
||||||
|
for (int i = 56; i >= 0; i -= 8) {
|
||||||
|
baos.write((int)(value >> i) & 0xFF);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void writeString(ByteArrayOutputStream baos, String value) throws IOException {
|
||||||
|
if (value == null) {
|
||||||
|
writeInt32(baos, -1); // NULL
|
||||||
|
} else {
|
||||||
|
byte[] bytes = value.getBytes("UTF-8");
|
||||||
|
writeInt32(baos, bytes.length);
|
||||||
|
baos.write(bytes);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void writeTimestamp(ByteArrayOutputStream baos, LocalDateTime value) throws IOException {
|
||||||
|
if (value == null) {
|
||||||
|
writeInt32(baos, -1); // NULL
|
||||||
|
} else {
|
||||||
|
// PostgreSQL timestamp 형식으로 변환
|
||||||
|
long micros = value.atZone(java.time.ZoneId.systemDefault())
|
||||||
|
.toInstant().toEpochMilli() * 1000;
|
||||||
|
writeInt32(baos, 8); // 길이
|
||||||
|
writeInt64(baos, micros);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Writes a BigDecimal field; null is encoded as the NULL marker.
     *
     * NOTE(review): this emits the decimal's text representation via
     * writeString. PostgreSQL binary COPY expects the numeric type's binary
     * send format, not text — confirm the target columns tolerate this.
     */
    private void writeBigDecimal(ByteArrayOutputStream baos, java.math.BigDecimal value)
            throws IOException {
        if (value == null) {
            writeInt32(baos, -1); // NULL
        } else {
            writeString(baos, value.toString());
        }
    }
|
||||||
|
|
||||||
|
|
||||||
|
    /**
     * Result of one bulk-insert attempt: row count plus an optional error message.
     */
    private static class BulkInsertResult {
        // Number of rows written by the operation.
        final long rowsInserted;
        // Error message when the operation failed; null on success. Currently unread.
        @SuppressWarnings("unused")
        final String error;

        BulkInsertResult(long rowsInserted, String error) {
            this.rowsInserted = rowsInserted;
            this.error = error;
        }
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 리소스 정리
|
||||||
|
*/
|
||||||
|
public void shutdown() {
|
||||||
|
if (executorService != null && !executorService.isShutdown()) {
|
||||||
|
executorService.shutdown();
|
||||||
|
try {
|
||||||
|
if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void destroy() throws Exception {
|
||||||
|
log.info("Shutting down OptimizedBulkInsertWriter ExecutorService");
|
||||||
|
if (executorService != null && !executorService.isShutdown()) {
|
||||||
|
executorService.shutdown();
|
||||||
|
try {
|
||||||
|
if (!executorService.awaitTermination(30, TimeUnit.SECONDS)) {
|
||||||
|
log.warn("ExecutorService did not terminate gracefully, forcing shutdown");
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
Thread.currentThread().interrupt();
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
271
src/main/java/gc/mda/signal_batch/batch/writer/UpsertWriter.java
Normal file
271
src/main/java/gc/mda/signal_batch/batch/writer/UpsertWriter.java
Normal file
@ -0,0 +1,271 @@
|
|||||||
|
package gc.mda.signal_batch.batch.writer;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselLatestPosition;
|
||||||
|
import gc.mda.signal_batch.batch.processor.AreaStatisticsProcessor.AreaStatistics;
|
||||||
|
import gc.mda.signal_batch.global.util.ConcurrentUpdateManager;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.item.Chunk;
|
||||||
|
import org.springframework.batch.item.ItemWriter;
|
||||||
|
import org.springframework.batch.item.database.JdbcBatchItemWriter;
|
||||||
|
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.concurrent.*;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class UpsertWriter {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
private final ConcurrentUpdateManager concurrentUpdateManager;
|
||||||
|
|
||||||
|
public UpsertWriter(
|
||||||
|
@Qualifier("queryDataSource") DataSource queryDataSource,
|
||||||
|
ConcurrentUpdateManager concurrentUpdateManager) {
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
this.concurrentUpdateManager = concurrentUpdateManager;
|
||||||
|
|
||||||
|
System.out.println("========================================");
|
||||||
|
System.out.println("!!! UpsertWriter initialized !!!");
|
||||||
|
System.out.println("queryDataSource: " + queryDataSource);
|
||||||
|
System.out.println("========================================");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Value("${vessel.batch.writer.use-advisory-lock:false}")
|
||||||
|
private boolean useAdvisoryLock;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.writer.parallel-threads:4}")
|
||||||
|
private int parallelThreads;
|
||||||
|
|
||||||
|
private static final ExecutorService executorService = new ThreadPoolExecutor(
|
||||||
|
4, 8,
|
||||||
|
60L, TimeUnit.SECONDS,
|
||||||
|
new LinkedBlockingQueue<>(100),
|
||||||
|
new ThreadPoolExecutor.CallerRunsPolicy()
|
||||||
|
);
|
||||||
|
|
||||||
|
// shutdown hook 추가
|
||||||
|
static {
|
||||||
|
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
|
||||||
|
log.info("Shutting down executor service...");
|
||||||
|
executorService.shutdown();
|
||||||
|
try {
|
||||||
|
if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
private final ObjectMapper objectMapper = new ObjectMapper()
|
||||||
|
.registerModule(new JavaTimeModule());
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 최신 위치 Writer - Advisory Lock 사용
|
||||||
|
*/
|
||||||
|
@Bean
|
||||||
|
public ItemWriter<VesselLatestPosition> latestPositionWriter() {
|
||||||
|
if (useAdvisoryLock) {
|
||||||
|
return new ItemWriter<VesselLatestPosition>() {
|
||||||
|
@Override
|
||||||
|
public void write(Chunk<? extends VesselLatestPosition> chunk) throws Exception {
|
||||||
|
List<VesselLatestPosition> items = new ArrayList<>(chunk.getItems());
|
||||||
|
|
||||||
|
// 병렬 처리를 위한 분할
|
||||||
|
int batchSize = Math.max(1, items.size() / parallelThreads);
|
||||||
|
List<CompletableFuture<Void>> futures = new ArrayList<>();
|
||||||
|
|
||||||
|
for (int i = 0; i < items.size(); i += batchSize) {
|
||||||
|
int endIndex = Math.min(i + batchSize, items.size());
|
||||||
|
List<VesselLatestPosition> batch = items.subList(i, endIndex);
|
||||||
|
|
||||||
|
CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
|
||||||
|
for (VesselLatestPosition position : batch) {
|
||||||
|
try {
|
||||||
|
concurrentUpdateManager.updateLatestPositionWithLock(position);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to update position: {}", position.getTargetId(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, executorService);
|
||||||
|
|
||||||
|
futures.add(future);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 모든 작업 완료 대기
|
||||||
|
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
|
||||||
|
.get(5, TimeUnit.MINUTES);
|
||||||
|
|
||||||
|
log.debug("Updated {} vessel positions", items.size());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// 기존 방식 (Batch Update)
|
||||||
|
return defaultLatestPositionWriter();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 기본 Batch Writer
|
||||||
|
*/
|
||||||
|
private JdbcBatchItemWriter<VesselLatestPosition> defaultLatestPositionWriter() {
|
||||||
|
return customLatestPositionWriter();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Custom Writer - UPDATE 0건도 정상 처리
|
||||||
|
*/
|
||||||
|
private JdbcBatchItemWriter<VesselLatestPosition> customLatestPositionWriter() {
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_vessel_latest_position (
|
||||||
|
sig_src_cd, target_id, lat, lon, geom,
|
||||||
|
sog, cog, heading, ship_nm, ship_ty,
|
||||||
|
last_update, update_count, created_at
|
||||||
|
) VALUES (
|
||||||
|
:sigSrcCd, :targetId, :lat, :lon,
|
||||||
|
public.ST_SetSRID(public.ST_MakePoint(:lon, :lat), 4326),
|
||||||
|
:sog, :cog, :heading, :shipNm, :shipTy,
|
||||||
|
:lastUpdate, 1, CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
ON CONFLICT (sig_src_cd, target_id) DO UPDATE SET
|
||||||
|
lat = EXCLUDED.lat,
|
||||||
|
lon = EXCLUDED.lon,
|
||||||
|
geom = EXCLUDED.geom,
|
||||||
|
sog = EXCLUDED.sog,
|
||||||
|
cog = EXCLUDED.cog,
|
||||||
|
heading = EXCLUDED.heading,
|
||||||
|
ship_nm = COALESCE(EXCLUDED.ship_nm, t_vessel_latest_position.ship_nm),
|
||||||
|
ship_ty = COALESCE(EXCLUDED.ship_ty, t_vessel_latest_position.ship_ty),
|
||||||
|
last_update = EXCLUDED.last_update,
|
||||||
|
update_count = t_vessel_latest_position.update_count + 1
|
||||||
|
WHERE EXCLUDED.last_update > t_vessel_latest_position.last_update
|
||||||
|
""";
|
||||||
|
|
||||||
|
JdbcBatchItemWriter<VesselLatestPosition> writer = new JdbcBatchItemWriter<VesselLatestPosition>() {
|
||||||
|
@Override
|
||||||
|
public void write(Chunk<? extends VesselLatestPosition> chunk) throws Exception {
|
||||||
|
// assertUpdates 비활성화로 UPDATE 0건도 허용
|
||||||
|
this.setAssertUpdates(false);
|
||||||
|
super.write(chunk);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
writer.setDataSource(queryDataSource);
|
||||||
|
writer.setSql(sql);
|
||||||
|
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
|
||||||
|
writer.afterPropertiesSet();
|
||||||
|
|
||||||
|
return writer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 구역 통계 Writer
|
||||||
|
*/
|
||||||
|
@Bean
|
||||||
|
public ItemWriter<List<AreaStatistics>> areaStatisticsWriter() {
|
||||||
|
return new ItemWriter<List<AreaStatistics>>() {
|
||||||
|
@Override
|
||||||
|
public void write(Chunk<? extends List<AreaStatistics>> chunk) throws Exception {
|
||||||
|
// 중복 제거를 위한 Map 사용
|
||||||
|
Map<String, AreaStatistics> uniqueStats = new HashMap<>();
|
||||||
|
|
||||||
|
for (List<AreaStatistics> batch : chunk.getItems()) {
|
||||||
|
for (AreaStatistics stat : batch) {
|
||||||
|
String key = stat.getAreaId() + "_" + stat.getTimeBucket();
|
||||||
|
// 중복된 경우 나중 데이터로 덮어쓰기
|
||||||
|
uniqueStats.put(key, stat);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
List<AreaStatistics> allStats = new ArrayList<>(uniqueStats.values());
|
||||||
|
|
||||||
|
if (allStats.isEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 배치를 더 작은 단위로 분할
|
||||||
|
int batchSize = 500;
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
jdbcTemplate.setQueryTimeout(60); // 60초 타임아웃
|
||||||
|
|
||||||
|
for (int i = 0; i < allStats.size(); i += batchSize) {
|
||||||
|
int endIndex = Math.min(i + batchSize, allStats.size());
|
||||||
|
List<AreaStatistics> subBatch = allStats.subList(i, endIndex);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_area_statistics (
|
||||||
|
area_id, time_bucket, vessel_count, in_count, out_count,
|
||||||
|
transit_vessels, stationary_vessels, avg_sog, created_at
|
||||||
|
) VALUES (
|
||||||
|
?, ?, ?, ?, ?,
|
||||||
|
?::jsonb, ?::jsonb, ?, CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
ON CONFLICT (area_id, time_bucket) DO UPDATE SET
|
||||||
|
vessel_count = EXCLUDED.vessel_count,
|
||||||
|
in_count = EXCLUDED.in_count,
|
||||||
|
out_count = EXCLUDED.out_count,
|
||||||
|
transit_vessels = EXCLUDED.transit_vessels,
|
||||||
|
stationary_vessels = EXCLUDED.stationary_vessels,
|
||||||
|
avg_sog = EXCLUDED.avg_sog
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<Object[]> batchArgs = new ArrayList<>();
|
||||||
|
for (AreaStatistics stats : subBatch) {
|
||||||
|
batchArgs.add(new Object[]{
|
||||||
|
stats.getAreaId(),
|
||||||
|
java.sql.Timestamp.valueOf(stats.getTimeBucket()),
|
||||||
|
stats.getVesselCount(),
|
||||||
|
stats.getInCount(),
|
||||||
|
stats.getOutCount(),
|
||||||
|
objectMapper.writeValueAsString(stats.getTransitVessels()),
|
||||||
|
objectMapper.writeValueAsString(stats.getStationaryVessels()),
|
||||||
|
stats.getAvgSog()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
jdbcTemplate.batchUpdate(sql, batchArgs);
|
||||||
|
log.debug("Updated {} area statistics records", subBatch.size());
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to update batch of {} area statistics", subBatch.size(), e);
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Total updated {} area statistics records", allStats.size());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 리소스 정리
|
||||||
|
*/
|
||||||
|
public void shutdown() {
|
||||||
|
executorService.shutdown();
|
||||||
|
try {
|
||||||
|
if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
executorService.shutdownNow();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,253 @@
|
|||||||
|
package gc.mda.signal_batch.batch.writer;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.postgresql.copy.CopyManager;
|
||||||
|
import org.postgresql.core.BaseConnection;
|
||||||
|
import org.springframework.batch.item.Chunk;
|
||||||
|
import org.springframework.batch.item.ItemWriter;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.io.*;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.LinkedHashMap;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class VesselTrackBulkWriter implements ItemWriter<List<VesselTrack>> {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
public VesselTrackBulkWriter(
|
||||||
|
@Qualifier("queryDataSource") DataSource queryDataSource,
|
||||||
|
@Qualifier("queryJdbcTemplate") JdbcTemplate queryJdbcTemplate) {
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
this.queryJdbcTemplate = queryJdbcTemplate;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final DateTimeFormatter TIMESTAMP_FORMATTER =
|
||||||
|
DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
|
||||||
|
// DataSource 확인을 위한 초기화 메서드
|
||||||
|
@jakarta.annotation.PostConstruct
|
||||||
|
public void init() {
|
||||||
|
try {
|
||||||
|
java.sql.Connection conn = queryDataSource.getConnection();
|
||||||
|
String url = conn.getMetaData().getURL();
|
||||||
|
String user = conn.getMetaData().getUserName();
|
||||||
|
conn.close();
|
||||||
|
log.warn("========================================");
|
||||||
|
log.warn("!!! VesselTrackBulkWriter DataSource INFO !!!");
|
||||||
|
log.warn("!!! URL: {}", url);
|
||||||
|
log.warn("!!! User: {}", user);
|
||||||
|
log.warn("!!! DataSource Class: {}", queryDataSource.getClass().getName());
|
||||||
|
log.warn("========================================");
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to get DataSource info", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private final ObjectMapper objectMapper = new ObjectMapper()
|
||||||
|
.registerModule(new JavaTimeModule())
|
||||||
|
.setDateFormat(new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void write(Chunk<? extends List<VesselTrack>> chunk) throws Exception {
|
||||||
|
List<VesselTrack> allTracks = chunk.getItems().stream()
|
||||||
|
.flatMap(List::stream)
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
if (allTracks.isEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
bulkInsertTracks(allTracks, "signal.t_vessel_tracks_5min");
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Bulk insert failed, using fallback", e);
|
||||||
|
fallbackInsert(allTracks, "signal.t_vessel_tracks_5min");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void writeHourlyTracks(List<VesselTrack> tracks) throws Exception {
|
||||||
|
if (tracks.isEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
bulkInsertTracks(tracks, "signal.t_vessel_tracks_hourly");
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Hourly bulk insert failed, using fallback", e);
|
||||||
|
fallbackInsert(tracks, "signal.t_vessel_tracks_hourly");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void writeDailyTracks(List<VesselTrack> tracks) throws Exception {
|
||||||
|
if (tracks.isEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
bulkInsertTracks(tracks, "signal.t_vessel_tracks_daily");
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Daily bulk insert failed, using fallback", e);
|
||||||
|
fallbackInsert(tracks, "signal.t_vessel_tracks_daily");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// track_geom만 사용하는 단순화된 COPY
|
||||||
|
private void bulkInsertTracks(List<VesselTrack> tracks, String tableName) throws Exception {
|
||||||
|
try (Connection conn = queryDataSource.getConnection()) {
|
||||||
|
BaseConnection baseConn = conn.unwrap(BaseConnection.class);
|
||||||
|
CopyManager copyManager = new CopyManager(baseConn);
|
||||||
|
|
||||||
|
String copySql = String.format("""
|
||||||
|
COPY %s (
|
||||||
|
sig_src_cd, target_id, time_bucket, track_geom,
|
||||||
|
distance_nm, avg_speed, max_speed, point_count,
|
||||||
|
start_position, end_position
|
||||||
|
) FROM STDIN
|
||||||
|
""", tableName);
|
||||||
|
|
||||||
|
StringWriter writer = new StringWriter();
|
||||||
|
for (VesselTrack track : tracks) {
|
||||||
|
writer.write(formatTrackLine(track));
|
||||||
|
writer.write('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
long rowsInserted = copyManager.copyIn(copySql, new StringReader(writer.toString()));
|
||||||
|
log.info("Bulk inserted {} vessel tracks to {} (v2 only)", rowsInserted, tableName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private String formatTrackLine(VesselTrack track) {
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
|
||||||
|
sb.append(track.getSigSrcCd()).append('\t');
|
||||||
|
sb.append(track.getTargetId()).append('\t');
|
||||||
|
sb.append(Timestamp.valueOf(track.getTimeBucket())).append('\t');
|
||||||
|
|
||||||
|
// track_geom만 사용
|
||||||
|
if (track.getTrackGeom() != null && !track.getTrackGeom().isEmpty()) {
|
||||||
|
sb.append(track.getTrackGeom());
|
||||||
|
} else {
|
||||||
|
sb.append("\\N");
|
||||||
|
}
|
||||||
|
sb.append('\t');
|
||||||
|
|
||||||
|
// distance_nm
|
||||||
|
if (track.getDistanceNm() != null) {
|
||||||
|
sb.append(track.getDistanceNm());
|
||||||
|
} else {
|
||||||
|
sb.append("\\N");
|
||||||
|
}
|
||||||
|
sb.append('\t');
|
||||||
|
|
||||||
|
// avg_speed
|
||||||
|
if (track.getAvgSpeed() != null) {
|
||||||
|
sb.append(track.getAvgSpeed());
|
||||||
|
} else {
|
||||||
|
sb.append("\\N");
|
||||||
|
}
|
||||||
|
sb.append('\t');
|
||||||
|
|
||||||
|
// max_speed
|
||||||
|
if (track.getMaxSpeed() != null) {
|
||||||
|
sb.append(track.getMaxSpeed());
|
||||||
|
} else {
|
||||||
|
sb.append("\\N");
|
||||||
|
}
|
||||||
|
sb.append('\t');
|
||||||
|
|
||||||
|
// point_count
|
||||||
|
sb.append(track.getPointCount()).append('\t');
|
||||||
|
|
||||||
|
// start_position (JSON)
|
||||||
|
if (track.getStartPosition() != null) {
|
||||||
|
sb.append(formatPositionJson(track.getStartPosition()));
|
||||||
|
} else {
|
||||||
|
sb.append("\\N");
|
||||||
|
}
|
||||||
|
sb.append('\t');
|
||||||
|
|
||||||
|
// end_position (JSON)
|
||||||
|
if (track.getEndPosition() != null) {
|
||||||
|
sb.append(formatPositionJson(track.getEndPosition()));
|
||||||
|
} else {
|
||||||
|
sb.append("\\N");
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
private String formatPositionJson(VesselTrack.TrackPosition position) {
|
||||||
|
Map<String, Object> jsonMap = new LinkedHashMap<>();
|
||||||
|
jsonMap.put("lat", position.getLat());
|
||||||
|
jsonMap.put("lon", position.getLon());
|
||||||
|
jsonMap.put("time", position.getTime().format(TIMESTAMP_FORMATTER));
|
||||||
|
if (position.getSog() != null) {
|
||||||
|
jsonMap.put("sog", position.getSog());
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return objectMapper.writeValueAsString(jsonMap);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to format position JSON", e);
|
||||||
|
return "{}";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void fallbackInsert(List<VesselTrack> tracks, String tableName) {
|
||||||
|
String sql = String.format("""
|
||||||
|
INSERT INTO %s (
|
||||||
|
sig_src_cd, target_id, time_bucket, track_geom,
|
||||||
|
distance_nm, avg_speed, max_speed, point_count,
|
||||||
|
start_position, end_position
|
||||||
|
) VALUES (?, ?, ?, public.ST_GeomFromText(?), ?, ?, ?, ?, ?::jsonb, ?::jsonb)
|
||||||
|
ON CONFLICT (sig_src_cd, target_id, time_bucket)
|
||||||
|
DO UPDATE SET
|
||||||
|
track_geom = EXCLUDED.track_geom,
|
||||||
|
distance_nm = EXCLUDED.distance_nm,
|
||||||
|
avg_speed = EXCLUDED.avg_speed,
|
||||||
|
max_speed = EXCLUDED.max_speed,
|
||||||
|
point_count = EXCLUDED.point_count,
|
||||||
|
start_position = EXCLUDED.start_position,
|
||||||
|
end_position = EXCLUDED.end_position
|
||||||
|
""", tableName);
|
||||||
|
|
||||||
|
for (VesselTrack track : tracks) {
|
||||||
|
try {
|
||||||
|
queryJdbcTemplate.update(sql,
|
||||||
|
track.getSigSrcCd(),
|
||||||
|
track.getTargetId(),
|
||||||
|
Timestamp.valueOf(track.getTimeBucket()),
|
||||||
|
track.getTrackGeom(),
|
||||||
|
track.getDistanceNm(),
|
||||||
|
track.getAvgSpeed(),
|
||||||
|
track.getMaxSpeed(),
|
||||||
|
track.getPointCount(),
|
||||||
|
track.getStartPosition() != null ? formatPositionJson(track.getStartPosition()) : null,
|
||||||
|
track.getEndPosition() != null ? formatPositionJson(track.getEndPosition()) : null
|
||||||
|
);
|
||||||
|
log.debug("Upserted track for vessel: {} to {}",
|
||||||
|
track.getSigSrcCd() + "_" + track.getTargetId(), tableName);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to upsert track for vessel: {} to {}",
|
||||||
|
track.getSigSrcCd() + "_" + track.getTargetId(), tableName, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,166 @@
|
|||||||
|
package gc.mda.signal_batch.domain.debug;
|
||||||
|
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.web.bind.annotation.*;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.ZoneId;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/debug")
|
||||||
|
public class DebugTimeController {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
|
||||||
|
public DebugTimeController(@Qualifier("queryDataSource") DataSource queryDataSource) {
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/time-analysis")
|
||||||
|
public Map<String, Object> analyzeTimeData(
|
||||||
|
@RequestParam(defaultValue = "000001") String sigSrcCd,
|
||||||
|
@RequestParam(defaultValue = "440331240") String targetId,
|
||||||
|
@RequestParam(defaultValue = "2025-08-26T08:02:59") String startTime,
|
||||||
|
@RequestParam(defaultValue = "2025-08-27T08:02:59") String endTime) {
|
||||||
|
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
Map<String, Object> result = new HashMap<>();
|
||||||
|
|
||||||
|
LocalDateTime start = LocalDateTime.parse(startTime);
|
||||||
|
LocalDateTime end = LocalDateTime.parse(endTime);
|
||||||
|
|
||||||
|
result.put("requestInfo", Map.of(
|
||||||
|
"sigSrcCd", sigSrcCd,
|
||||||
|
"targetId", targetId,
|
||||||
|
"startTime", startTime,
|
||||||
|
"endTime", endTime,
|
||||||
|
"startTimestamp", start.atZone(ZoneId.of("Asia/Seoul")).toEpochSecond(),
|
||||||
|
"endTimestamp", end.atZone(ZoneId.of("Asia/Seoul")).toEpochSecond()
|
||||||
|
));
|
||||||
|
|
||||||
|
// 1. DB 서버 시간 정보 확인
|
||||||
|
String timeSql = """
|
||||||
|
SELECT
|
||||||
|
NOW() as current_time,
|
||||||
|
CURRENT_SETTING('TimeZone') as timezone,
|
||||||
|
EXTRACT(epoch FROM NOW()) as current_unix_timestamp
|
||||||
|
""";
|
||||||
|
|
||||||
|
Map<String, Object> serverTime = jdbcTemplate.queryForMap(timeSql);
|
||||||
|
result.put("serverInfo", serverTime);
|
||||||
|
|
||||||
|
// 2. 요청 범위의 실제 데이터 확인
|
||||||
|
String dataSql = """
|
||||||
|
SELECT
|
||||||
|
time_bucket,
|
||||||
|
EXTRACT(epoch FROM time_bucket) as time_bucket_unix,
|
||||||
|
substring(public.ST_AsText(track_geom), 1, 200) as track_sample,
|
||||||
|
distance_nm,
|
||||||
|
avg_speed,
|
||||||
|
point_count
|
||||||
|
FROM signal.t_vessel_tracks_5min
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
ORDER BY time_bucket
|
||||||
|
LIMIT 10
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<Map<String, Object>> dataRows = jdbcTemplate.query(dataSql,
|
||||||
|
(rs, rowNum) -> {
|
||||||
|
Map<String, Object> row = new HashMap<>();
|
||||||
|
row.put("time_bucket", rs.getTimestamp("time_bucket"));
|
||||||
|
row.put("time_bucket_unix", rs.getLong("time_bucket_unix"));
|
||||||
|
row.put("track_sample", rs.getString("track_sample"));
|
||||||
|
row.put("distance_nm", rs.getBigDecimal("distance_nm"));
|
||||||
|
row.put("avg_speed", rs.getBigDecimal("avg_speed"));
|
||||||
|
row.put("point_count", rs.getInt("point_count"));
|
||||||
|
|
||||||
|
// Track geometry에서 첫 번째 점의 unix timestamp 추출
|
||||||
|
String trackSample = rs.getString("track_sample");
|
||||||
|
if (trackSample != null && trackSample.contains("LINESTRING M")) {
|
||||||
|
try {
|
||||||
|
String coordsPart = trackSample.substring(trackSample.indexOf("(") + 1);
|
||||||
|
if (coordsPart.contains(",")) {
|
||||||
|
String firstPoint = coordsPart.split(",")[0].trim();
|
||||||
|
String[] parts = firstPoint.split("\\s+");
|
||||||
|
if (parts.length >= 3) {
|
||||||
|
row.put("first_point_unix", parts[2]);
|
||||||
|
// Unix timestamp를 readable date로 변환
|
||||||
|
long unixTime = Long.parseLong(parts[2]);
|
||||||
|
LocalDateTime dateTime = LocalDateTime.ofInstant(
|
||||||
|
java.time.Instant.ofEpochSecond(unixTime),
|
||||||
|
ZoneId.of("Asia/Seoul")
|
||||||
|
);
|
||||||
|
row.put("first_point_kst", dateTime.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
row.put("parse_error", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return row;
|
||||||
|
},
|
||||||
|
sigSrcCd, targetId, Timestamp.valueOf(start), Timestamp.valueOf(end)
|
||||||
|
);
|
||||||
|
|
||||||
|
result.put("queryResults", dataRows);
|
||||||
|
|
||||||
|
// 3. 더 넓은 범위에서 해당 선박의 최신 데이터 확인
|
||||||
|
String recentSql = """
|
||||||
|
SELECT
|
||||||
|
time_bucket,
|
||||||
|
EXTRACT(epoch FROM time_bucket) as time_bucket_unix,
|
||||||
|
substring(public.ST_AsText(track_geom), 1, 100) as track_sample
|
||||||
|
FROM signal.t_vessel_tracks_5min
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
ORDER BY time_bucket DESC
|
||||||
|
LIMIT 5
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<Map<String, Object>> recentRows = jdbcTemplate.query(recentSql,
|
||||||
|
(rs, rowNum) -> {
|
||||||
|
Map<String, Object> row = new HashMap<>();
|
||||||
|
row.put("time_bucket", rs.getTimestamp("time_bucket"));
|
||||||
|
row.put("time_bucket_unix", rs.getLong("time_bucket_unix"));
|
||||||
|
row.put("track_sample", rs.getString("track_sample"));
|
||||||
|
return row;
|
||||||
|
},
|
||||||
|
sigSrcCd, targetId
|
||||||
|
);
|
||||||
|
|
||||||
|
result.put("recentData", recentRows);
|
||||||
|
|
||||||
|
// 4. 시간 차이 분석
|
||||||
|
if (!dataRows.isEmpty()) {
|
||||||
|
Map<String, Object> firstRow = dataRows.get(0);
|
||||||
|
Map<String, Object> lastRow = dataRows.get(dataRows.size() - 1);
|
||||||
|
|
||||||
|
Map<String, Object> analysis = new HashMap<>();
|
||||||
|
analysis.put("foundRecords", dataRows.size());
|
||||||
|
analysis.put("firstRecord", Map.of(
|
||||||
|
"time_bucket", firstRow.get("time_bucket"),
|
||||||
|
"unix_from_geometry", firstRow.get("first_point_unix"),
|
||||||
|
"kst_from_geometry", firstRow.get("first_point_kst")
|
||||||
|
));
|
||||||
|
analysis.put("lastRecord", Map.of(
|
||||||
|
"time_bucket", lastRow.get("time_bucket"),
|
||||||
|
"unix_from_geometry", lastRow.get("first_point_unix"),
|
||||||
|
"kst_from_geometry", lastRow.get("first_point_kst")
|
||||||
|
));
|
||||||
|
|
||||||
|
result.put("analysis", analysis);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
158
src/main/java/gc/mda/signal_batch/domain/gis/cache/AreaBoundaryCache.java
vendored
Normal file
158
src/main/java/gc/mda/signal_batch/domain/gis/cache/AreaBoundaryCache.java
vendored
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.cache;
|
||||||
|
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.locationtech.jts.geom.Coordinate;
|
||||||
|
import org.locationtech.jts.geom.GeometryFactory;
|
||||||
|
import org.locationtech.jts.geom.Point;
|
||||||
|
import org.locationtech.jts.geom.Polygon;
|
||||||
|
import org.locationtech.jts.io.WKTReader;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import jakarta.annotation.PostConstruct;
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
public class AreaBoundaryCache {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
private final Map<String, Polygon> areaPolygons = new ConcurrentHashMap<>();
|
||||||
|
private final Map<Integer, Polygon> haeguPolygons = new ConcurrentHashMap<>();
|
||||||
|
private final GeometryFactory geometryFactory = new GeometryFactory();
|
||||||
|
private final WKTReader wktReader = new WKTReader(geometryFactory);
|
||||||
|
|
||||||
|
public AreaBoundaryCache(@Qualifier("queryDataSource") DataSource queryDataSource) {
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostConstruct
|
||||||
|
public void loadBoundaries() {
|
||||||
|
loadAreaBoundaries();
|
||||||
|
loadHaeguBoundaries();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void loadAreaBoundaries() {
|
||||||
|
try {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT area_id, public.ST_AsText(area_geom) as wkt
|
||||||
|
FROM signal.t_areas
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<Map<String, Object>> areas = jdbcTemplate.queryForList(sql);
|
||||||
|
|
||||||
|
for (Map<String, Object> area : areas) {
|
||||||
|
String areaId = (String) area.get("area_id");
|
||||||
|
String wkt = (String) area.get("wkt");
|
||||||
|
try {
|
||||||
|
Polygon polygon = (Polygon) wktReader.read(wkt);
|
||||||
|
areaPolygons.put(areaId, polygon);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to parse geometry for area {}: {}", areaId, e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Loaded {} areas into cache", areaPolygons.size());
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to load areas cache", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void loadHaeguBoundaries() {
|
||||||
|
try {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT haegu_no, public.ST_AsText(geom) as wkt
|
||||||
|
FROM signal.t_haegu_definitions
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<Map<String, Object>> haegus = jdbcTemplate.queryForList(sql);
|
||||||
|
|
||||||
|
for (Map<String, Object> haegu : haegus) {
|
||||||
|
Integer haeguNo = (Integer) haegu.get("haegu_no");
|
||||||
|
String wkt = (String) haegu.get("wkt");
|
||||||
|
try {
|
||||||
|
Polygon polygon = (Polygon) wktReader.read(wkt);
|
||||||
|
haeguPolygons.put(haeguNo, polygon);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to parse geometry for haegu {}: {}", haeguNo, e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Loaded {} haegus into cache", haeguPolygons.size());
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to load haegus cache", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 포인트가 속한 모든 area ID 반환
|
||||||
|
public Set<String> findAreasForPoint(double lat, double lon) {
|
||||||
|
Point point = geometryFactory.createPoint(new Coordinate(lon, lat));
|
||||||
|
|
||||||
|
return areaPolygons.entrySet().stream()
|
||||||
|
.filter(entry -> entry.getValue().contains(point))
|
||||||
|
.map(Map.Entry::getKey)
|
||||||
|
.collect(Collectors.toSet());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 포인트가 속한 모든 haegu 번호 반환
|
||||||
|
public Set<Integer> findHaegusForPoint(double lat, double lon) {
|
||||||
|
Point point = geometryFactory.createPoint(new Coordinate(lon, lat));
|
||||||
|
|
||||||
|
return haeguPolygons.entrySet().stream()
|
||||||
|
.filter(entry -> entry.getValue().contains(point))
|
||||||
|
.map(Map.Entry::getKey)
|
||||||
|
.collect(Collectors.toSet());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 특정 area에 포인트가 포함되는지 확인
|
||||||
|
public boolean isPointInArea(double lat, double lon, String areaId) {
|
||||||
|
Polygon polygon = areaPolygons.get(areaId);
|
||||||
|
if (polygon == null) return false;
|
||||||
|
|
||||||
|
Point point = geometryFactory.createPoint(new Coordinate(lon, lat));
|
||||||
|
return polygon.contains(point);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 특정 haegu에 포인트가 포함되는지 확인
|
||||||
|
public boolean isPointInHaegu(double lat, double lon, Integer haeguNo) {
|
||||||
|
Polygon polygon = haeguPolygons.get(haeguNo);
|
||||||
|
if (polygon == null) return false;
|
||||||
|
|
||||||
|
Point point = geometryFactory.createPoint(new Coordinate(lon, lat));
|
||||||
|
return polygon.contains(point);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Job 실행 시 캐시 갱신
|
||||||
|
public void refresh() {
|
||||||
|
areaPolygons.clear();
|
||||||
|
haeguPolygons.clear();
|
||||||
|
loadBoundaries();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 포인트가 속한 첫 번째 area ID 반환 (우선순위 기반)
|
||||||
|
public String findAreaId(double lat, double lon) {
|
||||||
|
Set<String> areas = findAreasForPoint(lat, lon);
|
||||||
|
return areas.isEmpty() ? null : areas.iterator().next();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 캐시 크기 반환
|
||||||
|
public int getCacheSize() {
|
||||||
|
return areaPolygons.size() + haeguPolygons.size();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 캐시 클리어
|
||||||
|
public void clearCache() {
|
||||||
|
areaPolygons.clear();
|
||||||
|
haeguPolygons.clear();
|
||||||
|
log.info("Area boundary cache cleared");
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,96 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.controller;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.gis.dto.GisBoundaryResponse;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.VesselStatsResponse;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.TrackResponse;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.VesselTracksRequest;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.CompactVesselTrack;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.RecentVesselPositionDto;
|
||||||
|
import gc.mda.signal_batch.domain.gis.service.GisService;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.service.VesselPositionService;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.Parameter;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.web.bind.annotation.*;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1")
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@Tag(name = "항적 조회 API", description = "해구 및 영역별 선박 항적 조회 및 통계 API")
|
||||||
|
public class GisController {
|
||||||
|
|
||||||
|
private final GisService gisService;
|
||||||
|
private final VesselPositionService vesselPositionService;
|
||||||
|
|
||||||
|
@GetMapping("/haegu/boundaries")
|
||||||
|
@Operation(summary = "해구 경계 조회", description = "모든 해구의 경계 정보를 GeoJSON 형식으로 반환")
|
||||||
|
public List<GisBoundaryResponse> getHaeguBoundaries() {
|
||||||
|
return gisService.getHaeguBoundaries();
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/haegu/vessel-stats")
|
||||||
|
@Operation(summary = "해구별 선박 통계", description = "지정된 시간 동안의 해구별 선박 통계")
|
||||||
|
public Map<Integer, VesselStatsResponse> getHaeguVesselStats(
|
||||||
|
@RequestParam(defaultValue = "60") int minutes) {
|
||||||
|
return gisService.getHaeguVesselStats(minutes);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/areas/boundaries")
|
||||||
|
@Operation(summary = "사용자 정의 영역 경계 조회", description = "모든 사용자 정의 영역의 경계 정보")
|
||||||
|
public List<GisBoundaryResponse> getAreaBoundaries() {
|
||||||
|
return gisService.getAreaBoundaries();
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/areas/vessel-stats")
|
||||||
|
@Operation(summary = "영역별 선박 통계", description = "지정된 시간 동안의 영역별 선박 통계")
|
||||||
|
public Map<String, VesselStatsResponse> getAreaVesselStats(
|
||||||
|
@RequestParam(defaultValue = "60") int minutes) {
|
||||||
|
return gisService.getAreaVesselStats(minutes);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/tracks/haegu/{haeguNo}")
|
||||||
|
@Operation(summary = "해구별 선박 항적", description = "특정 해구의 선박 항적 조회")
|
||||||
|
public List<TrackResponse> getHaeguTracks(
|
||||||
|
@PathVariable Integer haeguNo,
|
||||||
|
@RequestParam(defaultValue = "60") int minutes) {
|
||||||
|
return gisService.getHaeguTracks(haeguNo, minutes);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/tracks/area/{areaId}")
|
||||||
|
@Operation(summary = "영역별 선박 항적", description = "특정 영역의 선박 항적 조회")
|
||||||
|
public List<TrackResponse> getAreaTracks(
|
||||||
|
@PathVariable String areaId,
|
||||||
|
@RequestParam(defaultValue = "60") int minutes) {
|
||||||
|
return gisService.getAreaTracks(areaId, minutes);
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping("/tracks/vessels")
|
||||||
|
@Operation(summary = "선박별 항적 조회", description = "지정된 선박들의 항적을 조회합니다.")
|
||||||
|
public List<CompactVesselTrack> getVesselTracks(
|
||||||
|
@RequestBody VesselTracksRequest request) {
|
||||||
|
return gisService.getVesselTracks(request);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/vessels/recent-positions")
|
||||||
|
@Operation(
|
||||||
|
summary = "최근 위치 업데이트된 선박 조회",
|
||||||
|
description = "지정된 시간(분) 이내에 위치가 업데이트된 모든 선박의 최신 위치 정보를 반환합니다."
|
||||||
|
)
|
||||||
|
public List<RecentVesselPositionDto> getRecentVesselPositions(
|
||||||
|
@Parameter(description = "조회할 시간 범위 (분 단위)", example = "30")
|
||||||
|
@RequestParam(defaultValue = "5") int minutes) {
|
||||||
|
|
||||||
|
if (minutes <= 0 || minutes > 1440) { // 24시간 제한
|
||||||
|
throw new IllegalArgumentException("Minutes must be between 1 and 1440");
|
||||||
|
}
|
||||||
|
|
||||||
|
return vesselPositionService.getRecentVesselPositions(minutes);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,107 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.controller;
|
||||||
|
|
||||||
|
import org.springframework.core.io.FileSystemResource;
|
||||||
|
import org.springframework.core.io.Resource;
|
||||||
|
import org.springframework.http.HttpHeaders;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.MediaType;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.nio.file.Path;
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/tiles")
|
||||||
|
public class MapTileController {
|
||||||
|
|
||||||
|
private static final String TILE_BASE_PATH = "/devdata/MAPS/WORLD_webp";
|
||||||
|
private static final String TILE_ENC_PATH = "/devdata/MAPS/ENC_RAS_webp";
|
||||||
|
|
||||||
|
@GetMapping("/world/{z}/{x}/{y}.webp")
|
||||||
|
public ResponseEntity<Resource> getWorldTile(
|
||||||
|
@PathVariable int z,
|
||||||
|
@PathVariable int x,
|
||||||
|
@PathVariable int y) {
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 안전한 경로 생성
|
||||||
|
Path tilePath = Paths.get(TILE_BASE_PATH, String.valueOf(z),
|
||||||
|
String.valueOf(x), y + ".webp");
|
||||||
|
File tileFile = tilePath.toFile();
|
||||||
|
|
||||||
|
if (!tileFile.exists() || !tileFile.isFile()) {
|
||||||
|
return ResponseEntity.notFound().build();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 경로 탐색 공격 방지
|
||||||
|
if (!tileFile.getCanonicalPath().startsWith(new File(TILE_BASE_PATH).getCanonicalPath())) {
|
||||||
|
return ResponseEntity.status(HttpStatus.FORBIDDEN).build();
|
||||||
|
}
|
||||||
|
|
||||||
|
FileSystemResource resource = new FileSystemResource(tileFile);
|
||||||
|
|
||||||
|
HttpHeaders headers = new HttpHeaders();
|
||||||
|
headers.setContentType(MediaType.valueOf("image/webp"));
|
||||||
|
headers.setCacheControl("public, max-age=86400"); // 24시간 캐시
|
||||||
|
|
||||||
|
return ResponseEntity.ok()
|
||||||
|
.headers(headers)
|
||||||
|
.body(resource);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/enc/{z}/{x}/{y}.webp")
|
||||||
|
public ResponseEntity<Resource> getEncTile(
|
||||||
|
@PathVariable int z,
|
||||||
|
@PathVariable int x,
|
||||||
|
@PathVariable int y) {
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 안전한 경로 생성
|
||||||
|
Path tilePath = Paths.get(TILE_ENC_PATH, String.valueOf(z),
|
||||||
|
String.valueOf(x), y + ".webp");
|
||||||
|
File tileFile = tilePath.toFile();
|
||||||
|
|
||||||
|
if (!tileFile.exists() || !tileFile.isFile()) {
|
||||||
|
return ResponseEntity.notFound().build();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 경로 탐색 공격 방지
|
||||||
|
if (!tileFile.getCanonicalPath().startsWith(new File(TILE_ENC_PATH).getCanonicalPath())) {
|
||||||
|
return ResponseEntity.status(HttpStatus.FORBIDDEN).build();
|
||||||
|
}
|
||||||
|
|
||||||
|
FileSystemResource resource = new FileSystemResource(tileFile);
|
||||||
|
|
||||||
|
HttpHeaders headers = new HttpHeaders();
|
||||||
|
headers.setContentType(MediaType.valueOf("image/webp"));
|
||||||
|
headers.setCacheControl("public, max-age=86400"); // 24시간 캐시
|
||||||
|
|
||||||
|
return ResponseEntity.ok()
|
||||||
|
.headers(headers)
|
||||||
|
.body(resource);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/health")
|
||||||
|
public ResponseEntity<String> checkTileService() {
|
||||||
|
File baseDir = new File(TILE_BASE_PATH);
|
||||||
|
if (baseDir.exists() && baseDir.isDirectory()) {
|
||||||
|
return ResponseEntity.ok("Tile service is operational. Base path: " + TILE_BASE_PATH);
|
||||||
|
} else {
|
||||||
|
return ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE)
|
||||||
|
.body("Tile directory not found: " + TILE_BASE_PATH);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,125 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.controller;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.gis.dto.TileAggregationRequest;
|
||||||
|
import gc.mda.signal_batch.domain.gis.dto.TileAggregationResponse;
|
||||||
|
import gc.mda.signal_batch.domain.gis.service.TileAggregationService;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.Parameter;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.format.annotation.DateTimeFormat;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.validation.annotation.Validated;
|
||||||
|
import org.springframework.web.bind.annotation.*;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/tiles")
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@Tag(name = "Tile Aggregation API", description = "대해구/소해구 기반 선박 집계 데이터 조회 API")
|
||||||
|
@Validated
|
||||||
|
public class TileAggregationController {
|
||||||
|
|
||||||
|
private final TileAggregationService tileAggregationService;
|
||||||
|
|
||||||
|
@Operation(
|
||||||
|
summary = "타일별 선박 집계 조회",
|
||||||
|
description = """
|
||||||
|
지정된 기간 동안의 타일별 선박 집계 정보를 조회합니다.
|
||||||
|
- tile_id가 지정된 경우: 해당 타일(대해구/소해구)의 상세 정보 반환
|
||||||
|
- tile_id가 없는 경우: 전체 대해구 요약 정보 반환
|
||||||
|
|
||||||
|
중복 제거 정책:
|
||||||
|
- 동일 선박이 여러 시간대에 나타날 경우 최신 위치만 포함
|
||||||
|
- unique_vessels에는 각 선박의 마지막 위치 정보가 포함됨
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
@GetMapping("/aggregation")
|
||||||
|
public ResponseEntity<TileAggregationResponse> getTileAggregation(
|
||||||
|
@Parameter(description = "조회 시작 시간 (yyyy-MM-dd'T'HH:mm:ss)", required = true, example = "2025-01-18T00:00:00")
|
||||||
|
@RequestParam
|
||||||
|
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
|
||||||
|
LocalDateTime fromDate,
|
||||||
|
|
||||||
|
@Parameter(description = "조회 종료 시간 (yyyy-MM-dd'T'HH:mm:ss)", required = true, example = "2025-01-18T23:59:59")
|
||||||
|
@RequestParam
|
||||||
|
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
|
||||||
|
LocalDateTime toDate,
|
||||||
|
|
||||||
|
@Parameter(description = "타일 ID (대해구: H238, 소해구: H238_S5)", required = false, example = "H238")
|
||||||
|
@RequestParam(required = false)
|
||||||
|
String tileId
|
||||||
|
) {
|
||||||
|
log.info("Tile aggregation request - fromDate: {}, toDate: {}, tileId: {}", fromDate, toDate, tileId);
|
||||||
|
|
||||||
|
// 유효성 검증
|
||||||
|
if (fromDate.isAfter(toDate)) {
|
||||||
|
throw new IllegalArgumentException("fromDate cannot be after toDate");
|
||||||
|
}
|
||||||
|
|
||||||
|
TileAggregationRequest request = TileAggregationRequest.builder()
|
||||||
|
.fromDate(fromDate)
|
||||||
|
.toDate(toDate)
|
||||||
|
.tileId(tileId)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
TileAggregationResponse response = tileAggregationService.getTileAggregation(request);
|
||||||
|
|
||||||
|
return ResponseEntity.ok(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(
|
||||||
|
summary = "특정 타일의 시계열 집계 조회",
|
||||||
|
description = "지정된 타일의 시간대별 선박 변화 추이를 조회합니다."
|
||||||
|
)
|
||||||
|
@GetMapping("/aggregation/{tileId}/timeseries")
|
||||||
|
public ResponseEntity<Object> getTileTimeSeries(
|
||||||
|
@Parameter(description = "타일 ID", required = true, example = "H238")
|
||||||
|
@PathVariable String tileId,
|
||||||
|
|
||||||
|
@Parameter(description = "조회 시작 시간", required = true)
|
||||||
|
@RequestParam
|
||||||
|
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
|
||||||
|
LocalDateTime fromDate,
|
||||||
|
|
||||||
|
@Parameter(description = "조회 종료 시간", required = true)
|
||||||
|
@RequestParam
|
||||||
|
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
|
||||||
|
LocalDateTime toDate,
|
||||||
|
|
||||||
|
@Parameter(description = "집계 간격 (분)", required = false, example = "60")
|
||||||
|
@RequestParam(defaultValue = "60")
|
||||||
|
Integer intervalMinutes
|
||||||
|
) {
|
||||||
|
log.info("Tile timeseries request - tileId: {}, fromDate: {}, toDate: {}, interval: {} minutes",
|
||||||
|
tileId, fromDate, toDate, intervalMinutes);
|
||||||
|
|
||||||
|
// TODO: 시계열 데이터 조회 서비스 구현
|
||||||
|
return ResponseEntity.ok(Map.of(
|
||||||
|
"message", "Time series endpoint - To be implemented",
|
||||||
|
"tileId", tileId,
|
||||||
|
"fromDate", fromDate,
|
||||||
|
"toDate", toDate,
|
||||||
|
"intervalMinutes", intervalMinutes
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(
|
||||||
|
summary = "타일 정보 조회",
|
||||||
|
description = "타일 ID로 타일의 메타데이터(대해구/소해구 정보, 경계 좌표 등)를 조회합니다."
|
||||||
|
)
|
||||||
|
@GetMapping("/info/{tileId}")
|
||||||
|
public ResponseEntity<Object> getTileInfo(
|
||||||
|
@Parameter(description = "타일 ID", required = true, example = "H238")
|
||||||
|
@PathVariable String tileId
|
||||||
|
) {
|
||||||
|
log.info("Tile info request - tileId: {}", tileId);
|
||||||
|
|
||||||
|
return ResponseEntity.ok(tileAggregationService.getTileInfo(tileId));
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,27 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.dto;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
public class GisBoundaryResponse {
|
||||||
|
@JsonProperty("haegu_no")
|
||||||
|
private Integer haeguNo;
|
||||||
|
|
||||||
|
@JsonProperty("area_id")
|
||||||
|
private String areaId;
|
||||||
|
|
||||||
|
@JsonProperty("area_name")
|
||||||
|
private String areaName;
|
||||||
|
|
||||||
|
@JsonProperty("geom_json")
|
||||||
|
private String geomJson; // GeoJSON 형식의 geometry
|
||||||
|
|
||||||
|
@JsonProperty("center_lat")
|
||||||
|
private Double centerLat;
|
||||||
|
|
||||||
|
@JsonProperty("center_lon")
|
||||||
|
private Double centerLon;
|
||||||
|
}
|
||||||
@ -0,0 +1,18 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.dto;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class TileAggregationRequest {
|
||||||
|
private LocalDateTime fromDate;
|
||||||
|
private LocalDateTime toDate;
|
||||||
|
private String tileId;
|
||||||
|
}
|
||||||
@ -0,0 +1,171 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "타일 집계 응답")
|
||||||
|
public class TileAggregationResponse {
|
||||||
|
|
||||||
|
@Schema(description = "요청 정보")
|
||||||
|
private RequestInfo request;
|
||||||
|
|
||||||
|
@Schema(description = "집계 요약")
|
||||||
|
private AggregationSummary summary;
|
||||||
|
|
||||||
|
@Schema(description = "타일별 상세 데이터")
|
||||||
|
private List<TileDetail> tiles;
|
||||||
|
|
||||||
|
@Schema(description = "응답 생성 시간")
|
||||||
|
private LocalDateTime responseTime;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "요청 정보")
|
||||||
|
public static class RequestInfo {
|
||||||
|
@Schema(description = "조회 시작 시간")
|
||||||
|
private LocalDateTime fromDate;
|
||||||
|
|
||||||
|
@Schema(description = "조회 종료 시간")
|
||||||
|
private LocalDateTime toDate;
|
||||||
|
|
||||||
|
@Schema(description = "조회 타일 ID (없으면 전체)")
|
||||||
|
private String tileId;
|
||||||
|
|
||||||
|
@Schema(description = "타일 레벨 (0: 대해구, 1: 소해구)")
|
||||||
|
private Integer tileLevel;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "집계 요약")
|
||||||
|
public static class AggregationSummary {
|
||||||
|
@Schema(description = "전체 타일 수")
|
||||||
|
private Integer totalTiles;
|
||||||
|
|
||||||
|
@Schema(description = "전체 고유 선박 수")
|
||||||
|
private Integer totalUniqueVessels;
|
||||||
|
|
||||||
|
@Schema(description = "조회된 타임버킷 수")
|
||||||
|
private Integer timeBuckets;
|
||||||
|
|
||||||
|
@Schema(description = "평균 선박 밀도")
|
||||||
|
private Double avgVesselDensity;
|
||||||
|
|
||||||
|
@Schema(description = "최대 선박 수를 가진 타일 ID")
|
||||||
|
private String maxVesselTileId;
|
||||||
|
|
||||||
|
@Schema(description = "최대 선박 수")
|
||||||
|
private Integer maxVesselCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "타일 상세 정보")
|
||||||
|
public static class TileDetail {
|
||||||
|
@Schema(description = "타일 ID", example = "H238")
|
||||||
|
private String tileId;
|
||||||
|
|
||||||
|
@Schema(description = "타일 레벨 (0: 대해구, 1: 소해구)")
|
||||||
|
private Integer tileLevel;
|
||||||
|
|
||||||
|
@Schema(description = "대해구 번호")
|
||||||
|
private Integer haeguNo;
|
||||||
|
|
||||||
|
@Schema(description = "소해구 번호 (소해구인 경우)")
|
||||||
|
private Integer sohaeguNo;
|
||||||
|
|
||||||
|
@Schema(description = "고유 선박 수")
|
||||||
|
private Integer vesselCount;
|
||||||
|
|
||||||
|
@Schema(description = "평균 선박 밀도 (선박수/km²)")
|
||||||
|
private Double avgDensity;
|
||||||
|
|
||||||
|
@Schema(description = "선박별 상세 정보", example = """
|
||||||
|
{
|
||||||
|
"000001_413409000": {
|
||||||
|
"lat": 38.507038,
|
||||||
|
"lon": 121.69615,
|
||||||
|
"sog": 15.3,
|
||||||
|
"lastSeen": "2025-01-18T15:15:01"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
private Map<String, VesselInfo> uniqueVessels;
|
||||||
|
|
||||||
|
@Schema(description = "타일 경계 정보")
|
||||||
|
private TileBoundary boundary;
|
||||||
|
|
||||||
|
@Schema(description = "하위 타일 정보 (대해구인 경우)")
|
||||||
|
private List<TileDetail> subTiles;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "선박 정보")
|
||||||
|
public static class VesselInfo {
|
||||||
|
@Schema(description = "위도", example = "35.123456")
|
||||||
|
private Double lat;
|
||||||
|
|
||||||
|
@Schema(description = "경도", example = "129.123456")
|
||||||
|
private Double lon;
|
||||||
|
|
||||||
|
@Schema(description = "속도 (knots)", example = "12.5")
|
||||||
|
private Double sog;
|
||||||
|
|
||||||
|
@Schema(description = "마지막 확인 시간")
|
||||||
|
private LocalDateTime lastSeen;
|
||||||
|
|
||||||
|
@Schema(description = "선박명 (있는 경우)")
|
||||||
|
private String shipName;
|
||||||
|
|
||||||
|
@Schema(description = "MMSI (있는 경우)")
|
||||||
|
private String mmsi;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "타일 경계 정보")
|
||||||
|
public static class TileBoundary {
|
||||||
|
@Schema(description = "최소 위도")
|
||||||
|
private Double minLat;
|
||||||
|
|
||||||
|
@Schema(description = "최소 경도")
|
||||||
|
private Double minLon;
|
||||||
|
|
||||||
|
@Schema(description = "최대 위도")
|
||||||
|
private Double maxLat;
|
||||||
|
|
||||||
|
@Schema(description = "최대 경도")
|
||||||
|
private Double maxLon;
|
||||||
|
|
||||||
|
@Schema(description = "중심점 위도")
|
||||||
|
private Double centerLat;
|
||||||
|
|
||||||
|
@Schema(description = "중심점 경도")
|
||||||
|
private Double centerLon;
|
||||||
|
|
||||||
|
@Schema(description = "면적 (km²)")
|
||||||
|
private Double area;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,142 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.model;
|
||||||
|
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselData;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class TileStatistics implements java.io.Serializable {
|
||||||
|
private String tileId;
|
||||||
|
private Integer tileLevel;
|
||||||
|
private LocalDateTime timeBucket;
|
||||||
|
private Integer vesselCount;
|
||||||
|
private Map<String, VesselInfo> uniqueVessels;
|
||||||
|
private Long totalPoints;
|
||||||
|
private BigDecimal avgSog;
|
||||||
|
private BigDecimal maxSog;
|
||||||
|
private BigDecimal vesselDensity;
|
||||||
|
private LocalDateTime createdAt;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class VesselInfo implements java.io.Serializable {
|
||||||
|
private Double lat;
|
||||||
|
private Double lon;
|
||||||
|
private BigDecimal sog;
|
||||||
|
private LocalDateTime lastSeen;
|
||||||
|
}
|
||||||
|
|
||||||
|
public TileStatistics(String tileId, Integer level, LocalDateTime timeBucket) {
|
||||||
|
this.tileId = tileId;
|
||||||
|
this.tileLevel = level;
|
||||||
|
this.timeBucket = timeBucket;
|
||||||
|
this.uniqueVessels = new HashMap<>();
|
||||||
|
this.totalPoints = 0L;
|
||||||
|
this.avgSog = BigDecimal.ZERO;
|
||||||
|
this.maxSog = BigDecimal.ZERO;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void addVesselData(VesselData data) {
|
||||||
|
String vesselKey = data.getVesselKey();
|
||||||
|
|
||||||
|
VesselInfo info = VesselInfo.builder()
|
||||||
|
.lat(data.getLat())
|
||||||
|
.lon(data.getLon())
|
||||||
|
.sog(data.getSog())
|
||||||
|
.lastSeen(data.getMessageTime())
|
||||||
|
.build();
|
||||||
|
|
||||||
|
uniqueVessels.put(vesselKey, info);
|
||||||
|
// totalPoints 제거 - 최신 위치만 사용
|
||||||
|
|
||||||
|
// Update statistics
|
||||||
|
if (data.getSog() != null) {
|
||||||
|
if (maxSog == null || data.getSog().compareTo(maxSog) > 0) {
|
||||||
|
maxSog = data.getSog();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
vesselCount = uniqueVessels.size();
|
||||||
|
|
||||||
|
// 평균 속도는 모든 선박의 현재 속도 평균
|
||||||
|
BigDecimal totalSog = BigDecimal.ZERO;
|
||||||
|
int sogCount = 0;
|
||||||
|
for (VesselInfo vessel : uniqueVessels.values()) {
|
||||||
|
if (vessel.getSog() != null) {
|
||||||
|
totalSog = totalSog.add(vessel.getSog());
|
||||||
|
sogCount++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (sogCount > 0) {
|
||||||
|
avgSog = totalSog.divide(BigDecimal.valueOf(sogCount), 2, BigDecimal.ROUND_HALF_UP);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* unique_vessels를 JSON 문자열로 변환
|
||||||
|
*/
|
||||||
|
public String getUniqueVesselsJson() {
|
||||||
|
if (uniqueVessels == null || uniqueVessels.isEmpty()) {
|
||||||
|
return "{}";
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
ObjectMapper mapper = new ObjectMapper();
|
||||||
|
return mapper.writeValueAsString(uniqueVessels);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
log.error("Failed to convert unique vessels to JSON", e);
|
||||||
|
return "{}";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 타일 통계 병합 (대해구 집계용)
|
||||||
|
*/
|
||||||
|
public void merge(TileStatistics other) {
|
||||||
|
if (other == null) return;
|
||||||
|
|
||||||
|
// unique vessels 병합
|
||||||
|
if (other.uniqueVessels != null) {
|
||||||
|
this.uniqueVessels.putAll(other.uniqueVessels);
|
||||||
|
}
|
||||||
|
|
||||||
|
// vessel count 업데이트
|
||||||
|
this.vesselCount = this.uniqueVessels.size();
|
||||||
|
|
||||||
|
// total points 합산
|
||||||
|
this.totalPoints += other.totalPoints;
|
||||||
|
|
||||||
|
// max sog 업데이트
|
||||||
|
if (other.maxSog != null && (this.maxSog == null || other.maxSog.compareTo(this.maxSog) > 0)) {
|
||||||
|
this.maxSog = other.maxSog;
|
||||||
|
}
|
||||||
|
|
||||||
|
// average sog 재계산 (가중 평균)
|
||||||
|
if (this.totalPoints > 0 && other.avgSog != null) {
|
||||||
|
BigDecimal thisWeight = BigDecimal.valueOf(this.totalPoints - other.totalPoints);
|
||||||
|
BigDecimal otherWeight = BigDecimal.valueOf(other.totalPoints);
|
||||||
|
|
||||||
|
BigDecimal weightedSum = this.avgSog.multiply(thisWeight)
|
||||||
|
.add(other.avgSog.multiply(otherWeight));
|
||||||
|
|
||||||
|
this.avgSog = weightedSum.divide(BigDecimal.valueOf(this.totalPoints), 2, BigDecimal.ROUND_HALF_UP);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,609 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.service;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.gis.dto.GisBoundaryResponse;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.TrackResponse;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.VesselStatsResponse;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.VesselTracksRequest;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.CompactVesselTrack;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.IntegrationVessel;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.service.IntegrationVesselService;
|
||||||
|
import gc.mda.signal_batch.global.util.IntegrationSignalConstants;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Service
|
||||||
|
public class GisService {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
private final IntegrationVesselService integrationVesselService;
|
||||||
|
|
||||||
|
public GisService(@Qualifier("queryDataSource") DataSource queryDataSource,
|
||||||
|
IntegrationVesselService integrationVesselService) {
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
this.integrationVesselService = integrationVesselService;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<GisBoundaryResponse> getHaeguBoundaries() {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT haegu_no, center_lat, center_lon,
|
||||||
|
public.ST_AsGeoJSON(geom) as geom_json
|
||||||
|
FROM signal.t_haegu_definitions
|
||||||
|
ORDER BY haegu_no
|
||||||
|
""";
|
||||||
|
|
||||||
|
return jdbcTemplate.query(sql, (rs, rowNum) ->
|
||||||
|
GisBoundaryResponse.builder()
|
||||||
|
.haeguNo(rs.getInt("haegu_no"))
|
||||||
|
.centerLat(rs.getDouble("center_lat"))
|
||||||
|
.centerLon(rs.getDouble("center_lon"))
|
||||||
|
.geomJson(rs.getString("geom_json"))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map<Integer, VesselStatsResponse> getHaeguVesselStats(int minutes) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT haegu_no,
|
||||||
|
COUNT(DISTINCT CONCAT(sig_src_cd, '_', target_id)) as vessel_count,
|
||||||
|
COALESCE(SUM(distance_nm), 0) as total_distance,
|
||||||
|
COALESCE(AVG(avg_speed), 0) as avg_speed,
|
||||||
|
COUNT(*) as active_tracks
|
||||||
|
FROM signal.t_grid_vessel_tracks
|
||||||
|
WHERE time_bucket >= NOW() - INTERVAL '%d minutes'
|
||||||
|
GROUP BY haegu_no
|
||||||
|
""".formatted(minutes);
|
||||||
|
|
||||||
|
Map<Integer, VesselStatsResponse> result = new HashMap<>();
|
||||||
|
|
||||||
|
jdbcTemplate.query(sql, rs -> {
|
||||||
|
result.put(rs.getInt("haegu_no"),
|
||||||
|
VesselStatsResponse.builder()
|
||||||
|
.vesselCount(rs.getInt("vessel_count"))
|
||||||
|
.totalDistance(rs.getBigDecimal("total_distance"))
|
||||||
|
.avgSpeed(rs.getBigDecimal("avg_speed"))
|
||||||
|
.activeTracks(rs.getInt("active_tracks"))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<GisBoundaryResponse> getAreaBoundaries() {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT area_id, area_name,
|
||||||
|
public.ST_Y(public.ST_Centroid(area_geom)) as center_lat,
|
||||||
|
public.ST_X(public.ST_Centroid(area_geom)) as center_lon,
|
||||||
|
public.ST_AsGeoJSON(area_geom) as geom_json
|
||||||
|
FROM signal.t_areas
|
||||||
|
ORDER BY area_id
|
||||||
|
""";
|
||||||
|
|
||||||
|
return jdbcTemplate.query(sql, (rs, rowNum) ->
|
||||||
|
GisBoundaryResponse.builder()
|
||||||
|
.areaId(rs.getString("area_id"))
|
||||||
|
.areaName(rs.getString("area_name"))
|
||||||
|
.centerLat(rs.getDouble("center_lat"))
|
||||||
|
.centerLon(rs.getDouble("center_lon"))
|
||||||
|
.geomJson(rs.getString("geom_json"))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map<String, VesselStatsResponse> getAreaVesselStats(int minutes) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT area_id,
|
||||||
|
COUNT(DISTINCT CONCAT(sig_src_cd, '_', target_id)) as vessel_count,
|
||||||
|
COALESCE(SUM(distance_nm), 0) as total_distance,
|
||||||
|
COALESCE(AVG(avg_speed), 0) as avg_speed,
|
||||||
|
COUNT(*) as active_tracks
|
||||||
|
FROM signal.t_area_vessel_tracks
|
||||||
|
WHERE time_bucket >= NOW() - INTERVAL '%d minutes'
|
||||||
|
GROUP BY area_id
|
||||||
|
""".formatted(minutes);
|
||||||
|
|
||||||
|
Map<String, VesselStatsResponse> result = new HashMap<>();
|
||||||
|
|
||||||
|
jdbcTemplate.query(sql, rs -> {
|
||||||
|
result.put(rs.getString("area_id"),
|
||||||
|
VesselStatsResponse.builder()
|
||||||
|
.vesselCount(rs.getInt("vessel_count"))
|
||||||
|
.totalDistance(rs.getBigDecimal("total_distance"))
|
||||||
|
.avgSpeed(rs.getBigDecimal("avg_speed"))
|
||||||
|
.activeTracks(rs.getInt("active_tracks"))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<TrackResponse> getHaeguTracks(Integer haeguNo, int minutes) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
List<TrackResponse> allTracks = new ArrayList<>();
|
||||||
|
|
||||||
|
LocalDateTime now = LocalDateTime.now();
|
||||||
|
LocalDateTime startTime = now.minusMinutes(minutes);
|
||||||
|
|
||||||
|
// 1시간 이상인 경우 여러 테이블 조합
|
||||||
|
if (minutes > 60) {
|
||||||
|
// 현재 시간의 정시
|
||||||
|
LocalDateTime currentHour = now.withMinute(0).withSecond(0).withNano(0);
|
||||||
|
|
||||||
|
if (minutes <= 1440) { // 24시간 이하
|
||||||
|
// 1. hourly 테이블에서 과거 데이터 조회
|
||||||
|
String hourlySql = """
|
||||||
|
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
|
||||||
|
public.ST_AsText(t.track_geom) as track_geom,
|
||||||
|
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
|
||||||
|
FROM signal.t_vessel_tracks_hourly t
|
||||||
|
WHERE EXISTS (
|
||||||
|
SELECT 1 FROM signal.t_grid_vessel_tracks g
|
||||||
|
WHERE g.sig_src_cd = t.sig_src_cd
|
||||||
|
AND g.target_id = t.target_id
|
||||||
|
AND g.haegu_no = %d
|
||||||
|
AND g.time_bucket >= '%s'
|
||||||
|
)
|
||||||
|
AND t.time_bucket >= '%s'
|
||||||
|
AND t.time_bucket < '%s'
|
||||||
|
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
|
||||||
|
""".formatted(haeguNo, startTime, startTime, currentHour);
|
||||||
|
|
||||||
|
allTracks.addAll(jdbcTemplate.query(hourlySql, this::mapTrackResponse));
|
||||||
|
} else {
|
||||||
|
// daily 테이블 사용 (추후 구현)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. 5min 테이블에서 최근 데이터 조회 (아직 집계되지 않은 부분)
|
||||||
|
String recentSql = """
|
||||||
|
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
|
||||||
|
public.ST_AsText(t.track_geom) as track_geom,
|
||||||
|
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
|
||||||
|
FROM signal.t_vessel_tracks_5min t
|
||||||
|
WHERE EXISTS (
|
||||||
|
SELECT 1 FROM signal.t_grid_vessel_tracks g
|
||||||
|
WHERE g.sig_src_cd = t.sig_src_cd
|
||||||
|
AND g.target_id = t.target_id
|
||||||
|
AND g.haegu_no = %d
|
||||||
|
AND g.time_bucket >= '%s'
|
||||||
|
)
|
||||||
|
AND t.time_bucket >= '%s'
|
||||||
|
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
|
||||||
|
""".formatted(haeguNo, currentHour, currentHour);
|
||||||
|
|
||||||
|
allTracks.addAll(jdbcTemplate.query(recentSql, this::mapTrackResponse));
|
||||||
|
|
||||||
|
} else {
|
||||||
|
// 1시간 이하는 5분 테이블만 사용
|
||||||
|
String sql = """
|
||||||
|
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
|
||||||
|
public.ST_AsText(t.track_geom) as track_geom,
|
||||||
|
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
|
||||||
|
FROM signal.t_vessel_tracks_5min t
|
||||||
|
WHERE EXISTS (
|
||||||
|
SELECT 1 FROM signal.t_grid_vessel_tracks g
|
||||||
|
WHERE g.sig_src_cd = t.sig_src_cd
|
||||||
|
AND g.target_id = t.target_id
|
||||||
|
AND g.haegu_no = %d
|
||||||
|
AND g.time_bucket >= NOW() - INTERVAL '%d minutes'
|
||||||
|
)
|
||||||
|
AND t.time_bucket >= NOW() - INTERVAL '%d minutes'
|
||||||
|
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
|
||||||
|
""".formatted(haeguNo, minutes, minutes);
|
||||||
|
|
||||||
|
allTracks = jdbcTemplate.query(sql, this::mapTrackResponse);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.debug("Fetched {} tracks for haegu {} in last {} minutes",
|
||||||
|
allTracks.size(), haeguNo, minutes);
|
||||||
|
|
||||||
|
return allTracks;
|
||||||
|
}
|
||||||
|
|
||||||
|
private TrackResponse mapTrackResponse(ResultSet rs, int rowNum) throws SQLException {
|
||||||
|
return TrackResponse.builder()
|
||||||
|
.sigSrcCd(rs.getString("sig_src_cd"))
|
||||||
|
.targetId(rs.getString("target_id"))
|
||||||
|
.timeBucket(rs.getObject("time_bucket", LocalDateTime.class))
|
||||||
|
.trackGeom(rs.getString("track_geom"))
|
||||||
|
.distanceNm(rs.getBigDecimal("distance_nm"))
|
||||||
|
.avgSpeed(rs.getBigDecimal("avg_speed"))
|
||||||
|
.maxSpeed(rs.getBigDecimal("max_speed"))
|
||||||
|
.pointCount(rs.getInt("point_count"))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<TrackResponse> getAreaTracks(String areaId, int minutes) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
List<TrackResponse> allTracks = new ArrayList<>();
|
||||||
|
|
||||||
|
LocalDateTime now = LocalDateTime.now();
|
||||||
|
LocalDateTime startTime = now.minusMinutes(minutes);
|
||||||
|
|
||||||
|
// 1시간 이상인 경우 여러 테이블 조합
|
||||||
|
if (minutes > 60) {
|
||||||
|
// 현재 시간의 정시
|
||||||
|
LocalDateTime currentHour = now.withMinute(0).withSecond(0).withNano(0);
|
||||||
|
|
||||||
|
if (minutes <= 1440) { // 24시간 이하
|
||||||
|
// 1. hourly 테이블에서 과거 데이터 조회
|
||||||
|
String hourlySql = """
|
||||||
|
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
|
||||||
|
public.ST_AsText(t.track_geom) as track_geom,
|
||||||
|
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
|
||||||
|
FROM signal.t_vessel_tracks_hourly t
|
||||||
|
WHERE EXISTS (
|
||||||
|
SELECT 1 FROM signal.t_area_vessel_tracks a
|
||||||
|
WHERE a.sig_src_cd = t.sig_src_cd
|
||||||
|
AND a.target_id = t.target_id
|
||||||
|
AND a.area_id = '%s'
|
||||||
|
AND a.time_bucket >= '%s'
|
||||||
|
)
|
||||||
|
AND t.time_bucket >= '%s'
|
||||||
|
AND t.time_bucket < '%s'
|
||||||
|
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
|
||||||
|
""".formatted(areaId, startTime, startTime, currentHour);
|
||||||
|
|
||||||
|
allTracks.addAll(jdbcTemplate.query(hourlySql, this::mapTrackResponse));
|
||||||
|
} else {
|
||||||
|
// daily 테이블 사용 (추후 구현)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. 5min 테이블에서 최근 데이터 조회 (아직 집계되지 않은 부분)
|
||||||
|
String recentSql = """
|
||||||
|
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
|
||||||
|
public.ST_AsText(t.track_geom) as track_geom,
|
||||||
|
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
|
||||||
|
FROM signal.t_vessel_tracks_5min t
|
||||||
|
WHERE EXISTS (
|
||||||
|
SELECT 1 FROM signal.t_area_vessel_tracks a
|
||||||
|
WHERE a.sig_src_cd = t.sig_src_cd
|
||||||
|
AND a.target_id = t.target_id
|
||||||
|
AND a.area_id = '%s'
|
||||||
|
AND a.time_bucket >= '%s'
|
||||||
|
)
|
||||||
|
AND t.time_bucket >= '%s'
|
||||||
|
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
|
||||||
|
""".formatted(areaId, currentHour, currentHour);
|
||||||
|
|
||||||
|
allTracks.addAll(jdbcTemplate.query(recentSql, this::mapTrackResponse));
|
||||||
|
|
||||||
|
} else {
|
||||||
|
// 1시간 이하는 5분 테이블만 사용
|
||||||
|
String sql = """
|
||||||
|
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
|
||||||
|
public.ST_AsText(t.track_geom) as track_geom,
|
||||||
|
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
|
||||||
|
FROM signal.t_vessel_tracks_5min t
|
||||||
|
WHERE EXISTS (
|
||||||
|
SELECT 1 FROM signal.t_area_vessel_tracks a
|
||||||
|
WHERE a.sig_src_cd = t.sig_src_cd
|
||||||
|
AND a.target_id = t.target_id
|
||||||
|
AND a.area_id = '%s'
|
||||||
|
AND a.time_bucket >= NOW() - INTERVAL '%d minutes'
|
||||||
|
)
|
||||||
|
AND t.time_bucket >= NOW() - INTERVAL '%d minutes'
|
||||||
|
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
|
||||||
|
""".formatted(areaId, minutes, minutes);
|
||||||
|
|
||||||
|
allTracks = jdbcTemplate.query(sql, this::mapTrackResponse);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.debug("Fetched {} tracks for area {} in last {} minutes",
|
||||||
|
allTracks.size(), areaId, minutes);
|
||||||
|
|
||||||
|
return allTracks;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<CompactVesselTrack> getVesselTracks(VesselTracksRequest request) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
List<CompactVesselTrack> results = new ArrayList<>();
|
||||||
|
|
||||||
|
LocalDateTime startTime = request.getStartTime();
|
||||||
|
LocalDateTime endTime = request.getEndTime();
|
||||||
|
|
||||||
|
for (VesselTracksRequest.VesselIdentifier vessel : request.getVessels()) {
|
||||||
|
// String vesselId = vessel.getSigSrcCd() + "_" + vessel.getTargetId();
|
||||||
|
|
||||||
|
// Determine which tables to query based on time range
|
||||||
|
Duration duration = Duration.between(startTime, endTime);
|
||||||
|
long hours = duration.toHours();
|
||||||
|
|
||||||
|
List<TrackResponse> tracks = new ArrayList<>();
|
||||||
|
LocalDateTime now = LocalDateTime.now();
|
||||||
|
LocalDateTime currentHour = now.withMinute(0).withSecond(0).withNano(0);
|
||||||
|
LocalDateTime currentDay = now.withHour(0).withMinute(0).withSecond(0).withNano(0);
|
||||||
|
|
||||||
|
// Query daily table first (oldest data)
|
||||||
|
if (hours > 24 && startTime.isBefore(currentDay)) {
|
||||||
|
// Query daily table - time_bucket is DATE type
|
||||||
|
String sqlDaily = """
|
||||||
|
SELECT sig_src_cd, target_id,
|
||||||
|
time_bucket::timestamp as time_bucket,
|
||||||
|
public.ST_AsText(track_geom) as track_geom,
|
||||||
|
distance_nm, avg_speed, max_speed, point_count
|
||||||
|
FROM signal.t_vessel_tracks_daily
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
AND time_bucket BETWEEN ?::date AND ?::date
|
||||||
|
ORDER BY time_bucket
|
||||||
|
""";
|
||||||
|
|
||||||
|
LocalDateTime queryDailyEnd = endTime.isBefore(currentDay) ? endTime : currentDay.minusDays(1);
|
||||||
|
tracks.addAll(jdbcTemplate.query(sqlDaily, this::mapTrackResponse,
|
||||||
|
vessel.getSigSrcCd(), vessel.getTargetId(),
|
||||||
|
Timestamp.valueOf(startTime), Timestamp.valueOf(queryDailyEnd)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query hourly table (middle-aged data)
|
||||||
|
if (hours > 1 && startTime.isBefore(currentHour)) {
|
||||||
|
// Query hourly table
|
||||||
|
String sqlHourly = """
|
||||||
|
SELECT sig_src_cd, target_id, time_bucket,
|
||||||
|
public.ST_AsText(track_geom) as track_geom,
|
||||||
|
distance_nm, avg_speed, max_speed, point_count
|
||||||
|
FROM signal.t_vessel_tracks_hourly
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
ORDER BY time_bucket
|
||||||
|
""";
|
||||||
|
|
||||||
|
// Hourly data should start from currentDay if daily data was queried
|
||||||
|
LocalDateTime queryHourlyStart = hours > 24 ? currentDay : startTime;
|
||||||
|
LocalDateTime queryHourlyEnd = endTime.isBefore(currentHour) ? endTime : currentHour.minusHours(1);
|
||||||
|
if (queryHourlyEnd.isAfter(queryHourlyStart)) {
|
||||||
|
tracks.addAll(jdbcTemplate.query(sqlHourly, this::mapTrackResponse,
|
||||||
|
vessel.getSigSrcCd(), vessel.getTargetId(),
|
||||||
|
Timestamp.valueOf(queryHourlyStart), Timestamp.valueOf(queryHourlyEnd)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query 5min table last (newest data)
|
||||||
|
if (hours <= 1 || endTime.isAfter(currentHour)) {
|
||||||
|
// Query 5min table for recent data
|
||||||
|
String sql5min = """
|
||||||
|
SELECT sig_src_cd, target_id, time_bucket,
|
||||||
|
public.ST_AsText(track_geom) as track_geom,
|
||||||
|
distance_nm, avg_speed, max_speed, point_count
|
||||||
|
FROM signal.t_vessel_tracks_5min
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
ORDER BY time_bucket
|
||||||
|
""";
|
||||||
|
|
||||||
|
LocalDateTime query5minStart = startTime.isAfter(currentHour) ? startTime : currentHour;
|
||||||
|
if (endTime.isAfter(currentHour)) {
|
||||||
|
tracks.addAll(jdbcTemplate.query(sql5min, this::mapTrackResponse,
|
||||||
|
vessel.getSigSrcCd(), vessel.getTargetId(),
|
||||||
|
Timestamp.valueOf(query5minStart), Timestamp.valueOf(endTime)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort all tracks by time_bucket to ensure proper ordering
|
||||||
|
tracks.sort((t1, t2) -> t1.getTimeBucket().compareTo(t2.getTimeBucket()));
|
||||||
|
|
||||||
|
if (!tracks.isEmpty()) {
|
||||||
|
CompactVesselTrack compactTrack = buildCompactVesselTrack(vessel, tracks);
|
||||||
|
results.add(compactTrack);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 통합선박 필터링 적용 (isIntegration = "1" 이고 기능이 활성화된 경우)
|
||||||
|
if ("1".equals(request.getIsIntegration()) && integrationVesselService.isEnabled()) {
|
||||||
|
results = filterByIntegration(results);
|
||||||
|
}
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 통합선박 기준 필터링 (REST API용)
|
||||||
|
*/
|
||||||
|
private List<CompactVesselTrack> filterByIntegration(List<CompactVesselTrack> tracks) {
|
||||||
|
if (tracks == null || tracks.isEmpty()) {
|
||||||
|
return tracks;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. 모든 트랙의 통합선박 정보 조회 (캐시에서)
|
||||||
|
Map<String, IntegrationVessel> vesselIntegrations = new HashMap<>();
|
||||||
|
for (CompactVesselTrack track : tracks) {
|
||||||
|
String key = track.getSigSrcCd() + "_" + track.getTargetId();
|
||||||
|
if (!vesselIntegrations.containsKey(key)) {
|
||||||
|
IntegrationVessel integration = integrationVesselService.findByVessel(
|
||||||
|
track.getSigSrcCd(), track.getTargetId()
|
||||||
|
);
|
||||||
|
vesselIntegrations.put(key, integration);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. 통합선박별 그룹핑
|
||||||
|
Map<Long, List<CompactVesselTrack>> groupedByIntegration = new HashMap<>();
|
||||||
|
Map<Long, IntegrationVessel> integrationMap = new HashMap<>();
|
||||||
|
|
||||||
|
long tempSeq = -1;
|
||||||
|
for (CompactVesselTrack track : tracks) {
|
||||||
|
String key = track.getSigSrcCd() + "_" + track.getTargetId();
|
||||||
|
IntegrationVessel integration = vesselIntegrations.get(key);
|
||||||
|
|
||||||
|
Long seq;
|
||||||
|
if (integration != null) {
|
||||||
|
seq = integration.getIntgrSeq();
|
||||||
|
integrationMap.putIfAbsent(seq, integration);
|
||||||
|
} else {
|
||||||
|
seq = tempSeq--;
|
||||||
|
}
|
||||||
|
|
||||||
|
groupedByIntegration.computeIfAbsent(seq, k -> new ArrayList<>()).add(track);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. 각 그룹에서 최고 우선순위 신호만 선택
|
||||||
|
List<CompactVesselTrack> result = new ArrayList<>();
|
||||||
|
|
||||||
|
for (Map.Entry<Long, List<CompactVesselTrack>> entry : groupedByIntegration.entrySet()) {
|
||||||
|
Long seq = entry.getKey();
|
||||||
|
List<CompactVesselTrack> groupTracks = entry.getValue();
|
||||||
|
|
||||||
|
if (seq < 0) {
|
||||||
|
// 통합정보 없는 단독 선박
|
||||||
|
CompactVesselTrack firstTrack = groupTracks.get(0);
|
||||||
|
String soloIntegrationId = IntegrationSignalConstants.generateSoloIntegrationId(
|
||||||
|
firstTrack.getSigSrcCd(),
|
||||||
|
firstTrack.getTargetId()
|
||||||
|
);
|
||||||
|
groupTracks.forEach(t -> t.setIntegrationTargetId(soloIntegrationId));
|
||||||
|
result.addAll(groupTracks);
|
||||||
|
} else {
|
||||||
|
// 통합선박 → 존재하는 신호 중 최고 우선순위 선택
|
||||||
|
IntegrationVessel integration = integrationMap.get(seq);
|
||||||
|
|
||||||
|
java.util.Set<String> existingSigSrcCds = groupTracks.stream()
|
||||||
|
.map(CompactVesselTrack::getSigSrcCd)
|
||||||
|
.collect(java.util.stream.Collectors.toSet());
|
||||||
|
|
||||||
|
String selectedSigSrcCd = integrationVesselService.selectHighestPriorityFromExisting(existingSigSrcCds);
|
||||||
|
|
||||||
|
List<CompactVesselTrack> selectedTracks = groupTracks.stream()
|
||||||
|
.filter(t -> t.getSigSrcCd().equals(selectedSigSrcCd))
|
||||||
|
.collect(java.util.stream.Collectors.toList());
|
||||||
|
|
||||||
|
String integrationId = integration.generateIntegrationId();
|
||||||
|
selectedTracks.forEach(t -> t.setIntegrationTargetId(integrationId));
|
||||||
|
|
||||||
|
result.addAll(selectedTracks);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("[INTEGRATION_FILTER] REST API - Filtered {} tracks to {} tracks", tracks.size(), result.size());
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
private CompactVesselTrack buildCompactVesselTrack(
|
||||||
|
VesselTracksRequest.VesselIdentifier vessel,
|
||||||
|
List<TrackResponse> tracks) {
|
||||||
|
|
||||||
|
String vesselId = vessel.getSigSrcCd() + "_" + vessel.getTargetId();
|
||||||
|
List<double[]> geometry = new ArrayList<>();
|
||||||
|
List<String> timestamps = new ArrayList<>();
|
||||||
|
List<Double> speeds = new ArrayList<>();
|
||||||
|
double totalDistance = 0;
|
||||||
|
double maxSpeed = 0;
|
||||||
|
int totalPoints = 0;
|
||||||
|
|
||||||
|
// WKTReader reader = new WKTReader();
|
||||||
|
|
||||||
|
for (TrackResponse track : tracks) {
|
||||||
|
if (track.getTrackGeom() != null && !track.getTrackGeom().isEmpty()) {
|
||||||
|
try {
|
||||||
|
// Parse LineStringM
|
||||||
|
String wkt = track.getTrackGeom();
|
||||||
|
if (wkt.startsWith("LINESTRING M")) {
|
||||||
|
// Extract coordinate data from WKT
|
||||||
|
String coordsPart = wkt.substring("LINESTRING M(".length() + 1, wkt.length() - 1);
|
||||||
|
String[] points = coordsPart.split(",");
|
||||||
|
|
||||||
|
for (String point : points) {
|
||||||
|
String[] parts = point.trim().split("\\s+");
|
||||||
|
if (parts.length >= 3) {
|
||||||
|
double lon = Double.parseDouble(parts[0]);
|
||||||
|
double lat = Double.parseDouble(parts[1]);
|
||||||
|
String timestamp = parts[2]; // Unix timestamp as string
|
||||||
|
|
||||||
|
geometry.add(new double[]{lon, lat});
|
||||||
|
timestamps.add(timestamp);
|
||||||
|
|
||||||
|
// Add SOG value if available (could be from track data)
|
||||||
|
if (track.getAvgSpeed() != null) {
|
||||||
|
speeds.add(track.getAvgSpeed().doubleValue());
|
||||||
|
} else {
|
||||||
|
speeds.add(0.0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to parse track geometry: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (track.getDistanceNm() != null) {
|
||||||
|
totalDistance += track.getDistanceNm().doubleValue();
|
||||||
|
}
|
||||||
|
if (track.getMaxSpeed() != null && track.getMaxSpeed().doubleValue() > maxSpeed) {
|
||||||
|
maxSpeed = track.getMaxSpeed().doubleValue();
|
||||||
|
}
|
||||||
|
if (track.getPointCount() != null) {
|
||||||
|
totalPoints += track.getPointCount();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate average speed
|
||||||
|
double avgSpeed = speeds.stream()
|
||||||
|
.filter(s -> s > 0)
|
||||||
|
.mapToDouble(Double::doubleValue)
|
||||||
|
.average()
|
||||||
|
.orElse(0.0);
|
||||||
|
|
||||||
|
// Get vessel info
|
||||||
|
Map<String, String> vesselInfo = getVesselInfo(vessel.getSigSrcCd(), vessel.getTargetId());
|
||||||
|
|
||||||
|
return CompactVesselTrack.builder()
|
||||||
|
.vesselId(vesselId)
|
||||||
|
.sigSrcCd(vessel.getSigSrcCd())
|
||||||
|
.targetId(vessel.getTargetId())
|
||||||
|
.geometry(geometry)
|
||||||
|
.timestamps(timestamps)
|
||||||
|
.speeds(speeds)
|
||||||
|
.totalDistance(totalDistance)
|
||||||
|
.avgSpeed(avgSpeed)
|
||||||
|
.maxSpeed(maxSpeed)
|
||||||
|
.pointCount(geometry.size())
|
||||||
|
.shipName(vesselInfo.get("ship_name"))
|
||||||
|
.shipType(vesselInfo.get("ship_type"))
|
||||||
|
.shipKindCode(null) // Not available in current schema
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
private Map<String, String> getVesselInfo(String sigSrcCd, String targetId) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
try {
|
||||||
|
String sql = """
|
||||||
|
SELECT ship_nm as ship_name, ship_ty as ship_type
|
||||||
|
FROM signal.t_vessel_latest_position
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
LIMIT 1
|
||||||
|
""";
|
||||||
|
|
||||||
|
return jdbcTemplate.queryForMap(sql, sigSrcCd, targetId)
|
||||||
|
.entrySet().stream()
|
||||||
|
.collect(Collectors.toMap(
|
||||||
|
Map.Entry::getKey,
|
||||||
|
e -> e.getValue() != null ? e.getValue().toString() : ""
|
||||||
|
));
|
||||||
|
} catch (Exception e) {
|
||||||
|
return Map.of("ship_name", "", "ship_type", "");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,444 @@
|
|||||||
|
package gc.mda.signal_batch.domain.gis.service;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.type.TypeReference;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import gc.mda.signal_batch.domain.gis.dto.TileAggregationRequest;
|
||||||
|
import gc.mda.signal_batch.domain.gis.dto.TileAggregationResponse;
|
||||||
|
import gc.mda.signal_batch.domain.gis.dto.TileAggregationResponse.*;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class TileAggregationService {
|
||||||
|
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
private final ObjectMapper objectMapper;
|
||||||
|
|
||||||
|
public TileAggregationResponse getTileAggregation(TileAggregationRequest request) {
|
||||||
|
log.info("Processing tile aggregation request: {}", request);
|
||||||
|
|
||||||
|
RequestInfo requestInfo = RequestInfo.builder()
|
||||||
|
.fromDate(request.getFromDate())
|
||||||
|
.toDate(request.getToDate())
|
||||||
|
.tileId(request.getTileId())
|
||||||
|
.build();
|
||||||
|
|
||||||
|
List<TileDetail> tiles;
|
||||||
|
|
||||||
|
if (request.getTileId() != null) {
|
||||||
|
// 특정 타일 조회
|
||||||
|
tiles = getSpecificTileAggregation(request);
|
||||||
|
requestInfo.setTileLevel(determineTileLevel(request.getTileId()));
|
||||||
|
} else {
|
||||||
|
// 전체 대해구 조회
|
||||||
|
tiles = getAllHaeguAggregation(request);
|
||||||
|
requestInfo.setTileLevel(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 요약 정보 계산
|
||||||
|
AggregationSummary summary = calculateSummary(tiles);
|
||||||
|
|
||||||
|
return TileAggregationResponse.builder()
|
||||||
|
.request(requestInfo)
|
||||||
|
.summary(summary)
|
||||||
|
.tiles(tiles)
|
||||||
|
.responseTime(LocalDateTime.now())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<TileDetail> getSpecificTileAggregation(TileAggregationRequest request) {
|
||||||
|
String tileId = request.getTileId();
|
||||||
|
int tileLevel = determineTileLevel(tileId);
|
||||||
|
|
||||||
|
List<TileDetail> tiles = new ArrayList<>();
|
||||||
|
|
||||||
|
if (tileLevel == 0) {
|
||||||
|
// 대해구인 경우: 대해구 + 소해구 정보 모두 조회
|
||||||
|
TileDetail haeguTile = getHaeguTileDetail(request, tileId);
|
||||||
|
|
||||||
|
// 소해구 정보 조회
|
||||||
|
List<TileDetail> subTiles = getSubTiles(request, tileId);
|
||||||
|
haeguTile.setSubTiles(subTiles);
|
||||||
|
|
||||||
|
tiles.add(haeguTile);
|
||||||
|
} else {
|
||||||
|
// 소해구인 경우: 해당 소해구만 조회
|
||||||
|
TileDetail sohaeguTile = getSohaeguTileDetail(request, tileId);
|
||||||
|
tiles.add(sohaeguTile);
|
||||||
|
}
|
||||||
|
|
||||||
|
return tiles;
|
||||||
|
}
|
||||||
|
|
||||||
|
private TileDetail getHaeguTileDetail(TileAggregationRequest request, String tileId) {
|
||||||
|
String sql = """
|
||||||
|
WITH aggregated_data AS (
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
tile_level,
|
||||||
|
jsonb_object_agg(
|
||||||
|
vessel_key,
|
||||||
|
vessel_data ORDER BY (vessel_data->>'lastSeen')::timestamp DESC
|
||||||
|
) as unique_vessels,
|
||||||
|
COUNT(DISTINCT vessel_key) as vessel_count,
|
||||||
|
AVG(vessel_density) as avg_density
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
tile_level,
|
||||||
|
vessel_density,
|
||||||
|
jsonb_each_text(unique_vessels) as vessel_entry
|
||||||
|
FROM signal.t_tile_summary
|
||||||
|
WHERE tile_id = ?
|
||||||
|
AND tile_level = 0
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
) t
|
||||||
|
CROSS JOIN LATERAL (
|
||||||
|
SELECT
|
||||||
|
vessel_entry.key as vessel_key,
|
||||||
|
vessel_entry.value::jsonb as vessel_data
|
||||||
|
) v
|
||||||
|
GROUP BY tile_id, tile_level
|
||||||
|
),
|
||||||
|
tile_info AS (
|
||||||
|
SELECT
|
||||||
|
g.tile_id,
|
||||||
|
g.haegu_no,
|
||||||
|
g.min_lat, g.min_lon, g.max_lat, g.max_lon,
|
||||||
|
public.ST_X(g.center_point) as center_lon,
|
||||||
|
public.ST_Y(g.center_point) as center_lat,
|
||||||
|
public.ST_Area(g.tile_geom::geography) / 1000000 as area_km2
|
||||||
|
FROM signal.t_grid_tiles g
|
||||||
|
WHERE g.tile_id = ?
|
||||||
|
AND g.tile_level = 0
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
a.tile_id,
|
||||||
|
a.tile_level,
|
||||||
|
t.haegu_no,
|
||||||
|
a.vessel_count,
|
||||||
|
a.avg_density,
|
||||||
|
a.unique_vessels,
|
||||||
|
t.min_lat, t.min_lon, t.max_lat, t.max_lon,
|
||||||
|
t.center_lat, t.center_lon, t.area_km2
|
||||||
|
FROM aggregated_data a
|
||||||
|
JOIN tile_info t ON a.tile_id = t.tile_id
|
||||||
|
""";
|
||||||
|
|
||||||
|
return queryJdbcTemplate.queryForObject(sql, (rs, rowNum) -> {
|
||||||
|
TileDetail detail = TileDetail.builder()
|
||||||
|
.tileId(rs.getString("tile_id"))
|
||||||
|
.tileLevel(rs.getInt("tile_level"))
|
||||||
|
.haeguNo(rs.getInt("haegu_no"))
|
||||||
|
.vesselCount(rs.getInt("vessel_count"))
|
||||||
|
.avgDensity(rs.getDouble("avg_density"))
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// unique_vessels JSON 파싱
|
||||||
|
String vesselsJson = rs.getString("unique_vessels");
|
||||||
|
if (vesselsJson != null) {
|
||||||
|
try {
|
||||||
|
Map<String, Map<String, Object>> vesselMap = objectMapper.readValue(
|
||||||
|
vesselsJson, new TypeReference<Map<String, Map<String, Object>>>() {});
|
||||||
|
|
||||||
|
Map<String, VesselInfo> uniqueVessels = new HashMap<>();
|
||||||
|
for (Map.Entry<String, Map<String, Object>> entry : vesselMap.entrySet()) {
|
||||||
|
Map<String, Object> vesselData = entry.getValue();
|
||||||
|
VesselInfo vesselInfo = VesselInfo.builder()
|
||||||
|
.lat((Double) vesselData.get("lat"))
|
||||||
|
.lon((Double) vesselData.get("lon"))
|
||||||
|
.sog((Double) vesselData.get("sog"))
|
||||||
|
.lastSeen(LocalDateTime.parse((String) vesselData.get("lastSeen")))
|
||||||
|
.build();
|
||||||
|
uniqueVessels.put(entry.getKey(), vesselInfo);
|
||||||
|
}
|
||||||
|
detail.setUniqueVessels(uniqueVessels);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to parse unique_vessels JSON", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 경계 정보
|
||||||
|
TileBoundary boundary = TileBoundary.builder()
|
||||||
|
.minLat(rs.getDouble("min_lat"))
|
||||||
|
.minLon(rs.getDouble("min_lon"))
|
||||||
|
.maxLat(rs.getDouble("max_lat"))
|
||||||
|
.maxLon(rs.getDouble("max_lon"))
|
||||||
|
.centerLat(rs.getDouble("center_lat"))
|
||||||
|
.centerLon(rs.getDouble("center_lon"))
|
||||||
|
.area(rs.getDouble("area_km2"))
|
||||||
|
.build();
|
||||||
|
detail.setBoundary(boundary);
|
||||||
|
|
||||||
|
return detail;
|
||||||
|
}, tileId, request.getFromDate(), request.getToDate(), tileId);
|
||||||
|
}
|
||||||
|
|
||||||
|
private TileDetail getSohaeguTileDetail(TileAggregationRequest request, String tileId) {
|
||||||
|
// 소해구 조회는 getHaeguTileDetail과 유사하지만 tile_level = 1로 조회
|
||||||
|
String sql = """
|
||||||
|
WITH aggregated_data AS (
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
tile_level,
|
||||||
|
jsonb_object_agg(
|
||||||
|
vessel_key,
|
||||||
|
vessel_data ORDER BY (vessel_data->>'lastSeen')::timestamp DESC
|
||||||
|
) as unique_vessels,
|
||||||
|
COUNT(DISTINCT vessel_key) as vessel_count,
|
||||||
|
AVG(vessel_density) as avg_density
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
tile_level,
|
||||||
|
vessel_density,
|
||||||
|
jsonb_each_text(unique_vessels) as vessel_entry
|
||||||
|
FROM signal.t_tile_summary
|
||||||
|
WHERE tile_id = ?
|
||||||
|
AND tile_level = 1
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
) t
|
||||||
|
CROSS JOIN LATERAL (
|
||||||
|
SELECT
|
||||||
|
vessel_entry.key as vessel_key,
|
||||||
|
vessel_entry.value::jsonb as vessel_data
|
||||||
|
) v
|
||||||
|
GROUP BY tile_id, tile_level
|
||||||
|
),
|
||||||
|
tile_info AS (
|
||||||
|
SELECT
|
||||||
|
g.tile_id,
|
||||||
|
g.haegu_no,
|
||||||
|
g.sohaegu_no,
|
||||||
|
g.min_lat, g.min_lon, g.max_lat, g.max_lon,
|
||||||
|
public.ST_X(g.center_point) as center_lon,
|
||||||
|
public.ST_Y(g.center_point) as center_lat,
|
||||||
|
public.ST_Area(g.tile_geom::geography) / 1000000 as area_km2
|
||||||
|
FROM signal.t_grid_tiles g
|
||||||
|
WHERE g.tile_id = ?
|
||||||
|
AND g.tile_level = 1
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
a.tile_id,
|
||||||
|
a.tile_level,
|
||||||
|
t.haegu_no,
|
||||||
|
t.sohaegu_no,
|
||||||
|
a.vessel_count,
|
||||||
|
a.avg_density,
|
||||||
|
a.unique_vessels,
|
||||||
|
t.min_lat, t.min_lon, t.max_lat, t.max_lon,
|
||||||
|
t.center_lat, t.center_lon, t.area_km2
|
||||||
|
FROM aggregated_data a
|
||||||
|
JOIN tile_info t ON a.tile_id = t.tile_id
|
||||||
|
""";
|
||||||
|
|
||||||
|
// 나머지 구현은 getHaeguTileDetail과 동일
|
||||||
|
return queryJdbcTemplate.queryForObject(sql, (rs, rowNum) -> {
|
||||||
|
// 파싱 로직 동일...
|
||||||
|
TileDetail detail = TileDetail.builder()
|
||||||
|
.tileId(rs.getString("tile_id"))
|
||||||
|
.tileLevel(rs.getInt("tile_level"))
|
||||||
|
.haeguNo(rs.getInt("haegu_no"))
|
||||||
|
.sohaeguNo(rs.getInt("sohaegu_no"))
|
||||||
|
.vesselCount(rs.getInt("vessel_count"))
|
||||||
|
.avgDensity(rs.getDouble("avg_density"))
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// unique_vessels, boundary 파싱은 동일
|
||||||
|
return detail;
|
||||||
|
}, tileId, request.getFromDate(), request.getToDate(), tileId);
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<TileDetail> getSubTiles(TileAggregationRequest request, String haeguTileId) {
|
||||||
|
String haeguNo = haeguTileId.substring(1); // H238 -> 238
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
WITH aggregated_data AS (
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
tile_level,
|
||||||
|
jsonb_object_agg(
|
||||||
|
vessel_key,
|
||||||
|
vessel_data ORDER BY (vessel_data->>'lastSeen')::timestamp DESC
|
||||||
|
) as unique_vessels,
|
||||||
|
COUNT(DISTINCT vessel_key) as vessel_count,
|
||||||
|
AVG(vessel_density) as avg_density
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
tile_level,
|
||||||
|
vessel_density,
|
||||||
|
jsonb_each_text(unique_vessels) as vessel_entry
|
||||||
|
FROM signal.t_tile_summary
|
||||||
|
WHERE tile_id LIKE ?
|
||||||
|
AND tile_level = 1
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
) t
|
||||||
|
CROSS JOIN LATERAL (
|
||||||
|
SELECT
|
||||||
|
vessel_entry.key as vessel_key,
|
||||||
|
vessel_entry.value::jsonb as vessel_data
|
||||||
|
) v
|
||||||
|
GROUP BY tile_id, tile_level
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
a.tile_id,
|
||||||
|
a.tile_level,
|
||||||
|
g.haegu_no,
|
||||||
|
g.sohaegu_no,
|
||||||
|
a.vessel_count,
|
||||||
|
a.avg_density,
|
||||||
|
a.unique_vessels
|
||||||
|
FROM aggregated_data a
|
||||||
|
JOIN signal.t_grid_tiles g ON a.tile_id = g.tile_id AND a.tile_level = g.tile_level
|
||||||
|
ORDER BY g.sohaegu_no
|
||||||
|
""";
|
||||||
|
|
||||||
|
String pattern = haeguTileId + "_S%";
|
||||||
|
|
||||||
|
return queryJdbcTemplate.query(sql, (rs, rowNum) -> {
|
||||||
|
// 각 소해구 타일 정보 파싱
|
||||||
|
TileDetail detail = TileDetail.builder()
|
||||||
|
.tileId(rs.getString("tile_id"))
|
||||||
|
.tileLevel(rs.getInt("tile_level"))
|
||||||
|
.haeguNo(rs.getInt("haegu_no"))
|
||||||
|
.sohaeguNo(rs.getInt("sohaegu_no"))
|
||||||
|
.vesselCount(rs.getInt("vessel_count"))
|
||||||
|
.avgDensity(rs.getDouble("avg_density"))
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// unique_vessels 파싱은 필요시 추가
|
||||||
|
return detail;
|
||||||
|
}, pattern, request.getFromDate(), request.getToDate());
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<TileDetail> getAllHaeguAggregation(TileAggregationRequest request) {
|
||||||
|
// 전체 대해구 집계 조회
|
||||||
|
String sql = """
|
||||||
|
WITH aggregated_data AS (
|
||||||
|
SELECT
|
||||||
|
SUBSTRING(tile_id FROM 1 FOR POSITION('_' IN tile_id || '_') - 1) as haegu_tile_id,
|
||||||
|
jsonb_object_agg(
|
||||||
|
vessel_key,
|
||||||
|
vessel_data ORDER BY (vessel_data->>'lastSeen')::timestamp DESC
|
||||||
|
) as unique_vessels,
|
||||||
|
COUNT(DISTINCT vessel_key) as vessel_count,
|
||||||
|
AVG(vessel_density) as avg_density
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
vessel_density,
|
||||||
|
jsonb_each_text(unique_vessels) as vessel_entry
|
||||||
|
FROM signal.t_tile_summary
|
||||||
|
WHERE tile_level = 0
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
) t
|
||||||
|
CROSS JOIN LATERAL (
|
||||||
|
SELECT
|
||||||
|
vessel_entry.key as vessel_key,
|
||||||
|
vessel_entry.value::jsonb as vessel_data
|
||||||
|
) v
|
||||||
|
GROUP BY haegu_tile_id
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
a.haegu_tile_id as tile_id,
|
||||||
|
g.haegu_no,
|
||||||
|
a.vessel_count,
|
||||||
|
a.avg_density,
|
||||||
|
a.unique_vessels
|
||||||
|
FROM aggregated_data a
|
||||||
|
JOIN signal.t_grid_tiles g ON a.haegu_tile_id = g.tile_id AND g.tile_level = 0
|
||||||
|
WHERE a.vessel_count > 0
|
||||||
|
ORDER BY a.vessel_count DESC
|
||||||
|
""";
|
||||||
|
|
||||||
|
return queryJdbcTemplate.query(sql, (rs, rowNum) -> {
|
||||||
|
TileDetail detail = TileDetail.builder()
|
||||||
|
.tileId(rs.getString("tile_id"))
|
||||||
|
.tileLevel(0)
|
||||||
|
.haeguNo(rs.getInt("haegu_no"))
|
||||||
|
.vesselCount(rs.getInt("vessel_count"))
|
||||||
|
.avgDensity(rs.getDouble("avg_density"))
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// unique_vessels는 요약 정보에서는 생략 가능
|
||||||
|
return detail;
|
||||||
|
}, request.getFromDate(), request.getToDate());
|
||||||
|
}
|
||||||
|
|
||||||
|
private AggregationSummary calculateSummary(List<TileDetail> tiles) {
|
||||||
|
if (tiles.isEmpty()) {
|
||||||
|
return AggregationSummary.builder()
|
||||||
|
.totalTiles(0)
|
||||||
|
.totalUniqueVessels(0)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 전체 고유 선박 수 계산
|
||||||
|
Set<String> uniqueVesselIds = new HashSet<>();
|
||||||
|
int maxVesselCount = 0;
|
||||||
|
String maxVesselTileId = null;
|
||||||
|
double totalDensity = 0.0;
|
||||||
|
|
||||||
|
for (TileDetail tile : tiles) {
|
||||||
|
if (tile.getUniqueVessels() != null) {
|
||||||
|
uniqueVesselIds.addAll(tile.getUniqueVessels().keySet());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tile.getVesselCount() > maxVesselCount) {
|
||||||
|
maxVesselCount = tile.getVesselCount();
|
||||||
|
maxVesselTileId = tile.getTileId();
|
||||||
|
}
|
||||||
|
|
||||||
|
totalDensity += tile.getAvgDensity() != null ? tile.getAvgDensity() : 0.0;
|
||||||
|
|
||||||
|
// 하위 타일도 확인
|
||||||
|
if (tile.getSubTiles() != null) {
|
||||||
|
for (TileDetail subTile : tile.getSubTiles()) {
|
||||||
|
if (subTile.getUniqueVessels() != null) {
|
||||||
|
uniqueVesselIds.addAll(subTile.getUniqueVessels().keySet());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return AggregationSummary.builder()
|
||||||
|
.totalTiles(tiles.size())
|
||||||
|
.totalUniqueVessels(uniqueVesselIds.size())
|
||||||
|
.avgVesselDensity(tiles.isEmpty() ? 0.0 : totalDensity / tiles.size())
|
||||||
|
.maxVesselTileId(maxVesselTileId)
|
||||||
|
.maxVesselCount(maxVesselCount)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
private int determineTileLevel(String tileId) {
|
||||||
|
if (tileId == null) return -1;
|
||||||
|
return tileId.contains("_S") ? 1 : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map<String, Object> getTileInfo(String tileId) {
|
||||||
|
String sql = """
|
||||||
|
SELECT
|
||||||
|
tile_id,
|
||||||
|
tile_level,
|
||||||
|
haegu_no,
|
||||||
|
sohaegu_no,
|
||||||
|
min_lat, min_lon, max_lat, max_lon,
|
||||||
|
public.ST_X(center_point) as center_lon,
|
||||||
|
public.ST_Y(center_point) as center_lat,
|
||||||
|
public.ST_Area(tile_geom::geography) / 1000000 as area_km2
|
||||||
|
FROM signal.t_grid_tiles
|
||||||
|
WHERE tile_id = ?
|
||||||
|
""";
|
||||||
|
|
||||||
|
return queryJdbcTemplate.queryForMap(sql, tileId);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,303 @@
|
|||||||
|
package gc.mda.signal_batch.domain.passage.controller;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.passage.dto.SequentialPassageRequest;
|
||||||
|
import gc.mda.signal_batch.domain.passage.dto.SequentialPassageResponse;
|
||||||
|
import gc.mda.signal_batch.domain.passage.service.SequentialAreaTrackingService;
|
||||||
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
|
import io.swagger.v3.oas.annotations.Parameter;
|
||||||
|
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.format.annotation.DateTimeFormat;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.validation.annotation.Validated;
|
||||||
|
import org.springframework.web.bind.annotation.*;
|
||||||
|
|
||||||
|
import jakarta.validation.Valid;
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/v1/passages")
|
||||||
|
@Tag(name = "Sequential Passage API", description = "순차 구역 통과 선박 조회 API")
|
||||||
|
@Validated
|
||||||
|
public class SequentialPassageController {
|
||||||
|
|
||||||
|
private final SequentialAreaTrackingService trackingService;
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
|
||||||
|
public SequentialPassageController(SequentialAreaTrackingService trackingService,
|
||||||
|
@Qualifier("queryDataSource") DataSource queryDataSource) {
|
||||||
|
this.trackingService = trackingService;
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(
|
||||||
|
summary = "순차 구역 통과 선박 조회",
|
||||||
|
description = """
|
||||||
|
지정된 구역들을 순차적으로 통과한 선박을 조회합니다.
|
||||||
|
- GRID: 해구(대해구/소해구) 기준으로 조회
|
||||||
|
- AREA: 사용자 정의 구역 기준으로 조회
|
||||||
|
|
||||||
|
예시:
|
||||||
|
- 해구 93 → 92 → 100 순서로 통과한 선박
|
||||||
|
- 구역 AREA001 → AREA002 → AREA003 순서로 통과한 선박
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
@PostMapping("/sequential")
|
||||||
|
public ResponseEntity<SequentialPassageResponse> getSequentialPassages(
|
||||||
|
@Valid @RequestBody SequentialPassageRequest request) {
|
||||||
|
|
||||||
|
long startMs = System.currentTimeMillis();
|
||||||
|
log.info("Sequential passage request: type={}, zones={}, period={} to {}",
|
||||||
|
request.getType(), request.getZoneIds(), request.getStartTime(), request.getEndTime());
|
||||||
|
|
||||||
|
// 유효성 검증
|
||||||
|
if (request.getStartTime().isAfter(request.getEndTime())) {
|
||||||
|
throw new IllegalArgumentException("startTime cannot be after endTime");
|
||||||
|
}
|
||||||
|
|
||||||
|
List<Map<String, Object>> results;
|
||||||
|
|
||||||
|
if (request.getType() == SequentialPassageRequest.PassageType.GRID) {
|
||||||
|
// 해구 번호로 변환
|
||||||
|
List<Integer> haeguNumbers = request.getZoneIds().stream()
|
||||||
|
.map(Integer::parseInt)
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
results = trackingService.findSequentialGridPassages(
|
||||||
|
haeguNumbers, request.getStartTime(), request.getEndTime());
|
||||||
|
} else {
|
||||||
|
results = trackingService.findSequentialAreaPassages(
|
||||||
|
request.getZoneIds(), request.getStartTime(), request.getEndTime());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 응답 구성
|
||||||
|
List<SequentialPassageResponse.VesselPassage> passages = results.stream()
|
||||||
|
.map(row -> buildVesselPassage(row, request))
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
long processingTime = System.currentTimeMillis() - startMs;
|
||||||
|
|
||||||
|
return ResponseEntity.ok(SequentialPassageResponse.builder()
|
||||||
|
.totalVessels(passages.size())
|
||||||
|
.startTime(request.getStartTime())
|
||||||
|
.endTime(request.getEndTime())
|
||||||
|
.zones(request.getZoneIds())
|
||||||
|
.passages(passages)
|
||||||
|
.processingTimeMs(processingTime)
|
||||||
|
.build());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(
|
||||||
|
summary = "구역 통과 통계 조회",
|
||||||
|
description = "특정 구역의 통과 통계를 조회합니다."
|
||||||
|
)
|
||||||
|
@GetMapping("/statistics")
|
||||||
|
public ResponseEntity<Map<String, Object>> getPassageStatistics(
|
||||||
|
@Parameter(description = "구역 유형 (GRID/AREA)", required = true, example = "AREA")
|
||||||
|
@RequestParam SequentialPassageRequest.PassageType type,
|
||||||
|
|
||||||
|
@Parameter(description = "구역 ID", required = true, example = "AREA001")
|
||||||
|
@RequestParam String zoneId,
|
||||||
|
|
||||||
|
@Parameter(description = "조회 시작 시간", required = true, example = "2025-08-01T00:00:00")
|
||||||
|
@RequestParam
|
||||||
|
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
|
||||||
|
LocalDateTime startTime,
|
||||||
|
|
||||||
|
@Parameter(description = "조회 종료 시간", required = true, example = "2025-08-07T23:59:59")
|
||||||
|
@RequestParam
|
||||||
|
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
|
||||||
|
LocalDateTime endTime
|
||||||
|
) {
|
||||||
|
log.info("Passage statistics request: type={}, zoneId={}, period={} to {}",
|
||||||
|
type, zoneId, startTime, endTime);
|
||||||
|
|
||||||
|
Map<String, Object> stats;
|
||||||
|
|
||||||
|
if (type == SequentialPassageRequest.PassageType.AREA) {
|
||||||
|
stats = trackingService.getAreaPassageStatistics(zoneId, startTime, endTime);
|
||||||
|
} else {
|
||||||
|
// Grid 통계 조회
|
||||||
|
stats = getGridPassageStatistics(Integer.parseInt(zoneId), startTime, endTime);
|
||||||
|
}
|
||||||
|
|
||||||
|
return ResponseEntity.ok(stats);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(
|
||||||
|
summary = "다중 구역 동시 통과 선박 조회",
|
||||||
|
description = """
|
||||||
|
지정된 기간 동안 모든 구역을 통과한 선박을 조회합니다.
|
||||||
|
(순서 관계없이 모든 구역을 방문한 선박)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
@PostMapping("/all-zones")
|
||||||
|
public ResponseEntity<Map<String, Object>> getVesselsInAllZones(
|
||||||
|
@Valid @RequestBody SequentialPassageRequest request) {
|
||||||
|
|
||||||
|
log.info("All zones passage request: zones={}", request.getZoneIds());
|
||||||
|
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
String tableName = request.getType() == SequentialPassageRequest.PassageType.GRID
|
||||||
|
? "t_grid_vessel_tracks" : "t_area_vessel_tracks";
|
||||||
|
String zoneColumn = request.getType() == SequentialPassageRequest.PassageType.GRID
|
||||||
|
? "haegu_no" : "area_id";
|
||||||
|
|
||||||
|
String sql = String.format("""
|
||||||
|
WITH vessel_zones AS (
|
||||||
|
SELECT
|
||||||
|
sig_src_cd,
|
||||||
|
target_id,
|
||||||
|
COUNT(DISTINCT %s) as zone_count,
|
||||||
|
array_agg(DISTINCT %s ORDER BY %s) as visited_zones,
|
||||||
|
MIN(time_bucket) as first_seen,
|
||||||
|
MAX(time_bucket) as last_seen,
|
||||||
|
SUM(distance_nm) as total_distance,
|
||||||
|
AVG(avg_speed) as avg_speed
|
||||||
|
FROM signal.%s
|
||||||
|
WHERE time_bucket BETWEEN ? AND ?
|
||||||
|
AND %s = ANY(?)
|
||||||
|
GROUP BY sig_src_cd, target_id
|
||||||
|
HAVING COUNT(DISTINCT %s) = ?
|
||||||
|
)
|
||||||
|
SELECT * FROM vessel_zones
|
||||||
|
ORDER BY first_seen
|
||||||
|
""", zoneColumn, zoneColumn, zoneColumn, tableName, zoneColumn, zoneColumn);
|
||||||
|
|
||||||
|
Object[] params;
|
||||||
|
if (request.getType() == SequentialPassageRequest.PassageType.GRID) {
|
||||||
|
Integer[] haeguArray = request.getZoneIds().stream()
|
||||||
|
.map(Integer::parseInt)
|
||||||
|
.toArray(Integer[]::new);
|
||||||
|
params = new Object[]{
|
||||||
|
Timestamp.valueOf(request.getStartTime()),
|
||||||
|
Timestamp.valueOf(request.getEndTime()),
|
||||||
|
haeguArray,
|
||||||
|
request.getZoneIds().size()
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
params = new Object[]{
|
||||||
|
Timestamp.valueOf(request.getStartTime()),
|
||||||
|
Timestamp.valueOf(request.getEndTime()),
|
||||||
|
request.getZoneIds().toArray(String[]::new),
|
||||||
|
request.getZoneIds().size()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
List<Map<String, Object>> results = jdbcTemplate.queryForList(sql, params);
|
||||||
|
|
||||||
|
return ResponseEntity.ok(Map.of(
|
||||||
|
"totalVessels", results.size(),
|
||||||
|
"requiredZones", request.getZoneIds(),
|
||||||
|
"period", Map.of(
|
||||||
|
"start", request.getStartTime(),
|
||||||
|
"end", request.getEndTime()
|
||||||
|
),
|
||||||
|
"vessels", results
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
private SequentialPassageResponse.VesselPassage buildVesselPassage(
|
||||||
|
Map<String, Object> row, SequentialPassageRequest request) {
|
||||||
|
|
||||||
|
String sigSrcCd = (String) row.get("sig_src_cd");
|
||||||
|
String targetId = (String) row.get("target_id");
|
||||||
|
|
||||||
|
// 구역별 통과 정보 구성
|
||||||
|
List<SequentialPassageResponse.ZonePassage> zonePassages = new ArrayList<>();
|
||||||
|
|
||||||
|
for (int i = 0; i < request.getZoneIds().size(); i++) {
|
||||||
|
String zoneId = request.getZoneIds().get(i);
|
||||||
|
String prefix = request.getType() == SequentialPassageRequest.PassageType.GRID
|
||||||
|
? "haegu" : "area";
|
||||||
|
|
||||||
|
Timestamp entryTime = (Timestamp) row.get(prefix + (i + 1) + "_entry");
|
||||||
|
Timestamp exitTime = (Timestamp) row.get(prefix + (i + 1) + "_exit");
|
||||||
|
|
||||||
|
if (entryTime != null) {
|
||||||
|
zonePassages.add(SequentialPassageResponse.ZonePassage.builder()
|
||||||
|
.zoneId(zoneId)
|
||||||
|
.zoneName(getZoneName(zoneId, request.getType()))
|
||||||
|
.entryTime(entryTime.toLocalDateTime())
|
||||||
|
.exitTime(exitTime != null ? exitTime.toLocalDateTime() : null)
|
||||||
|
.build());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 선박 정보 조회 (캐시 활용 가능)
|
||||||
|
SequentialPassageResponse.VesselInfo vesselInfo = getVesselInfo(sigSrcCd, targetId);
|
||||||
|
|
||||||
|
return SequentialPassageResponse.VesselPassage.builder()
|
||||||
|
.sigSrcCd(sigSrcCd)
|
||||||
|
.targetId(targetId)
|
||||||
|
.vesselInfo(vesselInfo)
|
||||||
|
.zonePassages(zonePassages)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
private String getZoneName(String zoneId, SequentialPassageRequest.PassageType type) {
|
||||||
|
if (type == SequentialPassageRequest.PassageType.GRID) {
|
||||||
|
return "해구 " + zoneId;
|
||||||
|
} else {
|
||||||
|
return "구역 " + zoneId;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private SequentialPassageResponse.VesselInfo getVesselInfo(String sigSrcCd, String targetId) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT ship_nm as ship_name, ship_ty as ship_type
|
||||||
|
FROM signal.t_vessel_latest_position
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
LIMIT 1
|
||||||
|
""";
|
||||||
|
|
||||||
|
try {
|
||||||
|
Map<String, Object> result = jdbcTemplate.queryForMap(sql, sigSrcCd, targetId);
|
||||||
|
return SequentialPassageResponse.VesselInfo.builder()
|
||||||
|
.shipName(result.get("ship_name") != null ? (String) result.get("ship_name") : null)
|
||||||
|
.shipType(result.get("ship_type") != null ? (String) result.get("ship_type") : null)
|
||||||
|
.build();
|
||||||
|
} catch (Exception e) {
|
||||||
|
// 데이터 없을 경우 null 반환
|
||||||
|
return SequentialPassageResponse.VesselInfo.builder()
|
||||||
|
.shipName(null)
|
||||||
|
.shipType(null)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private Map<String, Object> getGridPassageStatistics(
|
||||||
|
Integer haeguNo, LocalDateTime startTime, LocalDateTime endTime) {
|
||||||
|
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT
|
||||||
|
COUNT(DISTINCT CONCAT(sig_src_cd, '_', target_id)) as unique_vessels,
|
||||||
|
COUNT(*) as total_passages,
|
||||||
|
SUM(distance_nm) as total_distance,
|
||||||
|
AVG(avg_speed) as avg_speed,
|
||||||
|
MIN(time_bucket) as first_passage,
|
||||||
|
MAX(time_bucket) as last_passage
|
||||||
|
FROM signal.t_grid_vessel_tracks
|
||||||
|
WHERE haegu_no = ?
|
||||||
|
AND time_bucket BETWEEN ? AND ?
|
||||||
|
""";
|
||||||
|
|
||||||
|
return jdbcTemplate.queryForMap(sql,
|
||||||
|
haeguNo,
|
||||||
|
Timestamp.valueOf(startTime),
|
||||||
|
Timestamp.valueOf(endTime)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,44 @@
|
|||||||
|
package gc.mda.signal_batch.domain.passage.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
import jakarta.validation.constraints.Size;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "순차 구역 통과 조회 요청")
|
||||||
|
public class SequentialPassageRequest {
|
||||||
|
|
||||||
|
@NotNull(message = "조회 시작 시간은 필수입니다")
|
||||||
|
@Schema(description = "조회 시작 시간", example = "2025-08-01T00:00:00", required = true)
|
||||||
|
private LocalDateTime startTime;
|
||||||
|
|
||||||
|
@NotNull(message = "조회 종료 시간은 필수입니다")
|
||||||
|
@Schema(description = "조회 종료 시간", example = "2025-08-07T23:59:59", required = true)
|
||||||
|
private LocalDateTime endTime;
|
||||||
|
|
||||||
|
@Schema(description = "조회 유형 (GRID: 해구, AREA: 사용자정의구역)", example = "GRID", required = true)
|
||||||
|
@NotNull(message = "조회 유형은 필수입니다")
|
||||||
|
private PassageType type;
|
||||||
|
|
||||||
|
@Size(min = 2, max = 10, message = "구역 ID는 최소 2개, 최대 10개까지 지정 가능합니다")
|
||||||
|
@Schema(description = "순차 통과 구역 목록 (GRID: 해구번호, AREA: 구역ID)", example = "[\"93\", \"92\", \"100\"]")
|
||||||
|
private List<String> zoneIds;
|
||||||
|
|
||||||
|
@Schema(description = "순차 통과 여부 (true: 순서대로 통과, false: 모든 구역 통과)", example = "true", defaultValue = "true")
|
||||||
|
@Builder.Default
|
||||||
|
private Boolean sequentialOnly = true;
|
||||||
|
|
||||||
|
public enum PassageType {
|
||||||
|
GRID, AREA
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,131 @@
|
|||||||
|
package gc.mda.signal_batch.domain.passage.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "순차 구역 통과 조회 응답")
|
||||||
|
public class SequentialPassageResponse {
|
||||||
|
|
||||||
|
@Schema(description = "총 매칭 선박 수", example = "24")
|
||||||
|
private Integer totalVessels;
|
||||||
|
|
||||||
|
@Schema(description = "조회 시작 시간", example = "2025-08-01T00:00:00")
|
||||||
|
private LocalDateTime startTime;
|
||||||
|
|
||||||
|
@Schema(description = "조회 종료 시간", example = "2025-08-07T23:59:59")
|
||||||
|
private LocalDateTime endTime;
|
||||||
|
|
||||||
|
@Schema(description = "조회 구역 목록", example = "[\"93\", \"92\", \"100\"]")
|
||||||
|
private List<String> zones;
|
||||||
|
|
||||||
|
@Schema(description = "순차 통과 선박 목록")
|
||||||
|
private List<VesselPassage> passages;
|
||||||
|
|
||||||
|
@Schema(description = "처리 시간 (ms)", example = "788")
|
||||||
|
private Long processingTimeMs;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "선박 통과 정보")
|
||||||
|
public static class VesselPassage {
|
||||||
|
|
||||||
|
@Schema(description = "신호원 코드", example = "000001")
|
||||||
|
private String sigSrcCd;
|
||||||
|
|
||||||
|
@Schema(description = "타겟 ID", example = "440308230")
|
||||||
|
private String targetId;
|
||||||
|
|
||||||
|
@Schema(description = "선박 정보")
|
||||||
|
private VesselInfo vesselInfo;
|
||||||
|
|
||||||
|
@Schema(description = "구역별 통과 시간")
|
||||||
|
private List<ZonePassage> zonePassages;
|
||||||
|
|
||||||
|
@Schema(description = "총 이동 거리 (해리)", example = "125.5")
|
||||||
|
private Double totalDistance;
|
||||||
|
|
||||||
|
@Schema(description = "평균 속도 (knots)", example = "12.3")
|
||||||
|
private Double avgSpeed;
|
||||||
|
}
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "선박 정보")
|
||||||
|
public static class VesselInfo {
|
||||||
|
@Schema(description = "선박명", example = "VESSEL NAME")
|
||||||
|
private String shipName;
|
||||||
|
|
||||||
|
@Schema(description = "선박 타입", example = "30")
|
||||||
|
private String shipType;
|
||||||
|
|
||||||
|
// @Schema(description = "선박 종류 코드", example = "000020")
|
||||||
|
// private String shipKindCode;
|
||||||
|
//
|
||||||
|
// @Schema(description = "IMO 번호", example = "9123456")
|
||||||
|
// private String imoNumber;
|
||||||
|
//
|
||||||
|
// @Schema(description = "호출부호", example = "VRAA5")
|
||||||
|
// private String callSign;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Schema(description = "구역 통과 정보")
|
||||||
|
public static class ZonePassage {
|
||||||
|
@Schema(description = "구역 ID", example = "93")
|
||||||
|
private String zoneId;
|
||||||
|
|
||||||
|
@Schema(description = "구역명", example = "해구 93")
|
||||||
|
private String zoneName;
|
||||||
|
|
||||||
|
@Schema(description = "진입 시간", example = "2025-08-01T10:30:00")
|
||||||
|
private LocalDateTime entryTime;
|
||||||
|
|
||||||
|
@Schema(description = "진출 시간", example = "2025-08-01T14:45:00")
|
||||||
|
private LocalDateTime exitTime;
|
||||||
|
|
||||||
|
@Schema(description = "구역 내 이동거리 (해리)", example = "45.2")
|
||||||
|
private Double distanceInZone;
|
||||||
|
|
||||||
|
@Schema(description = "구역 내 평균속도 (knots)", example = "11.5")
|
||||||
|
private Double avgSpeedInZone;
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Aggregated passage statistics for one zone over some query window.
     */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    @Schema(description = "구역 통과 통계")
    public static class PassageStatistics {

        // Identifier of the zone the statistics refer to.
        @Schema(description = "구역 ID", example = "93")
        private String zoneId;

        // Number of distinct vessels that passed through the zone.
        @Schema(description = "총 통과 선박 수", example = "1722")
        private Integer totalVessels;

        // Total number of passage events (one vessel may pass multiple times).
        @Schema(description = "총 통과 횟수", example = "5431")
        private Integer totalPassages;

        // Average time spent inside the zone, in minutes.
        @Schema(description = "평균 체류 시간 (분)", example = "245")
        private Double avgDurationMinutes;

        // Hour-of-day bucket with the most passages, formatted "HH:00-HH:00".
        @Schema(description = "최다 통과 시간대", example = "14:00-15:00")
        private String peakHour;
    }
|
||||||
|
}
|
||||||
@ -0,0 +1,178 @@
|
|||||||
|
package gc.mda.signal_batch.domain.passage.service;

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

import javax.sql.DataSource;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;

/**
 * Optimized lookup of vessels that passed through a sequence of zones.
 * - Uses Unix-timestamp-based M values on track geometries
 * - Parallel-friendly query shape
 * - Results are cacheable
 *
 * FIX: the SQL in both passage queries previously contained
 * {@code FIRpublic.ST_VALUE} / {@code LApublic.ST_VALUE} — the result of a
 * bad global replace of "ST_" with "public.ST_" that corrupted the window
 * functions FIRST_VALUE / LAST_VALUE. Restored to valid SQL.
 */
@Slf4j
@Service
public class SequentialAreaTrackingService {

    // Read-side datasource; a fresh JdbcTemplate is built per call (cheap wrapper).
    private final DataSource queryDataSource;

    public SequentialAreaTrackingService(@Qualifier("queryDataSource") DataSource queryDataSource) {
        this.queryDataSource = queryDataSource;
    }

    /**
     * Find vessels that passed through the given grid zones (haegu) in order.
     *
     * @param haeguNumbers exactly the first three entries are used as the
     *                     ordered sequence zone1 -> zone2 -> zone3
     * @param startTime    window start (inclusive)
     * @param endTime      window end (inclusive)
     * @return one row per vessel with entry/exit times for each of the 3 zones
     * @throws IllegalArgumentException if fewer than 3 zone numbers are given
     */
    public List<Map<String, Object>> findSequentialGridPassages(
            List<Integer> haeguNumbers,
            LocalDateTime startTime,
            LocalDateTime endTime) {

        // Guard the hard-coded get(0..2) accesses below.
        if (haeguNumbers == null || haeguNumbers.size() < 3) {
            throw new IllegalArgumentException("haeguNumbers must contain at least 3 zone numbers");
        }

        JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);

        // CTE pins the per-(vessel, zone) entry/exit computation.
        String sql = """
            WITH vessel_passages AS (
                SELECT DISTINCT
                    sig_src_cd,
                    target_id,
                    haegu_no,
                    FIRST_VALUE(time_bucket) OVER (
                        PARTITION BY sig_src_cd, target_id, haegu_no
                        ORDER BY time_bucket
                    ) as entry_time,
                    LAST_VALUE(time_bucket) OVER (
                        PARTITION BY sig_src_cd, target_id, haegu_no
                        ORDER BY time_bucket
                        ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
                    ) as exit_time
                FROM signal.t_grid_vessel_tracks
                WHERE time_bucket BETWEEN ? AND ?
                  AND haegu_no = ANY(ARRAY[?]::integer[])
            )
            SELECT
                v1.sig_src_cd,
                v1.target_id,
                v1.entry_time as haegu1_entry,
                v1.exit_time as haegu1_exit,
                v2.entry_time as haegu2_entry,
                v2.exit_time as haegu2_exit,
                v3.entry_time as haegu3_entry,
                v3.exit_time as haegu3_exit
            FROM vessel_passages v1
            JOIN vessel_passages v2 ON v1.sig_src_cd = v2.sig_src_cd
                AND v1.target_id = v2.target_id
                AND v2.haegu_no = ? AND v2.entry_time > v1.exit_time
            JOIN vessel_passages v3 ON v2.sig_src_cd = v3.sig_src_cd
                AND v2.target_id = v3.target_id
                AND v3.haegu_no = ? AND v3.entry_time > v2.exit_time
            WHERE v1.haegu_no = ?
            ORDER BY v1.entry_time
            """;

        // NOTE(review): binding a Java Integer[] into ARRAY[?]::integer[] relies on
        // driver-specific array handling — verify against the PostgreSQL JDBC driver in use.
        return jdbcTemplate.queryForList(sql,
            Timestamp.valueOf(startTime),
            Timestamp.valueOf(endTime),
            haeguNumbers.toArray(Integer[]::new),
            haeguNumbers.get(1),
            haeguNumbers.get(2),
            haeguNumbers.get(0)
        );
    }

    /**
     * Find vessels that passed through the given named areas in order.
     * Same structure as {@link #findSequentialGridPassages} but keyed by area_id.
     *
     * @param areaIds exactly the first three entries are used as the ordered sequence
     * @throws IllegalArgumentException if fewer than 3 area IDs are given
     */
    public List<Map<String, Object>> findSequentialAreaPassages(
            List<String> areaIds,
            LocalDateTime startTime,
            LocalDateTime endTime) {

        if (areaIds == null || areaIds.size() < 3) {
            throw new IllegalArgumentException("areaIds must contain at least 3 area IDs");
        }

        JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);

        String sql = """
            WITH area_passages AS (
                SELECT DISTINCT
                    sig_src_cd,
                    target_id,
                    area_id,
                    FIRST_VALUE(time_bucket) OVER (
                        PARTITION BY sig_src_cd, target_id, area_id
                        ORDER BY time_bucket
                    ) as entry_time,
                    LAST_VALUE(time_bucket) OVER (
                        PARTITION BY sig_src_cd, target_id, area_id
                        ORDER BY time_bucket
                        ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
                    ) as exit_time
                FROM signal.t_area_vessel_tracks
                WHERE time_bucket BETWEEN ? AND ?
                  AND area_id = ANY(ARRAY[?]::varchar[])
            )
            SELECT
                a1.sig_src_cd,
                a1.target_id,
                a1.entry_time as area1_entry,
                a1.exit_time as area1_exit,
                a2.entry_time as area2_entry,
                a2.exit_time as area2_exit,
                a3.entry_time as area3_entry,
                a3.exit_time as area3_exit
            FROM area_passages a1
            JOIN area_passages a2 ON a1.sig_src_cd = a2.sig_src_cd
                AND a1.target_id = a2.target_id
                AND a2.area_id = ? AND a2.entry_time > a1.exit_time
            JOIN area_passages a3 ON a2.sig_src_cd = a3.sig_src_cd
                AND a2.target_id = a3.target_id
                AND a3.area_id = ? AND a3.entry_time > a2.exit_time
            WHERE a1.area_id = ?
            ORDER BY a1.entry_time
            """;

        return jdbcTemplate.queryForList(sql,
            Timestamp.valueOf(startTime),
            Timestamp.valueOf(endTime),
            areaIds.toArray(String[]::new),
            areaIds.get(1),
            areaIds.get(2),
            areaIds.get(0)
        );
    }

    /**
     * Aggregate passage statistics for a single area over a time window.
     *
     * @return single-row aggregate: unique_vessels, total_passages,
     *         total_distance, avg_speed, first_passage, last_passage
     */
    public Map<String, Object> getAreaPassageStatistics(
            String areaId,
            LocalDateTime startTime,
            LocalDateTime endTime) {

        JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);

        String sql = """
            SELECT
                COUNT(DISTINCT CONCAT(sig_src_cd, '_', target_id)) as unique_vessels,
                COUNT(*) as total_passages,
                SUM(distance_nm) as total_distance,
                AVG(avg_speed) as avg_speed,
                MIN(time_bucket) as first_passage,
                MAX(time_bucket) as last_passage
            FROM signal.t_area_vessel_tracks
            WHERE area_id = ?
              AND time_bucket BETWEEN ? AND ?
            """;

        return jdbcTemplate.queryForMap(sql,
            areaId,
            Timestamp.valueOf(startTime),
            Timestamp.valueOf(endTime)
        );
    }
}
|
||||||
@ -0,0 +1,180 @@
|
|||||||
|
package gc.mda.signal_batch.domain.track.controller;

import gc.mda.signal_batch.domain.track.dto.AbnormalTrackResponse;
import gc.mda.signal_batch.domain.track.dto.AbnormalTrackStatsResponse;
import gc.mda.signal_batch.domain.track.service.AbnormalTrackService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.math.BigDecimal;
import com.fasterxml.jackson.annotation.JsonFormat;

/**
 * REST API for abnormal-track monitoring: listing detected abnormal tracks,
 * per-vessel history, statistics, ad-hoc detection, and housekeeping.
 * All business logic delegates to {@link AbnormalTrackService}.
 */
@Slf4j
@RestController
@RequestMapping("/api/v1/abnormal-tracks")
@RequiredArgsConstructor
@Tag(name = "비정상 항적 검출 API", description = "비정상 항적 검출, 조회 및 통계 API")
public class AbnormalTrackController {

    private final AbnormalTrackService abnormalTrackService;

    /**
     * List abnormal tracks detected within the last {@code hours} hours
     * (default 24).
     */
    @GetMapping("/recent")
    @Operation(summary = "최근 비정상 항적 조회", description = "지정된 시간 이내의 비정상 항적을 조회합니다.")
    public ResponseEntity<List<AbnormalTrackResponse>> getRecentAbnormalTracks(
            @Parameter(description = "조회 시간 (기본값: 24시간)")
            @RequestParam(defaultValue = "24") int hours) {

        LocalDateTime since = LocalDateTime.now().minusHours(hours);
        List<AbnormalTrackResponse> tracks = abnormalTrackService.getAbnormalTracksSince(since);

        return ResponseEntity.ok(tracks);
    }

    /**
     * Abnormal-track history for one vessel, identified by signal-source code
     * and target ID, over an inclusive date range (endDate is widened to the
     * start of the following day so the whole end day is included).
     */
    @GetMapping("/vessel/{sigSrcCd}/{targetId}")
    @Operation(summary = "특정 선박의 비정상 항적 이력", description = "특정 선박의 비정상 항적 이력을 조회합니다.")
    public ResponseEntity<List<AbnormalTrackResponse>> getVesselAbnormalTracks(
            @PathVariable String sigSrcCd,
            @PathVariable String targetId,
            @Parameter(description = "시작 날짜")
            @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate startDate,
            @Parameter(description = "종료 날짜")
            @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate endDate) {

        List<AbnormalTrackResponse> tracks = abnormalTrackService.getVesselAbnormalTracks(
            sigSrcCd, targetId, startDate.atStartOfDay(), endDate.plusDays(1).atStartOfDay()
        );

        return ResponseEntity.ok(tracks);
    }

    /** Daily abnormal-track statistics for the given date range. */
    @GetMapping("/statistics")
    @Operation(summary = "비정상 항적 통계", description = "지정된 기간의 비정상 항적 통계를 조회합니다.")
    public ResponseEntity<List<AbnormalTrackStatsResponse>> getAbnormalTrackStatistics(
            @Parameter(description = "시작 날짜")
            @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate startDate,
            @Parameter(description = "종료 날짜")
            @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) LocalDate endDate) {

        List<AbnormalTrackStatsResponse> stats = abnormalTrackService.getStatistics(startDate, endDate);

        return ResponseEntity.ok(stats);
    }

    /** Summary statistics grouped by abnormality type for the last N days (default 7). */
    @GetMapping("/statistics/summary")
    @Operation(summary = "비정상 항적 요약 통계", description = "비정상 유형별 요약 통계를 조회합니다.")
    public ResponseEntity<Map<String, Object>> getAbnormalTrackSummary(
            @Parameter(description = "조회 일수 (기본값: 7일)")
            @RequestParam(defaultValue = "7") int days) {

        Map<String, Object> summary = abnormalTrackService.getSummaryStatistics(days);

        return ResponseEntity.ok(summary);
    }

    /** Static catalog of abnormality type codes and their descriptions. */
    @GetMapping("/types")
    @Operation(summary = "비정상 유형 목록", description = "비정상 항적 유형 목록을 조회합니다.")
    public ResponseEntity<Map<String, String>> getAbnormalTypes() {
        Map<String, String> types = Map.of(
            "excessive_speed", "과속 (50 knots 초과)",
            "teleport", "순간이동 (5분간 10nm 초과)",
            "gap_jump", "Bucket 간 비정상 이동",
            "excessive_acceleration", "급가속 (분당 10 knots 초과)"
        );

        return ResponseEntity.ok(types);
    }

    /**
     * Ad-hoc detection: scan the hourly/daily aggregate tables with
     * caller-supplied distance/speed thresholds (does not persist results).
     */
    @PostMapping("/detect")
    @Operation(summary = "사용자 정의 기준으로 비정상 항적 검출", description = "hourly/daily 테이블에서 거리/속도 기준으로 비정상 항적을 검출합니다.")
    public ResponseEntity<List<AbnormalTrackResponse>> detectAbnormalTracks(
            @RequestBody DetectRequest request) {

        List<AbnormalTrackResponse> tracks = abnormalTrackService.detectFromHourlyDaily(
            request.getTableType(),
            request.getStartTime(),
            request.getEndTime(),
            request.getMinDistance(),
            request.getMinSpeed()
        );

        return ResponseEntity.ok(tracks);
    }

    /**
     * Move the selected tracks out of their source table and into
     * t_abnormal_tracks. Returns how many were actually moved.
     */
    @PostMapping("/move-to-abnormal")
    @Operation(summary = "선택된 항적을 비정상 테이블로 이동", description = "선택된 항적을 원래 테이블에서 삭제하고 t_abnormal_tracks로 이동합니다.")
    public ResponseEntity<Map<String, Object>> moveToAbnormalTracks(
            @RequestBody MoveTracksRequest request) {

        int movedCount = abnormalTrackService.moveToAbnormalTracks(
            request.getTableType(),
            request.getTracks(),
            request.getAbnormalType(),
            request.getReason()
        );

        return ResponseEntity.ok(Map.of(
            "success", true,
            "movedCount", movedCount,
            "message", String.format("%d개의 항적이 비정상 테이블로 이동되었습니다.", movedCount)
        ));
    }

    /** Request body for {@link #detectAbnormalTracks}. */
    @lombok.Data
    static class DetectRequest {
        private String tableType; // "hourly" or "daily"
        @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
        @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
        private LocalDateTime startTime;
        @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
        @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
        private LocalDateTime endTime;
        private BigDecimal minDistance; // minimum distance threshold (nm)
        private BigDecimal minSpeed; // minimum average-speed threshold (knots)
    }

    /** Request body for {@link #moveToAbnormalTracks}. */
    @lombok.Data
    static class MoveTracksRequest {
        private String tableType; // "hourly" or "daily"
        private List<TrackIdentifier> tracks;
        private String abnormalType;
        private String reason;
    }

    /**
     * Composite key identifying one track row: (sigSrcCd, targetId, timeBucket).
     * Public because it is also referenced from the service layer.
     */
    @lombok.Data
    public static class TrackIdentifier {
        private String sigSrcCd;
        private String targetId;
        @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
        @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
        private LocalDateTime timeBucket;
    }

    /** Delete abnormal-track rows older than the retention window (default 30 days). */
    @DeleteMapping("/cleanup")
    @Operation(summary = "오래된 비정상 항적 정리", description = "지정된 일수 이전의 비정상 항적 데이터를 삭제합니다.")
    public ResponseEntity<Map<String, Object>> cleanupOldAbnormalTracks(
            @Parameter(description = "보존 기간 (일)")
            @RequestParam(defaultValue = "30") int retentionDays) {

        int deletedCount = abnormalTrackService.cleanupOldData(retentionDays);

        return ResponseEntity.ok(Map.of(
            "deletedCount", deletedCount,
            "retentionDays", retentionDays,
            "message", String.format("%d일 이전의 %d건 비정상 항적 데이터가 삭제되었습니다.", retentionDays, deletedCount)
        ));
    }
}
|
||||||
@ -0,0 +1,34 @@
|
|||||||
|
package gc.mda.signal_batch.domain.track.dto;

import lombok.Builder;
import lombok.Data;

import java.math.BigDecimal;
import java.time.LocalDateTime;
import java.util.Map;

/**
 * Response DTO describing one abnormal track: identity, classification,
 * movement metrics, detection metadata, and (optionally) the track geometry.
 */
@Data
@Builder
public class AbnormalTrackResponse {
    // Row ID (may be synthesized from a hash for ad-hoc detections).
    private Long id;
    // Signal source code.
    private String sigSrcCd;
    // Target (vessel) identifier within the signal source.
    private String targetId;
    private String vesselId; // composed as sigSrcCd:targetId
    // Time bucket the track row belongs to.
    private LocalDateTime timeBucket;
    // Abnormality type code, e.g. "excessive_speed", "teleport".
    private String abnormalType;
    // Human-readable label for abnormalType.
    private String typeDescription;
    // Human-readable detail of why the track was flagged.
    private String abnormalDescription;
    // Distance traveled within the bucket, nautical miles.
    private BigDecimal distanceNm;
    // Average speed in knots.
    private BigDecimal avgSpeed;
    // Maximum speed in knots.
    private BigDecimal maxSpeed;
    // Number of position points making up the track.
    private Integer pointCount;
    // Table the track came from (hourly/daily aggregate).
    private String sourceTable;
    // When the abnormality was detected.
    private LocalDateTime detectedAt;
    // Parsed abnormal_reason JSON payload.
    private Map<String, Object> details;

    // GeoJSON-formatted track geometry (optional; coordinates carry an M value).
    private Object trackGeoJson;
}
|
||||||
@ -0,0 +1,22 @@
|
|||||||
|
package gc.mda.signal_batch.domain.track.dto;

import lombok.Builder;
import lombok.Data;

import java.math.BigDecimal;
import java.time.LocalDate;

/**
 * Response DTO for one row of daily abnormal-track statistics
 * (per date and abnormality type).
 */
@Data
@Builder
public class AbnormalTrackStatsResponse {
    // Statistics date.
    private LocalDate statDate;
    // Abnormality type code the row aggregates.
    private String abnormalType;
    // Distinct vessels flagged on that date/type.
    private Integer vesselCount;
    // Number of abnormal tracks.
    private Integer trackCount;
    // Total position points across those tracks.
    private Integer totalPoints;
    // Average deviation metric — units defined by the stats table; TODO confirm.
    private BigDecimal avgDeviation;
    // Maximum deviation metric.
    private BigDecimal maxDeviation;
}
|
||||||
@ -0,0 +1,501 @@
|
|||||||
|
package gc.mda.signal_batch.domain.track.service;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.type.TypeReference;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import gc.mda.signal_batch.domain.track.dto.AbnormalTrackResponse;
|
||||||
|
import gc.mda.signal_batch.domain.track.dto.AbnormalTrackStatsResponse;
|
||||||
|
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
|
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import gc.mda.signal_batch.domain.track.controller.AbnormalTrackController.TrackIdentifier;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 비정상 궤적 서비스
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Service
|
||||||
|
public class AbnormalTrackService {
|
||||||
|
|
||||||
|
    // JdbcTemplate bound to the read-side ("query") datasource.
    private final JdbcTemplate jdbcTemplate;

    /**
     * @param jdbcTemplate template wired to the {@code queryJdbcTemplate} bean
     */
    public AbnormalTrackService(@Qualifier("queryJdbcTemplate") JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Shared Jackson mapper used to parse the abnormal_reason JSON column.
    private final ObjectMapper objectMapper = new ObjectMapper();
|
||||||
|
|
||||||
|
    // Always reads geometry from the track_geom column.
|
||||||
|
|
||||||
|
    /**
     * Fetch abnormal tracks detected at or after {@code since}, newest first,
     * capped at 1000 rows. Each row's geometry is rendered server-side into a
     * GeoJSON LineString whose coordinates include the M value (timestamp).
     *
     * @param since lower bound (inclusive) on detected_at
     * @return mapped responses; details is null if abnormal_reason fails to parse
     */
    public List<AbnormalTrackResponse> getAbnormalTracksSince(LocalDateTime since) {
        String sql = """
            SELECT
                id,
                sig_src_cd,
                target_id,
                sig_src_cd || ':' || target_id as vessel_id,
                time_bucket,
                abnormal_type,
                abnormal_reason,
                distance_nm,
                avg_speed,
                max_speed,
                point_count,
                source_table,
                detected_at,
                -- M값을 포함한 커스텀 GeoJSON 생성
                jsonb_build_object(
                    'type', 'LineString',
                    'coordinates', (
                        SELECT jsonb_agg(
                            jsonb_build_array(
                                public.ST_X(public.ST_PointN(track_geom, point_num)),
                                public.ST_Y(public.ST_PointN(track_geom, point_num)),
                                public.ST_M(public.ST_PointN(track_geom, point_num))
                            )
                        )
                        FROM generate_series(1, public.ST_NPoints(track_geom)) AS point_num
                    )
                )::text as track_geojson
            FROM signal.t_abnormal_tracks
            WHERE detected_at >= ?
              AND track_geom IS NOT NULL
            ORDER BY detected_at DESC
            LIMIT 1000
            """;

        return jdbcTemplate.query(sql, (rs, rowNum) -> {
            // abnormal_reason is stored as JSON text; parse best-effort.
            Map<String, Object> abnormalReason = null;
            try {
                abnormalReason = objectMapper.readValue(
                    rs.getString("abnormal_reason"),
                    new TypeReference<Map<String, Object>>() {}
                );
            } catch (Exception e) {
                // Malformed JSON is logged and surfaced as null details rather than failing the row.
                log.error("Failed to parse abnormal_reason: {}", e.getMessage());
            }

            return AbnormalTrackResponse.builder()
                .id(rs.getLong("id"))
                .sigSrcCd(rs.getString("sig_src_cd"))
                .targetId(rs.getString("target_id"))
                .vesselId(rs.getString("vessel_id"))
                .timeBucket(rs.getTimestamp("time_bucket").toLocalDateTime())
                .abnormalType(rs.getString("abnormal_type"))
                .typeDescription(getTypeDescription(rs.getString("abnormal_type")))
                .abnormalDescription(getAbnormalDescription(abnormalReason))
                .distanceNm(rs.getBigDecimal("distance_nm"))
                .avgSpeed(rs.getBigDecimal("avg_speed"))
                .maxSpeed(rs.getBigDecimal("max_speed"))
                .pointCount(rs.getInt("point_count"))
                .sourceTable(rs.getString("source_table"))
                .detectedAt(rs.getTimestamp("detected_at").toLocalDateTime())
                .details(abnormalReason)
                .trackGeoJson(rs.getString("track_geojson"))
                .build();
        }, since);
    }
|
||||||
|
|
||||||
|
    /**
     * Abnormal-track history for one vessel (sigSrcCd + targetId) over a
     * half-open time window [startTime, endTime), newest bucket first.
     * Unlike {@link #getAbnormalTracksSince}, no geometry/GeoJSON is fetched.
     */
    public List<AbnormalTrackResponse> getVesselAbnormalTracks(
            String sigSrcCd, String targetId, LocalDateTime startTime, LocalDateTime endTime) {

        String sql = """
            SELECT
                id,
                sig_src_cd,
                target_id,
                sig_src_cd || ':' || target_id as vessel_id,
                time_bucket,
                abnormal_type,
                abnormal_reason,
                distance_nm,
                avg_speed,
                max_speed,
                point_count,
                source_table,
                detected_at
            FROM signal.t_abnormal_tracks
            WHERE sig_src_cd = ?
              AND target_id = ?
              AND time_bucket >= ?
              AND time_bucket < ?
            ORDER BY time_bucket DESC
            """;

        return jdbcTemplate.query(sql, (rs, rowNum) -> {
            // Best-effort parse of the abnormal_reason JSON column.
            Map<String, Object> abnormalReason = null;
            try {
                abnormalReason = objectMapper.readValue(
                    rs.getString("abnormal_reason"),
                    new TypeReference<Map<String, Object>>() {}
                );
            } catch (Exception e) {
                log.error("Failed to parse abnormal_reason: {}", e.getMessage());
            }

            return AbnormalTrackResponse.builder()
                .id(rs.getLong("id"))
                .sigSrcCd(rs.getString("sig_src_cd"))
                .targetId(rs.getString("target_id"))
                .vesselId(rs.getString("vessel_id"))
                .timeBucket(rs.getTimestamp("time_bucket").toLocalDateTime())
                .abnormalType(rs.getString("abnormal_type"))
                .typeDescription(getTypeDescription(rs.getString("abnormal_type")))
                .abnormalDescription(getAbnormalDescription(abnormalReason))
                .distanceNm(rs.getBigDecimal("distance_nm"))
                .avgSpeed(rs.getBigDecimal("avg_speed"))
                .maxSpeed(rs.getBigDecimal("max_speed"))
                .pointCount(rs.getInt("point_count"))
                .sourceTable(rs.getString("source_table"))
                .detectedAt(rs.getTimestamp("detected_at").toLocalDateTime())
                .details(abnormalReason)
                .build();
        }, sigSrcCd, targetId, startTime, endTime);
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 비정상 궤적 통계 조회
|
||||||
|
*/
|
||||||
|
public List<AbnormalTrackStatsResponse> getStatistics(LocalDate startDate, LocalDate endDate) {
|
||||||
|
String sql = """
|
||||||
|
SELECT
|
||||||
|
stat_date,
|
||||||
|
abnormal_type,
|
||||||
|
vessel_count,
|
||||||
|
track_count,
|
||||||
|
total_points,
|
||||||
|
avg_deviation,
|
||||||
|
max_deviation
|
||||||
|
FROM signal.t_abnormal_track_stats
|
||||||
|
WHERE stat_date >= ?
|
||||||
|
AND stat_date <= ?
|
||||||
|
ORDER BY stat_date DESC, track_count DESC
|
||||||
|
""";
|
||||||
|
|
||||||
|
return jdbcTemplate.query(sql, (rs, rowNum) ->
|
||||||
|
AbnormalTrackStatsResponse.builder()
|
||||||
|
.statDate(rs.getDate("stat_date").toLocalDate())
|
||||||
|
.abnormalType(rs.getString("abnormal_type"))
|
||||||
|
.vesselCount(rs.getInt("vessel_count"))
|
||||||
|
.trackCount(rs.getInt("track_count"))
|
||||||
|
.totalPoints(rs.getInt("total_points"))
|
||||||
|
.avgDeviation(rs.getBigDecimal("avg_deviation"))
|
||||||
|
.maxDeviation(rs.getBigDecimal("max_deviation"))
|
||||||
|
.build(),
|
||||||
|
startDate, endDate
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Summary statistics over the last {@code days} days: overall totals,
     * a per-type breakdown ("typeStatistics"), and a per-day count trend
     * ("dailyTrend"), all merged into one map.
     */
    public Map<String, Object> getSummaryStatistics(int days) {
        LocalDateTime since = LocalDateTime.now().minusDays(days);

        // Overall totals across all abnormality types.
        String totalSql = """
            SELECT
                COUNT(DISTINCT abnormal_type) as type_count,
                COUNT(*) as total_tracks,
                COUNT(DISTINCT sig_src_cd || ':' || target_id) as vessel_count,
                AVG(distance_nm) as avg_distance,
                MAX(max_speed) as max_speed_detected
            FROM signal.t_abnormal_tracks
            WHERE detected_at >= ?
            """;

        Map<String, Object> summary = jdbcTemplate.queryForMap(totalSql, since);

        // Breakdown by abnormality type.
        String typeSql = """
            SELECT
                abnormal_type,
                COUNT(*) as count,
                COUNT(DISTINCT sig_src_cd || ':' || target_id) as vessel_count
            FROM signal.t_abnormal_tracks
            WHERE detected_at >= ?
            GROUP BY abnormal_type
            ORDER BY count DESC
            """;

        List<Map<String, Object>> typeStats = jdbcTemplate.queryForList(typeSql, since);
        summary.put("typeStatistics", typeStats);

        // Daily detection-count trend.
        String trendSql = """
            SELECT
                DATE(detected_at) as date,
                COUNT(*) as count
            FROM signal.t_abnormal_tracks
            WHERE detected_at >= ?
            GROUP BY DATE(detected_at)
            ORDER BY date
            """;

        List<Map<String, Object>> trend = jdbcTemplate.queryForList(trendSql, since);
        summary.put("dailyTrend", trend);

        return summary;
    }
|
||||||
|
|
||||||
|
    /**
     * Detect candidate abnormal tracks directly from the hourly/daily
     * aggregate tables using caller-supplied distance/speed thresholds.
     * Read-only: nothing is persisted; rows are classified on the fly
     * (extreme_speed > 100 kts, extreme_distance > 50 nm, else user_detected).
     * Capped at 500 rows, most recent / longest first.
     *
     * @param tableType   "hourly" selects t_vessel_tracks_hourly, anything else t_vessel_tracks_daily
     * @param startTime   window start (inclusive)
     * @param endTime     window end (exclusive)
     * @param minDistance minimum distance in nm; null or 0 disables the filter
     * @param minSpeed    minimum average speed in knots; null or 0 disables the filter
     */
    public List<AbnormalTrackResponse> detectFromHourlyDaily(
            String tableType,
            LocalDateTime startTime,
            LocalDateTime endTime,
            BigDecimal minDistance,
            BigDecimal minSpeed) {

        // Null thresholds become 0 so the (? = 0 OR ...) SQL guards disable the filter.
        // Stored in final locals for capture by the row-mapper lambda.
        final BigDecimal finalMinDistance = minDistance == null ? BigDecimal.ZERO : minDistance;
        final BigDecimal finalMinSpeed = minSpeed == null ? BigDecimal.ZERO : minSpeed;

        String tableName = tableType.equals("hourly") ?
            "t_vessel_tracks_hourly" : "t_vessel_tracks_daily";

        // Geometry always comes from track_geom; the table name is interpolated
        // via String.format but is restricted to the two literals above.
        String sql = String.format("""
            SELECT
                sig_src_cd,
                target_id,
                sig_src_cd || ':' || target_id as vessel_id,
                time_bucket,
                distance_nm,
                avg_speed,
                max_speed,
                point_count,
                -- M값을 포함한 커스텀 GeoJSON 생성
                jsonb_build_object(
                    'type', 'LineString',
                    'coordinates', (
                        SELECT jsonb_agg(
                            jsonb_build_array(
                                public.ST_X(public.ST_PointN(track_geom, point_num)),
                                public.ST_Y(public.ST_PointN(track_geom, point_num)),
                                public.ST_M(public.ST_PointN(track_geom, point_num))
                            )
                        )
                        FROM generate_series(1, public.ST_NPoints(track_geom)) AS point_num
                    )
                )::text as track_geojson,
                start_position,
                end_position
            FROM signal.%s
            WHERE time_bucket >= ?
              AND time_bucket < ?
              AND track_geom IS NOT NULL
              AND (? = 0 OR distance_nm >= ?)
              AND (? = 0 OR avg_speed >= ?)
            ORDER BY time_bucket DESC, distance_nm DESC
            LIMIT 500
            """, tableName);

        return jdbcTemplate.query(sql, (rs, rowNum) -> {
            // Classify the row: defaults describe the user-supplied thresholds.
            String abnormalType = "user_detected";
            String description = "사용자 정의 기준 (거리: " + finalMinDistance + "nm, 속도: " + finalMinSpeed + "kts 이상)";

            BigDecimal distance = rs.getBigDecimal("distance_nm");
            BigDecimal avgSpeed = rs.getBigDecimal("avg_speed");

            // Hard-coded escalation thresholds: >100 kts or >50 nm per bucket.
            if (avgSpeed != null && avgSpeed.compareTo(new BigDecimal("100")) > 0) {
                abnormalType = "extreme_speed";
                description = "극단적 속도: " + avgSpeed + "kts";
            } else if (distance != null && distance.compareTo(new BigDecimal("50")) > 0) {
                abnormalType = "extreme_distance";
                description = "극단적 이동거리: " + distance + "nm";
            }

            String sigSrcCd = rs.getString("sig_src_cd");
            String targetId = rs.getString("target_id");
            LocalDateTime timeBucket = rs.getTimestamp("time_bucket").toLocalDateTime();

            // Synthesize a stable non-negative ID from the composite key
            // (these rows have no DB id of their own).
            long generatedId = (sigSrcCd + targetId + timeBucket.toString()).hashCode() & 0x7fffffffL;

            return AbnormalTrackResponse.builder()
                .id(generatedId)
                .sigSrcCd(sigSrcCd)
                .targetId(targetId)
                .vesselId(rs.getString("vessel_id"))
                .timeBucket(timeBucket)
                .abnormalType(abnormalType)
                .typeDescription(getTypeDescription(abnormalType))
                .abnormalDescription(description)
                .distanceNm(distance)
                .avgSpeed(avgSpeed)
                .maxSpeed(rs.getBigDecimal("max_speed"))
                .pointCount(rs.getInt("point_count"))
                .sourceTable(tableName)
                .detectedAt(LocalDateTime.now())
                .details(Map.of(
                    "tableType", tableType,
                    "minDistance", finalMinDistance,
                    "minSpeed", finalMinSpeed
                ))
                .trackGeoJson(rs.getString("track_geojson"))
                .build();
        }, startTime, endTime, finalMinDistance, finalMinDistance, finalMinSpeed, finalMinSpeed);
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선택된 궤적을 비정상 테이블로 이동 (정확한 버전)
|
||||||
|
*/
|
||||||
|
@Transactional
|
||||||
|
public int moveToAbnormalTracks(
|
||||||
|
String tableType,
|
||||||
|
List<TrackIdentifier> tracks,
|
||||||
|
String abnormalType,
|
||||||
|
String reason) {
|
||||||
|
|
||||||
|
if (tracks == null || tracks.isEmpty()) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
String sourceTable = tableType.equals("hourly") ?
|
||||||
|
"t_vessel_tracks_hourly" : "t_vessel_tracks_daily";
|
||||||
|
|
||||||
|
String reasonJson = "{\"type\": \"" + abnormalType + "\", \"reason\": \"" + reason + "\"}";
|
||||||
|
int totalMoved = 0;
|
||||||
|
|
||||||
|
// 각 트랙을 개별적으로 처리 (배치 처리도 가능하지만 정확성을 위해)
|
||||||
|
for (TrackIdentifier track : tracks) {
|
||||||
|
try {
|
||||||
|
// 1. t_abnormal_tracks로 복사
|
||||||
|
// track_geom 고정 사용
|
||||||
|
|
||||||
|
String insertSql = String.format("""
|
||||||
|
INSERT INTO signal.t_abnormal_tracks (
|
||||||
|
sig_src_cd, target_id, time_bucket, abnormal_type, abnormal_reason,
|
||||||
|
distance_nm, avg_speed, max_speed, point_count, track_geom,
|
||||||
|
source_table, detected_at
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
sig_src_cd, target_id, time_bucket, ?, ?::jsonb,
|
||||||
|
distance_nm, avg_speed, max_speed, point_count, track_geom,
|
||||||
|
?, NOW()
|
||||||
|
FROM signal.%s
|
||||||
|
WHERE sig_src_cd = ?
|
||||||
|
AND target_id = ?
|
||||||
|
AND time_bucket = ?
|
||||||
|
""", sourceTable);
|
||||||
|
|
||||||
|
int inserted = jdbcTemplate.update(insertSql,
|
||||||
|
abnormalType,
|
||||||
|
reasonJson,
|
||||||
|
sourceTable,
|
||||||
|
track.getSigSrcCd(),
|
||||||
|
track.getTargetId(),
|
||||||
|
track.getTimeBucket()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (inserted > 0) {
|
||||||
|
// 2. 원본 테이블에서 삭제
|
||||||
|
String deleteSql = String.format("""
|
||||||
|
DELETE FROM signal.%s
|
||||||
|
WHERE sig_src_cd = ?
|
||||||
|
AND target_id = ?
|
||||||
|
AND time_bucket = ?
|
||||||
|
""", sourceTable);
|
||||||
|
|
||||||
|
int deleted = jdbcTemplate.update(deleteSql,
|
||||||
|
track.getSigSrcCd(),
|
||||||
|
track.getTargetId(),
|
||||||
|
track.getTimeBucket()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (deleted > 0) {
|
||||||
|
totalMoved++;
|
||||||
|
log.debug("Moved track: {} {} {}",
|
||||||
|
track.getSigSrcCd(), track.getTargetId(), track.getTimeBucket());
|
||||||
|
} else {
|
||||||
|
log.warn("Failed to delete track after insert: {} {} {}",
|
||||||
|
track.getSigSrcCd(), track.getTargetId(), track.getTimeBucket());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error moving track {} {} {}: {}",
|
||||||
|
track.getSigSrcCd(), track.getTargetId(), track.getTimeBucket(), e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Successfully moved {} tracks from {} to t_abnormal_tracks", totalMoved, sourceTable);
|
||||||
|
|
||||||
|
return totalMoved;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public int cleanupOldData(int retentionDays) {
|
||||||
|
LocalDateTime cutoffDate = LocalDateTime.now().minusDays(retentionDays);
|
||||||
|
|
||||||
|
String sql = "DELETE FROM signal.t_abnormal_tracks WHERE detected_at < ?";
|
||||||
|
int deletedCount = jdbcTemplate.update(sql, cutoffDate);
|
||||||
|
|
||||||
|
log.info("Cleaned up {} abnormal tracks older than {} days", deletedCount, retentionDays);
|
||||||
|
|
||||||
|
return deletedCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
private String getTypeDescription(String type) {
|
||||||
|
return switch (type) {
|
||||||
|
case "excessive_speed" -> "과속";
|
||||||
|
case "teleport" -> "순간이동";
|
||||||
|
case "gap_jump" -> "Bucket 간 점프";
|
||||||
|
case "excessive_acceleration" -> "급가속";
|
||||||
|
case "extreme_avg_speed_5min" -> "순간이동"; // 5분 극단 속도를 순간이동으로 분류
|
||||||
|
case "user_detected" -> "사용자 검출";
|
||||||
|
default -> type;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 비정상 궤적 설명 추출 (null 안전하게)
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
private String getAbnormalDescription(Map<String, Object> abnormalReason) {
|
||||||
|
if (abnormalReason == null) {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
Object segments = abnormalReason.get("segments");
|
||||||
|
if (segments instanceof List) {
|
||||||
|
List<Map<String, Object>> segmentList = (List<Map<String, Object>>) segments;
|
||||||
|
if (!segmentList.isEmpty()) {
|
||||||
|
Map<String, Object> firstSegment = segmentList.get(0);
|
||||||
|
if (firstSegment != null) {
|
||||||
|
Object description = firstSegment.get("description");
|
||||||
|
return description != null ? description.toString() : "";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to extract abnormal description: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,48 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.dto;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
/**
 * DTO for transferring compacted vessel-track data.
 * Sends plain arrays instead of LineStringM geometry so the frontend does
 * not have to parse WKT.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
public class CompactVesselTrack {
    private String vesselId;            // sig_src_cd + "_" + target_id
    private String sigSrcCd;
    private String targetId;
    private String nationalCode;        // National code based on sigSrcCd and targetId

    // Track geometry as a plain coordinate array: [[lon, lat], ...]
    private List<double[]> geometry;

    // MIGRATION_V2: kept as String for compatibility (Unix timestamps are
    // also serialized as String)
    @JsonProperty("timestamps")
    private List<String> timestamps;

    private List<Double> speeds;        // e.g. [12.5, 13.2, ...]

    // Aggregate metadata
    private Double totalDistance;       // total travelled distance (nm)
    private Double avgSpeed;            // average speed (knots)
    private Double maxSpeed;            // maximum speed (knots)
    private Integer pointCount;         // number of track points

    // Vessel information
    private String shipName;            // ship name
    private String shipType;            // ship type
    private String shipKindCode;        // ship kind code

    // Integrated-vessel information
    private String integrationTargetId; // integrated vessel id (format: AIS_ENAV_VPASS_VTS-AIS_D-MF/HF)
}
|
||||||
@ -0,0 +1,78 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.dto;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.util.IntegrationSignalConstants;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 통합선박 정보 DTO
|
||||||
|
* gis.t_ship_integration_sub 테이블 매핑
|
||||||
|
*/
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class IntegrationVessel {
|
||||||
|
|
||||||
|
private Long intgrSeq;
|
||||||
|
private String ais;
|
||||||
|
private String enav;
|
||||||
|
private String vpass;
|
||||||
|
private String vtsAis;
|
||||||
|
private String dMfHf;
|
||||||
|
private String aisShipNm;
|
||||||
|
private String enavShipNm;
|
||||||
|
private String vpassShipNm;
|
||||||
|
private String vtsAisShipNm;
|
||||||
|
private String dMfHfShipNm;
|
||||||
|
private String integrationShipTy;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* integration_target_id 생성
|
||||||
|
*/
|
||||||
|
public String generateIntegrationId() {
|
||||||
|
return IntegrationSignalConstants.generateIntegrationId(
|
||||||
|
ais, enav, vpass, vtsAis, dMfHf
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 신호 타입에 해당하는 target_id 반환
|
||||||
|
*/
|
||||||
|
public String getTargetIdBySignalType(IntegrationSignalConstants.SignalType signalType) {
|
||||||
|
if (signalType == null) return null;
|
||||||
|
|
||||||
|
return switch (signalType) {
|
||||||
|
case AIS -> ais;
|
||||||
|
case E_NAVIGATION -> enav;
|
||||||
|
case VPASS -> vpass;
|
||||||
|
case VTS_AIS -> vtsAis;
|
||||||
|
case D_MF_HF -> dMfHf;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 신호 타입에 해당하는 선박명 반환
|
||||||
|
*/
|
||||||
|
public String getShipNameBySignalType(IntegrationSignalConstants.SignalType signalType) {
|
||||||
|
if (signalType == null) return null;
|
||||||
|
|
||||||
|
return switch (signalType) {
|
||||||
|
case AIS -> aisShipNm;
|
||||||
|
case E_NAVIGATION -> enavShipNm;
|
||||||
|
case VPASS -> vpassShipNm;
|
||||||
|
case VTS_AIS -> vtsAisShipNm;
|
||||||
|
case D_MF_HF -> dMfHfShipNm;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 해당 신호 타입의 데이터가 존재하는지 확인
|
||||||
|
*/
|
||||||
|
public boolean hasSignalType(IntegrationSignalConstants.SignalType signalType) {
|
||||||
|
String targetId = getTargetIdBySignalType(signalType);
|
||||||
|
return targetId != null && !"0".equals(targetId) && !targetId.isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,52 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.dto;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
|
||||||
|
/**
 * DTO for a vessel whose position was recently updated.
 * Field-level semantics are documented via the OpenAPI {@code @Schema}
 * annotations below.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Schema(description = "최근 위치 업데이트된 선박 정보")
public class RecentVesselPositionDto {
    @Schema(description = "신호원 코드", example = "000001")
    private String sigSrcCd;

    @Schema(description = "대상 ID", example = "440331240")
    private String targetId;

    @Schema(description = "경도", example = "126.9779")
    private Double lon;

    @Schema(description = "위도", example = "37.5665")
    private Double lat;

    @Schema(description = "대지속도 (knots)", example = "12.5")
    private BigDecimal sog;

    @Schema(description = "대지침로 (도)", example = "180.0")
    private BigDecimal cog;

    @Schema(description = "선박명", example = "SEOM SARANG 11")
    private String shipNm;

    @Schema(description = "선박 유형", example = "60")
    private String shipTy;

    @Schema(description = "선박 종류 코드", example = "000022")
    private String shipKindCode;

    @Schema(description = "국가 코드", example = "440")
    private String nationalCode;

    @Schema(description = "최종 업데이트 시간", example = "2025-08-28 15:30:00")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private LocalDateTime lastUpdate;
}
|
||||||
@ -0,0 +1,36 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.dto;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
|
||||||
|
/**
 * Response DTO for a single vessel track, serialized with snake_case JSON
 * property names matching the underlying table columns.
 */
@Data
@Builder
public class TrackResponse {
    @JsonProperty("sig_src_cd")
    private String sigSrcCd;

    @JsonProperty("target_id")
    private String targetId;

    @JsonProperty("track_geom")
    private String trackGeom; // WKT format

    @JsonProperty("distance_nm")
    private BigDecimal distanceNm;

    @JsonProperty("avg_speed")
    private BigDecimal avgSpeed;

    @JsonProperty("max_speed")
    private BigDecimal maxSpeed;

    @JsonProperty("point_count")
    private Integer pointCount;

    @JsonProperty("time_bucket")
    private LocalDateTime timeBucket;
}
|
||||||
@ -0,0 +1,23 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.dto;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
|
||||||
|
/**
 * Aggregate vessel statistics response, serialized with snake_case JSON
 * property names.
 */
@Data
@Builder
public class VesselStatsResponse {
    @JsonProperty("vessel_count")
    private Integer vesselCount;

    @JsonProperty("total_distance")
    private BigDecimal totalDistance;

    @JsonProperty("avg_speed")
    private BigDecimal avgSpeed;

    @JsonProperty("active_tracks")
    private Integer activeTracks;
}
|
||||||
@ -0,0 +1,36 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.dto;
|
||||||
|
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
 * Request body for querying vessel tracks over a time range for a set of
 * vessels identified by (sigSrcCd, targetId).
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class VesselTracksRequest {
    private LocalDateTime startTime;
    private LocalDateTime endTime;
    private List<VesselIdentifier> vessels;

    /**
     * Integrated-vessel-signal mode.
     * "0": basic mode — return all tracks (default)
     * "1": integrated mode — for a vessel with multiple signals, return only
     *      the highest-priority one
     */
    @Builder.Default
    private String isIntegration = "0";

    /** Composite key identifying one vessel signal. */
    @Data
    @Builder
    @AllArgsConstructor
    @NoArgsConstructor
    public static class VesselIdentifier {
        private String sigSrcCd;
        private String targetId;
    }
}
|
||||||
@ -0,0 +1,48 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.model;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class VesselData {
|
||||||
|
private LocalDateTime messageTime;
|
||||||
|
private LocalDateTime realTime;
|
||||||
|
private String sigSrcCd;
|
||||||
|
private String targetId;
|
||||||
|
private Double lat;
|
||||||
|
private Double lon;
|
||||||
|
private BigDecimal sog;
|
||||||
|
private BigDecimal cog;
|
||||||
|
private Integer heading;
|
||||||
|
private String shipNm;
|
||||||
|
private String shipTy;
|
||||||
|
private Integer rot;
|
||||||
|
private Integer posacc;
|
||||||
|
private String sensorId;
|
||||||
|
private String baseStId;
|
||||||
|
private Integer mode;
|
||||||
|
private Integer gpsSttus;
|
||||||
|
private Integer batterySttus;
|
||||||
|
private String vtsCd;
|
||||||
|
private String mmsi;
|
||||||
|
private String vpassId;
|
||||||
|
private String shipNo;
|
||||||
|
|
||||||
|
public String getVesselKey() {
|
||||||
|
return sigSrcCd + "_" + targetId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isValidPosition() {
|
||||||
|
return lat != null && lon != null &&
|
||||||
|
lat >= -90 && lat <= 90 &&
|
||||||
|
lon >= -180 && lon <= 180;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,46 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.model;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class VesselLatestPosition {
|
||||||
|
private String sigSrcCd;
|
||||||
|
private String targetId;
|
||||||
|
private Double lat;
|
||||||
|
private Double lon;
|
||||||
|
private String geomWkt;
|
||||||
|
private BigDecimal sog;
|
||||||
|
private BigDecimal cog;
|
||||||
|
private Integer heading;
|
||||||
|
private String shipNm;
|
||||||
|
private String shipTy;
|
||||||
|
private LocalDateTime lastUpdate;
|
||||||
|
private Long updateCount;
|
||||||
|
private LocalDateTime createdAt;
|
||||||
|
|
||||||
|
public static VesselLatestPosition fromVesselData(VesselData data) {
|
||||||
|
return VesselLatestPosition.builder()
|
||||||
|
.sigSrcCd(data.getSigSrcCd())
|
||||||
|
.targetId(data.getTargetId())
|
||||||
|
.lat(data.getLat())
|
||||||
|
.lon(data.getLon())
|
||||||
|
.geomWkt(String.format("POINT(%f %f)", data.getLon(), data.getLat()))
|
||||||
|
.sog(data.getSog())
|
||||||
|
.cog(data.getCog())
|
||||||
|
.heading(data.getHeading())
|
||||||
|
.shipNm(data.getShipNm())
|
||||||
|
.shipTy(data.getShipTy())
|
||||||
|
.lastUpdate(data.getMessageTime())
|
||||||
|
.updateCount(1L)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,116 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.model;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class VesselTrack implements Serializable {
|
||||||
|
private static final long serialVersionUID = 1L;
|
||||||
|
|
||||||
|
// 기본 식별자
|
||||||
|
private String sigSrcCd;
|
||||||
|
private String targetId;
|
||||||
|
private LocalDateTime timeBucket;
|
||||||
|
|
||||||
|
// 궤적 정보
|
||||||
|
private List<TrackPoint> trackPoints;
|
||||||
|
private String trackGeom; // MIGRATION_V2: PostGIS LineStringM WKT format (unix timestamp)
|
||||||
|
private BigDecimal distanceNm; // 이동 거리 (해리)
|
||||||
|
private BigDecimal avgSpeed;
|
||||||
|
private BigDecimal maxSpeed;
|
||||||
|
private Integer pointCount;
|
||||||
|
|
||||||
|
// 시작/종료 위치
|
||||||
|
private TrackPosition startPosition;
|
||||||
|
private TrackPosition endPosition;
|
||||||
|
|
||||||
|
// 해구/구역 정보 (선택적)
|
||||||
|
private Integer haeguNo;
|
||||||
|
private String areaId;
|
||||||
|
private LocalDateTime entryTime;
|
||||||
|
private LocalDateTime exitTime;
|
||||||
|
|
||||||
|
private LocalDateTime createdAt;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class TrackPoint implements Serializable {
|
||||||
|
private LocalDateTime time;
|
||||||
|
private Double lat;
|
||||||
|
private Double lon;
|
||||||
|
private BigDecimal sog;
|
||||||
|
private BigDecimal cog;
|
||||||
|
private Integer heading;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class TrackPosition implements Serializable {
|
||||||
|
private Double lat;
|
||||||
|
private Double lon;
|
||||||
|
private LocalDateTime time;
|
||||||
|
private BigDecimal sog;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class VesselKey implements Serializable {
|
||||||
|
private String sigSrcCd;
|
||||||
|
private String targetId;
|
||||||
|
private LocalDateTime timeBucket;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getVesselKey() {
|
||||||
|
return sigSrcCd + "_" + targetId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean hasValidTrack() {
|
||||||
|
return trackPoints != null && trackPoints.size() >= 1; // 1개 이상이면 유효
|
||||||
|
}
|
||||||
|
|
||||||
|
// 거리 계산 (Haversine formula)
|
||||||
|
public BigDecimal calculateDistance() {
|
||||||
|
if (!hasValidTrack()) {
|
||||||
|
return BigDecimal.ZERO;
|
||||||
|
}
|
||||||
|
|
||||||
|
double totalDistance = 0.0;
|
||||||
|
for (int i = 1; i < trackPoints.size(); i++) {
|
||||||
|
TrackPoint prev = trackPoints.get(i - 1);
|
||||||
|
TrackPoint curr = trackPoints.get(i);
|
||||||
|
totalDistance += calculateDistanceBetweenPoints(
|
||||||
|
prev.getLat(), prev.getLon(),
|
||||||
|
curr.getLat(), curr.getLon()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return BigDecimal.valueOf(totalDistance).setScale(2, BigDecimal.ROUND_HALF_UP);
|
||||||
|
}
|
||||||
|
|
||||||
|
private double calculateDistanceBetweenPoints(double lat1, double lon1, double lat2, double lon2) {
|
||||||
|
final double R = 3440.065; // 지구 반경 (해리)
|
||||||
|
double dLat = Math.toRadians(lat2 - lat1);
|
||||||
|
double dLon = Math.toRadians(lon2 - lon1);
|
||||||
|
double a = Math.sin(dLat/2) * Math.sin(dLat/2) +
|
||||||
|
Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
|
||||||
|
Math.sin(dLon/2) * Math.sin(dLon/2);
|
||||||
|
double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a));
|
||||||
|
return R * c;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,233 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.IntegrationVessel;
|
||||||
|
import gc.mda.signal_batch.global.util.IntegrationSignalConstants;
|
||||||
|
import gc.mda.signal_batch.global.util.IntegrationSignalConstants.SignalType;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.scheduling.annotation.Scheduled;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 통합선박 정보 서비스
|
||||||
|
* 글로벌 캐시를 통한 통합선박 정보 관리
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Service
|
||||||
|
public class IntegrationVesselService {
|
||||||
|
|
||||||
|
private final DataSource queryDataSource;
|
||||||
|
|
||||||
|
@Value("${vessel.integration.enabled:true}")
|
||||||
|
private boolean integrationEnabled;
|
||||||
|
|
||||||
|
// 글로벌 캐시 (키: "sig_src_cd_target_id")
|
||||||
|
private final Map<String, IntegrationVessel> integrationCache = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
// 캐시 로드 상태
|
||||||
|
private final AtomicBoolean cacheLoaded = new AtomicBoolean(false);
|
||||||
|
|
||||||
|
public IntegrationVesselService(@Qualifier("queryDataSource") DataSource queryDataSource) {
|
||||||
|
this.queryDataSource = queryDataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 통합선박 기능 활성화 여부
|
||||||
|
*/
|
||||||
|
public boolean isEnabled() {
|
||||||
|
return integrationEnabled;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 스케줄 갱신 (기본: 매일 03:00)
|
||||||
|
*/
|
||||||
|
@Scheduled(cron = "${vessel.integration.cache.refresh-cron:0 0 3 * * ?}")
|
||||||
|
public void scheduledRefresh() {
|
||||||
|
if (!integrationEnabled) {
|
||||||
|
log.debug("Integration feature is disabled, skipping scheduled refresh");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
log.info("Scheduled integration cache refresh started");
|
||||||
|
loadCacheFromDB();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 단일 선박 통합정보 조회 (캐시에서)
|
||||||
|
*/
|
||||||
|
public IntegrationVessel findByVessel(String sigSrcCd, String targetId) {
|
||||||
|
if (!integrationEnabled) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
ensureCacheLoaded();
|
||||||
|
return integrationCache.get(sigSrcCd + "_" + targetId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 다중 선박 통합정보 조회 (캐시에서)
|
||||||
|
*
|
||||||
|
* @param vesselKeys Set of "sig_src_cd_target_id" format
|
||||||
|
* @return Map of vesselKey -> IntegrationVessel
|
||||||
|
*/
|
||||||
|
public Map<String, IntegrationVessel> findByVessels(Set<String> vesselKeys) {
|
||||||
|
if (!integrationEnabled) {
|
||||||
|
return new HashMap<>();
|
||||||
|
}
|
||||||
|
ensureCacheLoaded();
|
||||||
|
|
||||||
|
Map<String, IntegrationVessel> result = new HashMap<>();
|
||||||
|
for (String key : vesselKeys) {
|
||||||
|
IntegrationVessel vessel = integrationCache.get(key);
|
||||||
|
if (vessel != null) {
|
||||||
|
result.put(key, vessel);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 존재하는 신호들 중 최고 우선순위 선택
|
||||||
|
*
|
||||||
|
* @param existingSigSrcCds 존재하는 sig_src_cd 집합
|
||||||
|
* @return 최고 우선순위 sig_src_cd
|
||||||
|
*/
|
||||||
|
public String selectHighestPriorityFromExisting(Set<String> existingSigSrcCds) {
|
||||||
|
for (String sigSrcCd : IntegrationSignalConstants.PRIORITY_ORDER) {
|
||||||
|
if (existingSigSrcCds.contains(sigSrcCd)) {
|
||||||
|
return sigSrcCd;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// fallback: 아무거나 반환
|
||||||
|
return existingSigSrcCds.isEmpty() ? null : existingSigSrcCds.iterator().next();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 통합선박에서 최고 우선순위 신호 타입 반환
|
||||||
|
* (통합테이블에 등록된 신호 중)
|
||||||
|
*/
|
||||||
|
public SignalType getHighestPrioritySignalType(IntegrationVessel vessel) {
|
||||||
|
for (String sigSrcCd : IntegrationSignalConstants.PRIORITY_ORDER) {
|
||||||
|
SignalType type = SignalType.fromSigSrcCd(sigSrcCd);
|
||||||
|
if (type != null && vessel.hasSignalType(type)) {
|
||||||
|
return type;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 수동 갱신 API
|
||||||
|
*/
|
||||||
|
public void refreshCache() {
|
||||||
|
log.info("Manual integration cache refresh requested");
|
||||||
|
loadCacheFromDB();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 상태 조회
|
||||||
|
*/
|
||||||
|
public Map<String, Object> getCacheStatus() {
|
||||||
|
Map<String, Object> status = new HashMap<>();
|
||||||
|
status.put("enabled", integrationEnabled);
|
||||||
|
status.put("loaded", cacheLoaded.get());
|
||||||
|
status.put("size", integrationCache.size());
|
||||||
|
return status;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시가 비어있으면 즉시 로드 (Fallback)
|
||||||
|
*/
|
||||||
|
private void ensureCacheLoaded() {
|
||||||
|
if (!cacheLoaded.get() || integrationCache.isEmpty()) {
|
||||||
|
synchronized (this) {
|
||||||
|
if (!cacheLoaded.get() || integrationCache.isEmpty()) {
|
||||||
|
log.info("Integration cache is empty, loading from DB (fallback)...");
|
||||||
|
loadCacheFromDB();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DB에서 전체 통합선박 정보 로드
|
||||||
|
*/
|
||||||
|
private void loadCacheFromDB() {
|
||||||
|
long startTime = System.currentTimeMillis();
|
||||||
|
|
||||||
|
try {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT intgr_seq, ais, enav, vpass, vts_ais, d_mf_hf,
|
||||||
|
ais_ship_nm, enav_ship_nm, vpass_ship_nm, vts_ais_ship_nm, d_mf_hf_ship_nm,
|
||||||
|
integration_ship_ty
|
||||||
|
FROM gis.t_ship_integration_sub
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<IntegrationVessel> vessels = jdbcTemplate.query(sql, (rs, rowNum) ->
|
||||||
|
IntegrationVessel.builder()
|
||||||
|
.intgrSeq(rs.getLong("intgr_seq"))
|
||||||
|
.ais(rs.getString("ais"))
|
||||||
|
.enav(rs.getString("enav"))
|
||||||
|
.vpass(rs.getString("vpass"))
|
||||||
|
.vtsAis(rs.getString("vts_ais"))
|
||||||
|
.dMfHf(rs.getString("d_mf_hf"))
|
||||||
|
.aisShipNm(rs.getString("ais_ship_nm"))
|
||||||
|
.enavShipNm(rs.getString("enav_ship_nm"))
|
||||||
|
.vpassShipNm(rs.getString("vpass_ship_nm"))
|
||||||
|
.vtsAisShipNm(rs.getString("vts_ais_ship_nm"))
|
||||||
|
.dMfHfShipNm(rs.getString("d_mf_hf_ship_nm"))
|
||||||
|
.integrationShipTy(rs.getString("integration_ship_ty"))
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
// 캐시 초기화 및 재구성
|
||||||
|
Map<String, IntegrationVessel> newCache = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
for (IntegrationVessel vessel : vessels) {
|
||||||
|
// 각 신호 타입별로 키 생성하여 캐시에 저장
|
||||||
|
if (isValidTargetId(vessel.getAis())) {
|
||||||
|
newCache.put("000001_" + vessel.getAis(), vessel);
|
||||||
|
}
|
||||||
|
if (isValidTargetId(vessel.getEnav())) {
|
||||||
|
newCache.put("000002_" + vessel.getEnav(), vessel);
|
||||||
|
}
|
||||||
|
if (isValidTargetId(vessel.getVpass())) {
|
||||||
|
newCache.put("000003_" + vessel.getVpass(), vessel);
|
||||||
|
}
|
||||||
|
if (isValidTargetId(vessel.getVtsAis())) {
|
||||||
|
newCache.put("000004_" + vessel.getVtsAis(), vessel);
|
||||||
|
}
|
||||||
|
if (isValidTargetId(vessel.getDMfHf())) {
|
||||||
|
newCache.put("000016_" + vessel.getDMfHf(), vessel);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 기존 캐시를 새 캐시로 교체
|
||||||
|
integrationCache.clear();
|
||||||
|
integrationCache.putAll(newCache);
|
||||||
|
cacheLoaded.set(true);
|
||||||
|
|
||||||
|
long elapsed = System.currentTimeMillis() - startTime;
|
||||||
|
log.info("Integration cache loaded successfully: {} vessels, {} cache entries in {}ms",
|
||||||
|
vessels.size(), integrationCache.size(), elapsed);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to load integration cache from DB", e);
|
||||||
|
// 실패해도 기존 캐시 유지
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 유효한 target_id인지 확인
|
||||||
|
*/
|
||||||
|
private boolean isValidTargetId(String targetId) {
|
||||||
|
return targetId != null && !"0".equals(targetId) && !targetId.isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,210 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service;
|
||||||
|
|
||||||
|
import com.github.benmanes.caffeine.cache.Cache;
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.RecentVesselPositionDto;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.cache.CacheManager;
|
||||||
|
import org.springframework.cache.caffeine.CaffeineCache;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.concurrent.ConcurrentMap;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박 최신 위치 정보 캐시 관리 서비스
|
||||||
|
*
|
||||||
|
* 캐시 구조:
|
||||||
|
* - Key: "{sigSrcCd}:{targetId}" (예: "000001:440123456")
|
||||||
|
* - Value: RecentVesselPositionDto
|
||||||
|
* - TTL: 60분 (CacheConfig에서 설정)
|
||||||
|
*
|
||||||
|
* 데이터 흐름:
|
||||||
|
* 1. VesselPositionCacheRefreshScheduler가 1분마다 refreshCache() 호출
|
||||||
|
* 2. DB에서 최근 1분치 데이터 조회
|
||||||
|
* 3. 캐시에 업데이트 (기존 데이터는 TTL로 자동 만료)
|
||||||
|
* 4. API 요청 시 getFilteredPositions()로 minutes 파라미터 기준 필터링
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class VesselLatestPositionCache {
|
||||||
|
|
||||||
|
private final CacheManager cacheManager;
|
||||||
|
|
||||||
|
private static final String CACHE_NAME = "vesselLatestPositions";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 키 생성
|
||||||
|
*/
|
||||||
|
private String createKey(String sigSrcCd, String targetId) {
|
||||||
|
return sigSrcCd + ":" + targetId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 단일 선박 위치 캐시 저장
|
||||||
|
*/
|
||||||
|
public void put(RecentVesselPositionDto position) {
|
||||||
|
if (position == null || position.getSigSrcCd() == null || position.getTargetId() == null) {
|
||||||
|
log.warn("Invalid position data, skipping cache: {}", position);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
String key = createKey(position.getSigSrcCd(), position.getTargetId());
|
||||||
|
getCache().put(key, position);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 여러 선박 위치 일괄 캐시 저장
|
||||||
|
*/
|
||||||
|
public void putAll(List<RecentVesselPositionDto> positions) {
|
||||||
|
if (positions == null || positions.isEmpty()) {
|
||||||
|
log.debug("No positions to cache");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 갱신 전 캐시 크기
|
||||||
|
int sizeBefore = getCacheSize();
|
||||||
|
|
||||||
|
int count = 0;
|
||||||
|
for (RecentVesselPositionDto position : positions) {
|
||||||
|
if (position.getSigSrcCd() != null && position.getTargetId() != null) {
|
||||||
|
put(position);
|
||||||
|
count++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 갱신 후 캐시 크기
|
||||||
|
int sizeAfter = getCacheSize();
|
||||||
|
int delta = sizeAfter - sizeBefore;
|
||||||
|
String deltaStr = delta >= 0 ? "+" + delta : String.valueOf(delta);
|
||||||
|
|
||||||
|
log.info("Cached {} vessel positions (Total: {} vessels, Delta: {})",
|
||||||
|
count, sizeAfter, deltaStr);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시에서 지정된 시간 범위 내의 선박 위치 조회 (API 요청용)
|
||||||
|
*
|
||||||
|
* @param minutes 현재 시간 기준 조회할 시간 범위 (분 단위)
|
||||||
|
* @return 필터링된 선박 위치 목록
|
||||||
|
*/
|
||||||
|
public List<RecentVesselPositionDto> getFilteredPositions(int minutes) {
|
||||||
|
LocalDateTime threshold = LocalDateTime.now().minusMinutes(minutes);
|
||||||
|
List<RecentVesselPositionDto> result = new ArrayList<>();
|
||||||
|
|
||||||
|
ConcurrentMap<Object, Object> nativeCache = getNativeCache();
|
||||||
|
if (nativeCache == null || nativeCache.isEmpty()) {
|
||||||
|
log.debug("Cache is empty, returning empty list");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
int totalCached = 0;
|
||||||
|
int filtered = 0;
|
||||||
|
|
||||||
|
for (Map.Entry<Object, Object> entry : nativeCache.entrySet()) {
|
||||||
|
totalCached++;
|
||||||
|
|
||||||
|
if (entry.getValue() instanceof RecentVesselPositionDto position) {
|
||||||
|
// lastUpdate가 threshold 이후인 데이터만 포함
|
||||||
|
if (position.getLastUpdate() != null &&
|
||||||
|
position.getLastUpdate().isAfter(threshold)) {
|
||||||
|
result.add(position);
|
||||||
|
filtered++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.debug("Cache filter result: total={}, filtered={}, minutes={}",
|
||||||
|
totalCached, filtered, minutes);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 전체 삭제
|
||||||
|
*/
|
||||||
|
public void clear() {
|
||||||
|
org.springframework.cache.Cache cache = getCache();
|
||||||
|
if (cache != null) {
|
||||||
|
cache.clear();
|
||||||
|
log.info("Cache cleared");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 통계 정보 조회
|
||||||
|
*/
|
||||||
|
public CacheStats getStats() {
|
||||||
|
Cache<Object, Object> caffeineCache = getCaffeineCache();
|
||||||
|
if (caffeineCache == null) {
|
||||||
|
return new CacheStats(0, 0, 0.0, 0.0, 0L, 0L);
|
||||||
|
}
|
||||||
|
|
||||||
|
com.github.benmanes.caffeine.cache.stats.CacheStats stats = caffeineCache.stats();
|
||||||
|
ConcurrentMap<Object, Object> nativeCache = getNativeCache();
|
||||||
|
int size = nativeCache != null ? nativeCache.size() : 0;
|
||||||
|
|
||||||
|
return new CacheStats(
|
||||||
|
size,
|
||||||
|
caffeineCache.estimatedSize(),
|
||||||
|
stats.hitRate() * 100,
|
||||||
|
stats.missRate() * 100,
|
||||||
|
stats.hitCount(),
|
||||||
|
stats.missCount()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Spring Cache 객체 가져오기
|
||||||
|
*/
|
||||||
|
private org.springframework.cache.Cache getCache() {
|
||||||
|
return cacheManager.getCache(CACHE_NAME);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Caffeine Cache 네이티브 객체 가져오기
|
||||||
|
*/
|
||||||
|
private Cache<Object, Object> getCaffeineCache() {
|
||||||
|
org.springframework.cache.Cache cache = getCache();
|
||||||
|
if (cache instanceof CaffeineCache caffeineCache) {
|
||||||
|
return caffeineCache.getNativeCache();
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ConcurrentMap 가져오기 (필터링용)
|
||||||
|
*/
|
||||||
|
private ConcurrentMap<Object, Object> getNativeCache() {
|
||||||
|
Cache<Object, Object> caffeineCache = getCaffeineCache();
|
||||||
|
if (caffeineCache != null) {
|
||||||
|
return caffeineCache.asMap();
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 현재 캐시 크기 조회
|
||||||
|
*/
|
||||||
|
private int getCacheSize() {
|
||||||
|
ConcurrentMap<Object, Object> nativeCache = getNativeCache();
|
||||||
|
return nativeCache != null ? nativeCache.size() : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 통계 정보 DTO
|
||||||
|
*/
|
||||||
|
public record CacheStats(
|
||||||
|
int currentSize,
|
||||||
|
long estimatedSize,
|
||||||
|
double hitRate,
|
||||||
|
double missRate,
|
||||||
|
long hitCount,
|
||||||
|
long missCount
|
||||||
|
) {
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,146 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.dto.RecentVesselPositionDto;
|
||||||
|
import gc.mda.signal_batch.global.util.ShipKindCodeConverter;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.jdbc.core.RowMapper;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class VesselPositionService {
|
||||||
|
|
||||||
|
@Qualifier("queryJdbcTemplate")
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
// 캐시 서비스 (선택적 의존성 - 활성화 시에만 주입)
|
||||||
|
@Autowired(required = false)
|
||||||
|
private VesselLatestPositionCache cache;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 최근 위치 업데이트된 선박 조회
|
||||||
|
*
|
||||||
|
* 조회 전략:
|
||||||
|
* 1. 캐시 활성화 시: 메모리 캐시에서 조회 (빠른 응답)
|
||||||
|
* 2. 캐시 비활성화/실패 시: DB에서 직접 조회 (기존 로직)
|
||||||
|
*
|
||||||
|
* @param minutes 조회할 시간 범위 (분 단위)
|
||||||
|
* @return 필터링된 선박 위치 목록
|
||||||
|
*/
|
||||||
|
public List<RecentVesselPositionDto> getRecentVesselPositions(int minutes) {
|
||||||
|
// 캐시 우선 조회
|
||||||
|
if (cache != null) {
|
||||||
|
try {
|
||||||
|
List<RecentVesselPositionDto> cachedResult = cache.getFilteredPositions(minutes);
|
||||||
|
|
||||||
|
if (!cachedResult.isEmpty()) {
|
||||||
|
log.debug("Cache hit: returned {} vessels for minutes={}", cachedResult.size(), minutes);
|
||||||
|
return cachedResult;
|
||||||
|
} else {
|
||||||
|
log.debug("Cache returned empty result, falling back to DB query");
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Cache query failed, falling back to DB: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 캐시 미사용 또는 실패 시 DB 조회 (기존 로직)
|
||||||
|
return getRecentVesselPositionsFromDB(minutes);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DB에서 직접 조회 (기존 로직 유지 - Fallback)
|
||||||
|
*/
|
||||||
|
private List<RecentVesselPositionDto> getRecentVesselPositionsFromDB(int minutes) {
|
||||||
|
log.debug("Querying DB for vessel positions (cache miss or disabled)");
|
||||||
|
|
||||||
|
// 디버깅을 위해 DB 현재 시간과 임계값 시간 확인
|
||||||
|
if (log.isDebugEnabled()) {
|
||||||
|
String debugSql = """
|
||||||
|
SELECT
|
||||||
|
NOW() as db_now,
|
||||||
|
NOW() - INTERVAL '%d minutes' as threshold_time,
|
||||||
|
(SELECT MAX(last_update) FROM signal.t_vessel_latest_position) as max_last_update,
|
||||||
|
(SELECT COUNT(*) FROM signal.t_vessel_latest_position WHERE last_update >= NOW() - INTERVAL '%d minutes') as matching_count
|
||||||
|
""".formatted(minutes, minutes);
|
||||||
|
|
||||||
|
queryJdbcTemplate.query(debugSql, rs -> {
|
||||||
|
log.debug("DB NOW: {}", rs.getTimestamp("db_now"));
|
||||||
|
log.debug("Threshold time (NOW - {} min): {}", minutes, rs.getTimestamp("threshold_time"));
|
||||||
|
log.debug("Max last_update in table: {}", rs.getTimestamp("max_last_update"));
|
||||||
|
log.debug("Count matching time condition: {}", rs.getInt("matching_count"));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT
|
||||||
|
sig_src_cd,
|
||||||
|
target_id,
|
||||||
|
lon,
|
||||||
|
lat,
|
||||||
|
sog,
|
||||||
|
cog,
|
||||||
|
ship_nm,
|
||||||
|
ship_ty,
|
||||||
|
last_update
|
||||||
|
FROM signal.t_vessel_latest_position
|
||||||
|
WHERE last_update >= NOW() - INTERVAL '%d minutes'
|
||||||
|
AND sig_src_cd NOT IN ('000004', '000005')
|
||||||
|
ORDER BY last_update DESC
|
||||||
|
""".formatted(minutes);
|
||||||
|
|
||||||
|
log.info("Fetching vessel positions from DB within {} minutes", minutes);
|
||||||
|
|
||||||
|
List<RecentVesselPositionDto> results = queryJdbcTemplate.query(sql, new VesselPositionRowMapper());
|
||||||
|
|
||||||
|
log.info("Found {} vessels from DB within {} minutes", results.size(), minutes);
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static class VesselPositionRowMapper implements RowMapper<RecentVesselPositionDto> {
|
||||||
|
@Override
|
||||||
|
public RecentVesselPositionDto mapRow(ResultSet rs, int rowNum) throws SQLException {
|
||||||
|
String sigSrcCd = rs.getString("sig_src_cd");
|
||||||
|
String targetId = rs.getString("target_id");
|
||||||
|
String shipTy = rs.getString("ship_ty");
|
||||||
|
|
||||||
|
// shipKindCode 계산
|
||||||
|
String shipKindCode = ShipKindCodeConverter.getShipKindCode(sigSrcCd, shipTy);
|
||||||
|
|
||||||
|
// nationalCode 계산
|
||||||
|
String nationalCode;
|
||||||
|
if ("000001".equals(sigSrcCd) && targetId != null && targetId.length() >= 3) {
|
||||||
|
nationalCode = targetId.substring(0, 3);
|
||||||
|
} else {
|
||||||
|
nationalCode = "440"; // 기본값
|
||||||
|
}
|
||||||
|
|
||||||
|
return RecentVesselPositionDto.builder()
|
||||||
|
.sigSrcCd(sigSrcCd)
|
||||||
|
.targetId(targetId)
|
||||||
|
.lon(rs.getDouble("lon"))
|
||||||
|
.lat(rs.getDouble("lat"))
|
||||||
|
.sog(rs.getBigDecimal("sog"))
|
||||||
|
.cog(rs.getBigDecimal("cog"))
|
||||||
|
.shipNm(rs.getString("ship_nm"))
|
||||||
|
.shipTy(shipTy)
|
||||||
|
.shipKindCode(shipKindCode)
|
||||||
|
.nationalCode(nationalCode)
|
||||||
|
.lastUpdate(rs.getTimestamp("last_update") != null ?
|
||||||
|
rs.getTimestamp("last_update").toLocalDateTime() : null)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,395 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.MergedVesselTrack;
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.VesselTrackData;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박 궤적 병합 서비스
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class VesselTrackMerger {
|
||||||
|
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
@Value("${batch.track.abnormal-detection.enable-merger-filtering:false}")
|
||||||
|
private boolean enableMergerFiltering;
|
||||||
|
|
||||||
|
@Value("${batch.track.abnormal-detection.large-gap-threshold-hours:6}")
|
||||||
|
private int largeGapThresholdHours;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박별로 궤적 데이터를 병합
|
||||||
|
*/
|
||||||
|
public List<MergedVesselTrack> mergeTracksByVessel(List<VesselTrackData> tracks) {
|
||||||
|
// 선박별로 그룹화
|
||||||
|
Map<String, List<VesselTrackData>> vesselGroups = tracks.stream()
|
||||||
|
.collect(Collectors.groupingBy(t -> t.getSigSrcCd() + "_" + t.getTargetId()));
|
||||||
|
|
||||||
|
log.info("Merging tracks for {} vessels from {} segments",
|
||||||
|
vesselGroups.size(), tracks.size());
|
||||||
|
|
||||||
|
List<MergedVesselTrack> mergedTracks = new ArrayList<>();
|
||||||
|
|
||||||
|
for (Map.Entry<String, List<VesselTrackData>> entry : vesselGroups.entrySet()) {
|
||||||
|
try {
|
||||||
|
MergedVesselTrack merged = mergeVesselSegments(entry.getKey(), entry.getValue());
|
||||||
|
if (merged != null) {
|
||||||
|
mergedTracks.add(merged);
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to merge tracks for vessel {}: {}", entry.getKey(), e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Merged {} vessels successfully", mergedTracks.size());
|
||||||
|
return mergedTracks;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 단일 선박의 세그먼트들을 병합
|
||||||
|
*/
|
||||||
|
private MergedVesselTrack mergeVesselSegments(String vesselId, List<VesselTrackData> segments) {
|
||||||
|
if (segments.isEmpty()) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 유효한 geometry를 가진 세그먼트만 필터링
|
||||||
|
List<VesselTrackData> validSegments = segments.stream()
|
||||||
|
.filter(s -> s.getTrackGeom() != null
|
||||||
|
&& !s.getTrackGeom().isEmpty()
|
||||||
|
&& s.getTrackGeom().startsWith("LINESTRING M")
|
||||||
|
&& !s.getTrackGeom().equals("LINESTRING EMPTY")
|
||||||
|
&& !s.getTrackGeom().equals("GEOMETRYCOLLECTION EMPTY"))
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
if (validSegments.isEmpty()) {
|
||||||
|
log.trace("No valid geometries for vessel {}", vesselId);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 시간순 정렬
|
||||||
|
validSegments.sort(Comparator.comparing(VesselTrackData::getStartTime));
|
||||||
|
|
||||||
|
// 큰 시간 간격 필터링 (옵션이 활성화된 경우에만)
|
||||||
|
if (enableMergerFiltering) {
|
||||||
|
validSegments = filterLargeGaps(validSegments, vesselId);
|
||||||
|
if (validSegments.isEmpty()) {
|
||||||
|
log.debug("All segments filtered out due to large gaps for vessel {}", vesselId);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
VesselTrackData firstSegment = validSegments.get(0);
|
||||||
|
|
||||||
|
// 간단한 병합: 모든 포인트를 시간순으로 연결
|
||||||
|
String mergedGeom = mergeLineStringsSimple(validSegments);
|
||||||
|
|
||||||
|
// 병합 실패 시 null 반환
|
||||||
|
if (mergedGeom == null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
LocalDateTime startTime = validSegments.get(0).getStartTime();
|
||||||
|
LocalDateTime endTime = validSegments.get(validSegments.size() - 1).getEndTime() != null ?
|
||||||
|
validSegments.get(validSegments.size() - 1).getEndTime() : validSegments.get(validSegments.size() - 1).getStartTime();
|
||||||
|
|
||||||
|
// 통계 계산 - 병합된 궤적에서 실제 거리/속도 계산
|
||||||
|
Map<String, Object> stats = calculateMergedTrackStats(mergedGeom, startTime, endTime);
|
||||||
|
double totalDistance = (Double) stats.get("distance");
|
||||||
|
double avgSpeed = (Double) stats.get("avgSpeed");
|
||||||
|
|
||||||
|
// 기존 방식은 폴백으로 사용
|
||||||
|
if (totalDistance == 0.0) {
|
||||||
|
totalDistance = validSegments.stream()
|
||||||
|
.mapToDouble(VesselTrackData::getDistanceNm)
|
||||||
|
.sum();
|
||||||
|
}
|
||||||
|
|
||||||
|
int totalPoints = validSegments.stream()
|
||||||
|
.mapToInt(VesselTrackData::getPointCount)
|
||||||
|
.sum();
|
||||||
|
|
||||||
|
List<String> timeBuckets = validSegments.stream()
|
||||||
|
.map(s -> s.getStartTime().toString())
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
return MergedVesselTrack.builder()
|
||||||
|
.sigSrcCd(firstSegment.getSigSrcCd())
|
||||||
|
.targetId(firstSegment.getTargetId())
|
||||||
|
.nationalCode(gc.mda.signal_batch.global.util.NationalCodeUtil.calculateNationalCode(
|
||||||
|
firstSegment.getSigSrcCd(), firstSegment.getTargetId()))
|
||||||
|
.vesselId(vesselId)
|
||||||
|
.mergedTrackGeom(mergedGeom)
|
||||||
|
.totalDistanceNm(totalDistance)
|
||||||
|
.avgSpeed(avgSpeed)
|
||||||
|
.startTime(startTime)
|
||||||
|
.endTime(endTime)
|
||||||
|
.totalPoints(totalPoints)
|
||||||
|
.timeBuckets(timeBuckets)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 간단한 LineString 병합 (모든 포인트를 시간순으로 연결)
|
||||||
|
*/
|
||||||
|
private String mergeLineStringsSimple(List<VesselTrackData> segments) {
|
||||||
|
if (segments.size() == 1) {
|
||||||
|
return segments.get(0).getTrackGeom();
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 모든 포인트를 시간순으로 수집
|
||||||
|
List<Point> allPoints = new ArrayList<>();
|
||||||
|
|
||||||
|
// 첫 번째 세그먼트의 시작 시간을 기준으로 함
|
||||||
|
LocalDateTime baseTime = null;
|
||||||
|
|
||||||
|
for (VesselTrackData segment : segments) {
|
||||||
|
String geom = segment.getTrackGeom();
|
||||||
|
if (geom == null || !geom.startsWith("LINESTRING M")) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 기준 시간 설정 (첫 번째 유효한 세그먼트)
|
||||||
|
if (baseTime == null) {
|
||||||
|
baseTime = segment.getStartTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 현재 세그먼트의 시간 오프셋 계산 (초 단위)
|
||||||
|
long segmentOffset = java.time.Duration.between(baseTime, segment.getStartTime()).getSeconds();
|
||||||
|
|
||||||
|
// geometry 파싱 - 괄호 처리 개선
|
||||||
|
int startIdx = geom.indexOf('(');
|
||||||
|
int endIdx = geom.lastIndexOf(')');
|
||||||
|
if (startIdx == -1 || endIdx == -1) continue;
|
||||||
|
|
||||||
|
String points = geom.substring(startIdx + 1, endIdx);
|
||||||
|
|
||||||
|
// 포인트들을 파싱 (쉼표로 분리하되, 괄호 내부는 무시)
|
||||||
|
List<String> pointList = new ArrayList<>();
|
||||||
|
StringBuilder currentPoint = new StringBuilder();
|
||||||
|
int parenDepth = 0;
|
||||||
|
|
||||||
|
for (char c : points.toCharArray()) {
|
||||||
|
if (c == '(') parenDepth++;
|
||||||
|
else if (c == ')') parenDepth--;
|
||||||
|
else if (c == ',' && parenDepth == 0) {
|
||||||
|
pointList.add(currentPoint.toString().trim());
|
||||||
|
currentPoint = new StringBuilder();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
currentPoint.append(c);
|
||||||
|
}
|
||||||
|
if (currentPoint.length() > 0) {
|
||||||
|
pointList.add(currentPoint.toString().trim());
|
||||||
|
}
|
||||||
|
|
||||||
|
for (String pointStr : pointList) {
|
||||||
|
String[] coords = pointStr.trim().split("\\s+");
|
||||||
|
if (coords.length >= 3) {
|
||||||
|
try {
|
||||||
|
double lon = Double.parseDouble(coords[0]);
|
||||||
|
double lat = Double.parseDouble(coords[1]);
|
||||||
|
double m = Double.parseDouble(coords[2]) + segmentOffset; // 시간 오프셋 추가
|
||||||
|
allPoints.add(new Point(lon, lat, m));
|
||||||
|
} catch (NumberFormatException e) {
|
||||||
|
log.debug("Failed to parse point: {}", pointStr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (allPoints.isEmpty()) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 중복 제거 및 시간순 정렬
|
||||||
|
allPoints = allPoints.stream()
|
||||||
|
.distinct()
|
||||||
|
.sorted(Comparator.comparingDouble(p -> p.m))
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
// 거리 기반 간소화 (5m 미만 이동은 제거)
|
||||||
|
List<Point> simplifiedPoints = simplifyByDistance(allPoints, 5.0);
|
||||||
|
|
||||||
|
// LineStringM 재구성
|
||||||
|
StringBuilder sb = new StringBuilder("LINESTRING M(");
|
||||||
|
for (int i = 0; i < simplifiedPoints.size(); i++) {
|
||||||
|
if (i > 0) sb.append(", ");
|
||||||
|
Point p = simplifiedPoints.get(i);
|
||||||
|
sb.append(p.lon).append(" ").append(p.lat).append(" ").append(p.m);
|
||||||
|
}
|
||||||
|
sb.append(")");
|
||||||
|
|
||||||
|
return sb.toString();
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Failed to merge LineStrings: {}", e.getMessage());
|
||||||
|
// 실패 시 첫 번째 세그먼트 반환
|
||||||
|
return segments.get(0).getTrackGeom();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 거리 기반 간소화
|
||||||
|
*/
|
||||||
|
private List<Point> simplifyByDistance(List<Point> points, double minDistanceMeters) {
|
||||||
|
if (points.size() <= 2) {
|
||||||
|
return points;
|
||||||
|
}
|
||||||
|
|
||||||
|
List<Point> result = new ArrayList<>();
|
||||||
|
result.add(points.get(0)); // 시작점
|
||||||
|
|
||||||
|
Point lastAdded = points.get(0);
|
||||||
|
|
||||||
|
for (int i = 1; i < points.size() - 1; i++) {
|
||||||
|
Point current = points.get(i);
|
||||||
|
double distance = calculateDistance(lastAdded.lat, lastAdded.lon, current.lat, current.lon);
|
||||||
|
|
||||||
|
if (distance >= minDistanceMeters) {
|
||||||
|
result.add(current);
|
||||||
|
lastAdded = current;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 마지막 포인트가 포함되지 않았다면 추가 (궤적 끝점 보존)
|
||||||
|
Point lastPoint = points.get(points.size() - 1);
|
||||||
|
if (!result.get(result.size() - 1).equals(lastPoint)) {
|
||||||
|
result.add(lastPoint);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Haversine 공식으로 두 지점 간 거리 계산 (미터 단위)
|
||||||
|
*/
|
||||||
|
private double calculateDistance(double lat1, double lon1, double lat2, double lon2) {
|
||||||
|
final double R = 6371000; // 지구 반지름 (미터)
|
||||||
|
double dLat = Math.toRadians(lat2 - lat1);
|
||||||
|
double dLon = Math.toRadians(lon2 - lon1);
|
||||||
|
double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
|
||||||
|
Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
|
||||||
|
Math.sin(dLon / 2) * Math.sin(dLon / 2);
|
||||||
|
double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
|
||||||
|
return R * c;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 병합된 궤적에서 실제 거리와 평균 속도 계산
|
||||||
|
*/
|
||||||
|
private Map<String, Object> calculateMergedTrackStats(String mergedGeom, LocalDateTime startTime, LocalDateTime endTime) {
|
||||||
|
Map<String, Object> stats = new HashMap<>();
|
||||||
|
|
||||||
|
try {
|
||||||
|
// PostGIS로 실제 거리 계산 (geography로 변환하여 미터 단위 계산)
|
||||||
|
String sql = "SELECT ST_Length(ST_GeomFromText(?::text, 4326)::geography) / 1852.0 as distance_nm";
|
||||||
|
Double distance = queryJdbcTemplate.queryForObject(sql, Double.class, mergedGeom);
|
||||||
|
|
||||||
|
// 시간차 계산 (초)
|
||||||
|
long seconds = java.time.Duration.between(startTime, endTime).getSeconds();
|
||||||
|
|
||||||
|
// 평균 속도 계산 (knots)
|
||||||
|
double avgSpeed = 0.0;
|
||||||
|
if (seconds > 0 && distance != null && distance > 0) {
|
||||||
|
avgSpeed = (distance / (seconds / 3600.0));
|
||||||
|
}
|
||||||
|
|
||||||
|
stats.put("distance", distance != null ? distance : 0.0);
|
||||||
|
stats.put("avgSpeed", avgSpeed);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.debug("Failed to calculate merged track stats: {}", e.getMessage());
|
||||||
|
stats.put("distance", 0.0);
|
||||||
|
stats.put("avgSpeed", 0.0);
|
||||||
|
}
|
||||||
|
|
||||||
|
return stats;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 큰 시간 간격의 세그먼트들을 필터링
|
||||||
|
* 성능을 위해 O(n) 시간복잡도로 구현
|
||||||
|
*/
|
||||||
|
private List<VesselTrackData> filterLargeGaps(List<VesselTrackData> segments, String vesselId) {
|
||||||
|
if (segments.size() <= 1) {
|
||||||
|
return segments;
|
||||||
|
}
|
||||||
|
|
||||||
|
List<VesselTrackData> filtered = new ArrayList<>();
|
||||||
|
filtered.add(segments.get(0));
|
||||||
|
|
||||||
|
int filteredCount = 0;
|
||||||
|
|
||||||
|
for (int i = 1; i < segments.size(); i++) {
|
||||||
|
VesselTrackData prev = segments.get(i - 1);
|
||||||
|
VesselTrackData curr = segments.get(i);
|
||||||
|
|
||||||
|
// 이전 세그먼트의 종료 시간과 현재 세그먼트의 시작 시간 간격 계산
|
||||||
|
LocalDateTime prevEndTime = prev.getEndTime() != null ? prev.getEndTime() : prev.getStartTime();
|
||||||
|
LocalDateTime currStartTime = curr.getStartTime();
|
||||||
|
|
||||||
|
if (prevEndTime != null && currStartTime != null) {
|
||||||
|
long gapHours = Duration.between(prevEndTime, currStartTime).toHours();
|
||||||
|
|
||||||
|
// 설정된 시간 이상의 gap이면 연결하지 않음
|
||||||
|
if (gapHours >= largeGapThresholdHours) {
|
||||||
|
filteredCount++;
|
||||||
|
log.debug("Large gap filtered for vessel {} - gap: {} hours between {} and {}",
|
||||||
|
vesselId, gapHours, prevEndTime, currStartTime);
|
||||||
|
|
||||||
|
// 큰 gap 이후의 세그먼트는 새로운 궤적의 시작으로 처리
|
||||||
|
// 현재는 단순히 제외하지만, 향후 별도 궤적으로 분리할 수 있음
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filtered.add(curr);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (filteredCount > 0) {
|
||||||
|
log.info("Filtered {} segments with large gaps for vessel {} (threshold: {} hours)",
|
||||||
|
filteredCount, vesselId, largeGapThresholdHours);
|
||||||
|
}
|
||||||
|
|
||||||
|
return filtered;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 포인트 내부 클래스
|
||||||
|
*/
|
||||||
|
private static class Point {
|
||||||
|
final double lon;
|
||||||
|
final double lat;
|
||||||
|
final double m;
|
||||||
|
|
||||||
|
Point(double lon, double lat, double m) {
|
||||||
|
this.lon = lon;
|
||||||
|
this.lat = lat;
|
||||||
|
this.m = m;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object o) {
|
||||||
|
if (this == o) return true;
|
||||||
|
if (o == null || getClass() != o.getClass()) return false;
|
||||||
|
Point point = (Point) o;
|
||||||
|
return Double.compare(point.lon, lon) == 0 &&
|
||||||
|
Double.compare(point.lat, lat) == 0 &&
|
||||||
|
Double.compare(point.m, m) == 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,285 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service.filter;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.TrackQueryRequest;
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.ViewportFilter;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
public class AdvancedTrackFilter {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SQL WHERE 절 생성
|
||||||
|
*/
|
||||||
|
public FilterResult buildWhereClause(TrackQueryRequest request, String tableAlias) {
|
||||||
|
List<String> conditions = new ArrayList<>();
|
||||||
|
Map<String, Object> parameters = new HashMap<>();
|
||||||
|
|
||||||
|
// 시간 필터
|
||||||
|
conditions.add(String.format("%s.time_bucket >= ?", tableAlias));
|
||||||
|
parameters.put("startTime", request.getStartTime());
|
||||||
|
|
||||||
|
conditions.add(String.format("%s.time_bucket < ?", tableAlias));
|
||||||
|
parameters.put("endTime", request.getEndTime());
|
||||||
|
|
||||||
|
// Viewport 필터
|
||||||
|
if (request.getViewport() != null && request.getViewport().isValid()) {
|
||||||
|
String viewportCondition = buildViewportCondition(request.getViewport(), tableAlias);
|
||||||
|
conditions.add(viewportCondition);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 해구 필터
|
||||||
|
if (request.getHaeguNumbers() != null && !request.getHaeguNumbers().isEmpty()) {
|
||||||
|
String haeguCondition = buildHaeguCondition(request.getHaeguNumbers(), tableAlias);
|
||||||
|
conditions.add(haeguCondition);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 영역 필터
|
||||||
|
if (request.getAreaIds() != null && !request.getAreaIds().isEmpty()) {
|
||||||
|
String areaCondition = buildAreaCondition(request.getAreaIds(), tableAlias);
|
||||||
|
conditions.add(areaCondition);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 선박 필터
|
||||||
|
if (request.getVesselIds() != null && !request.getVesselIds().isEmpty()) {
|
||||||
|
String vesselCondition = buildVesselCondition(request.getVesselIds(), tableAlias);
|
||||||
|
conditions.add(vesselCondition);
|
||||||
|
}
|
||||||
|
|
||||||
|
// WHERE 절 조합
|
||||||
|
String whereClause = conditions.isEmpty() ? "" :
|
||||||
|
" WHERE " + String.join(" AND ", conditions);
|
||||||
|
|
||||||
|
return FilterResult.builder()
|
||||||
|
.whereClause(whereClause)
|
||||||
|
.parameters(parameters)
|
||||||
|
.hasFilters(!conditions.isEmpty())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 복합 필터 빌더
|
||||||
|
*/
|
||||||
|
public CompositeFilter createCompositeFilter(TrackQueryRequest request) {
|
||||||
|
CompositeFilter.CompositeFilterBuilder builder = CompositeFilter.builder();
|
||||||
|
|
||||||
|
// 기본 필터
|
||||||
|
builder.timeRange(new TimeRange(request.getStartTime(), request.getEndTime()));
|
||||||
|
|
||||||
|
// 공간 필터
|
||||||
|
if (request.getViewport() != null) {
|
||||||
|
builder.spatialFilter(new SpatialFilter(
|
||||||
|
request.getViewport(),
|
||||||
|
request.getHaeguNumbers(),
|
||||||
|
request.getAreaIds()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// 선박 필터
|
||||||
|
if (request.getVesselIds() != null) {
|
||||||
|
builder.vesselFilter(new VesselFilter(request.getVesselIds()));
|
||||||
|
}
|
||||||
|
|
||||||
|
return builder.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 동적 필터 최적화
|
||||||
|
* 필터 조건에 따라 최적의 쿼리 전략 선택
|
||||||
|
*/
|
||||||
|
public QueryStrategy optimizeQueryStrategy(CompositeFilter filter) {
|
||||||
|
// 선박 ID가 적으면 선박별 쿼리
|
||||||
|
if (filter.hasVesselFilter() && filter.getVesselFilter().getVesselIds().size() <= 10) {
|
||||||
|
return QueryStrategy.VESSEL_BASED;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 특정 영역이면 영역 기반 쿼리
|
||||||
|
if (filter.hasSpatialFilter() && filter.getSpatialFilter().hasAreaFilter()) {
|
||||||
|
return QueryStrategy.AREA_BASED;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 해구 필터가 있으면 해구 기반 쿼리
|
||||||
|
if (filter.hasSpatialFilter() && filter.getSpatialFilter().hasHaeguFilter()) {
|
||||||
|
return QueryStrategy.HAEGU_BASED;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 기본: 시간 기반 쿼리
|
||||||
|
return QueryStrategy.TIME_BASED;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Private helper methods
|
||||||
|
|
||||||
|
private String buildViewportCondition(ViewportFilter viewport, String alias) {
|
||||||
|
return String.format(
|
||||||
|
"public.ST_Intersects(%s.track_geom, public.ST_MakeEnvelope(?, ?, ?, ?, 4326))",
|
||||||
|
alias
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private String buildHaeguCondition(List<Integer> haeguNumbers, String alias) {
|
||||||
|
String placeholders = haeguNumbers.stream()
|
||||||
|
.map(n -> "?")
|
||||||
|
.collect(Collectors.joining(","));
|
||||||
|
|
||||||
|
// 대해구와 소해구 모두 확인
|
||||||
|
return String.format(
|
||||||
|
"EXISTS (SELECT 1 FROM t_grid_tiles gt WHERE " +
|
||||||
|
"(gt.large_grid_no IN (%s) OR gt.grid_no IN (%s)) AND " +
|
||||||
|
"public.ST_Intersects(%s.track_geom, gt.boundary))",
|
||||||
|
placeholders, placeholders, alias
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private String buildAreaCondition(List<String> areaIds, String alias) {
|
||||||
|
String placeholders = areaIds.stream()
|
||||||
|
.map(id -> "?")
|
||||||
|
.collect(Collectors.joining(","));
|
||||||
|
|
||||||
|
return String.format(
|
||||||
|
"EXISTS (SELECT 1 FROM t_areas a WHERE " +
|
||||||
|
"a.area_id IN (%s) AND " +
|
||||||
|
"public.ST_Intersects(%s.track_geom, a.boundary))",
|
||||||
|
placeholders, alias
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private String buildVesselCondition(List<String> vesselIds, String alias) {
|
||||||
|
String placeholders = vesselIds.stream()
|
||||||
|
.map(id -> "?")
|
||||||
|
.collect(Collectors.joining(","));
|
||||||
|
|
||||||
|
return String.format("%s.target_id IN (%s)", alias, placeholders);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Inner classes
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
public static class FilterResult {
|
||||||
|
private String whereClause;
|
||||||
|
private Map<String, Object> parameters;
|
||||||
|
private boolean hasFilters;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
public static class CompositeFilter {
|
||||||
|
private TimeRange timeRange;
|
||||||
|
private SpatialFilter spatialFilter;
|
||||||
|
private VesselFilter vesselFilter;
|
||||||
|
|
||||||
|
public boolean hasSpatialFilter() {
|
||||||
|
return spatialFilter != null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean hasVesselFilter() {
|
||||||
|
return vesselFilter != null && !vesselFilter.getVesselIds().isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class TimeRange {
|
||||||
|
private final LocalDateTime startTime;
|
||||||
|
private final LocalDateTime endTime;
|
||||||
|
|
||||||
|
public long getDurationHours() {
|
||||||
|
return java.time.Duration.between(startTime, endTime).toHours();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class SpatialFilter {
|
||||||
|
private final ViewportFilter viewport;
|
||||||
|
private final List<Integer> haeguNumbers;
|
||||||
|
private final List<String> areaIds;
|
||||||
|
|
||||||
|
public boolean hasHaeguFilter() {
|
||||||
|
return haeguNumbers != null && !haeguNumbers.isEmpty();
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean hasAreaFilter() {
|
||||||
|
return areaIds != null && !areaIds.isEmpty();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class VesselFilter {
|
||||||
|
private final List<String> vesselIds;
|
||||||
|
|
||||||
|
public boolean isMultiVessel() {
|
||||||
|
return vesselIds.size() > 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public enum QueryStrategy {
|
||||||
|
TIME_BASED("시간 기반 쿼리"),
|
||||||
|
VESSEL_BASED("선박 기반 쿼리"),
|
||||||
|
AREA_BASED("영역 기반 쿼리"),
|
||||||
|
HAEGU_BASED("해구 기반 쿼리");
|
||||||
|
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
private final String description;
|
||||||
|
|
||||||
|
QueryStrategy(String description) {
|
||||||
|
this.description = description;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 캐시 키 생성기
|
||||||
|
* 동일한 필터 조건에 대한 캐시 키 생성
|
||||||
|
*/
|
||||||
|
public String generateCacheKey(CompositeFilter filter) {
|
||||||
|
List<String> parts = new ArrayList<>();
|
||||||
|
|
||||||
|
// 시간 범위 (5분 단위로 정규화)
|
||||||
|
long startMinutes = filter.getTimeRange().getStartTime().getMinute() / 5 * 5;
|
||||||
|
long endMinutes = filter.getTimeRange().getEndTime().getMinute() / 5 * 5;
|
||||||
|
parts.add(String.format("T:%d-%d", startMinutes, endMinutes));
|
||||||
|
|
||||||
|
// 공간 필터
|
||||||
|
if (filter.hasSpatialFilter()) {
|
||||||
|
SpatialFilter spatial = filter.getSpatialFilter();
|
||||||
|
|
||||||
|
if (spatial.getViewport() != null) {
|
||||||
|
parts.add(String.format("V:%.2f,%.2f,%.2f,%.2f",
|
||||||
|
spatial.getViewport().getMinLon(),
|
||||||
|
spatial.getViewport().getMinLat(),
|
||||||
|
spatial.getViewport().getMaxLon(),
|
||||||
|
spatial.getViewport().getMaxLat()));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (spatial.hasHaeguFilter()) {
|
||||||
|
String haeguKey = spatial.getHaeguNumbers().stream()
|
||||||
|
.sorted()
|
||||||
|
.map(String::valueOf)
|
||||||
|
.collect(Collectors.joining(","));
|
||||||
|
parts.add("H:" + haeguKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (spatial.hasAreaFilter()) {
|
||||||
|
String areaKey = spatial.getAreaIds().stream()
|
||||||
|
.sorted()
|
||||||
|
.collect(Collectors.joining(","));
|
||||||
|
parts.add("A:" + areaKey);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 선박 필터
|
||||||
|
if (filter.hasVesselFilter()) {
|
||||||
|
String vesselKey = filter.getVesselFilter().getVesselIds().stream()
|
||||||
|
.sorted()
|
||||||
|
.collect(Collectors.joining(","));
|
||||||
|
parts.add("S:" + vesselKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
return String.join("|", parts);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,382 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service.filter;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.TrackQueryRequest;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.locationtech.jts.geom.Coordinate;
|
||||||
|
import org.locationtech.jts.geom.LineString;
|
||||||
|
import org.locationtech.jts.io.ParseException;
|
||||||
|
import org.locationtech.jts.io.WKTReader;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박 궤적 데이터의 거리/속도 기반 필터링을 수행하는 컴포넌트
|
||||||
|
* bucket 간 연결성을 고려한 정확한 거리 계산 포함
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class VesselTrackFilter {
|
||||||
|
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
private final WKTReader wktReader = new WKTReader();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 거리/속도 기반으로 선박을 필터링
|
||||||
|
*/
|
||||||
|
public Set<String> filterVesselsByDistanceAndSpeed(TrackQueryRequest request, String tableName) {
|
||||||
|
if (!hasDistanceOrSpeedFilter(request)) {
|
||||||
|
return Collections.emptySet(); // 필터가 없으면 빈 Set 반환 (모든 선박 포함)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Filtering vessels by distance/speed from table: {}", tableName);
|
||||||
|
|
||||||
|
// 1단계: 기간 내 모든 선박의 궤적 데이터 조회
|
||||||
|
Map<String, List<VesselTrackSegment>> vesselTracks = loadVesselTracks(request, tableName);
|
||||||
|
|
||||||
|
// 2단계: 각 선박별로 전체 거리와 평균 속도 계산
|
||||||
|
Map<String, VesselMetrics> vesselMetrics = calculateVesselMetrics(vesselTracks, request.getIncludeInterBucketDistance());
|
||||||
|
|
||||||
|
// 3단계: 필터 조건에 맞는 선박만 선택
|
||||||
|
Set<String> filteredVessels = filterVessels(vesselMetrics, request);
|
||||||
|
|
||||||
|
log.info("Filtered {} vessels out of {} total vessels", filteredVessels.size(), vesselTracks.size());
|
||||||
|
|
||||||
|
return filteredVessels;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 여러 테이블에서 조회한 선박 궤적을 통합하여 필터링
|
||||||
|
*/
|
||||||
|
public Set<String> filterVesselsByDistanceAndSpeedMultipleTables(
|
||||||
|
TrackQueryRequest request,
|
||||||
|
Map<String, List<TimeRange>> tableStrategyMap) {
|
||||||
|
|
||||||
|
if (!hasDistanceOrSpeedFilter(request)) {
|
||||||
|
return Collections.emptySet();
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Filtering vessels across multiple tables: {}", tableStrategyMap.keySet());
|
||||||
|
|
||||||
|
// 모든 테이블에서 선박별 궤적 데이터 수집
|
||||||
|
Map<String, List<VesselTrackSegment>> allVesselTracks = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
for (Map.Entry<String, List<TimeRange>> entry : tableStrategyMap.entrySet()) {
|
||||||
|
String tableName = entry.getKey();
|
||||||
|
List<TimeRange> timeRanges = entry.getValue();
|
||||||
|
|
||||||
|
for (TimeRange range : timeRanges) {
|
||||||
|
TrackQueryRequest rangeRequest = request.toBuilder()
|
||||||
|
.startTime(range.getStart())
|
||||||
|
.endTime(range.getEnd())
|
||||||
|
.build();
|
||||||
|
|
||||||
|
Map<String, List<VesselTrackSegment>> tableVesselTracks =
|
||||||
|
loadVesselTracks(rangeRequest, tableName);
|
||||||
|
|
||||||
|
// 기존 데이터와 병합
|
||||||
|
for (Map.Entry<String, List<VesselTrackSegment>> vesselEntry : tableVesselTracks.entrySet()) {
|
||||||
|
allVesselTracks.computeIfAbsent(vesselEntry.getKey(), k -> new ArrayList<>())
|
||||||
|
.addAll(vesselEntry.getValue());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 병합된 데이터로 메트릭 계산
|
||||||
|
Map<String, VesselMetrics> vesselMetrics = calculateVesselMetrics(
|
||||||
|
allVesselTracks, request.getIncludeInterBucketDistance());
|
||||||
|
|
||||||
|
// 필터 적용
|
||||||
|
Set<String> filteredVessels = filterVessels(vesselMetrics, request);
|
||||||
|
|
||||||
|
log.info("Filtered {} vessels out of {} total vessels across all tables",
|
||||||
|
filteredVessels.size(), allVesselTracks.size());
|
||||||
|
|
||||||
|
return filteredVessels;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 거리/속도 필터가 있는지 확인
|
||||||
|
*/
|
||||||
|
private boolean hasDistanceOrSpeedFilter(TrackQueryRequest request) {
|
||||||
|
return request.getMinTotalDistance() != null ||
|
||||||
|
request.getMaxTotalDistance() != null ||
|
||||||
|
request.getMinAvgSpeed() != null ||
|
||||||
|
request.getMaxAvgSpeed() != null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박별 궤적 데이터 로드
|
||||||
|
*/
|
||||||
|
private Map<String, List<VesselTrackSegment>> loadVesselTracks(TrackQueryRequest request, String tableName) {
|
||||||
|
String sql = """
|
||||||
|
SELECT sig_src_cd, target_id, time_bucket,
|
||||||
|
public.ST_AsText(track_geom) as track_geom, -- WKT 형식으로 변환
|
||||||
|
distance_nm, avg_speed, point_count,
|
||||||
|
LEAD(time_bucket) OVER (PARTITION BY sig_src_cd, target_id ORDER BY time_bucket) as next_bucket,
|
||||||
|
LEAD(public.ST_AsText(track_geom)) OVER (PARTITION BY sig_src_cd, target_id ORDER BY time_bucket) as next_geom -- WKT 형식으로 변환
|
||||||
|
FROM %s
|
||||||
|
WHERE time_bucket >= ? AND time_bucket < ?
|
||||||
|
ORDER BY sig_src_cd, target_id, time_bucket
|
||||||
|
""".formatted(tableName);
|
||||||
|
|
||||||
|
Map<String, List<VesselTrackSegment>> vesselTracks = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
queryJdbcTemplate.query(sql, rs -> {
|
||||||
|
String vesselId = rs.getString("sig_src_cd") + "_" + rs.getString("target_id");
|
||||||
|
|
||||||
|
VesselTrackSegment segment = VesselTrackSegment.builder()
|
||||||
|
.vesselId(vesselId)
|
||||||
|
.timeBucket(rs.getTimestamp("time_bucket").toLocalDateTime())
|
||||||
|
.trackGeom(rs.getString("track_geom"))
|
||||||
|
.distanceNm(rs.getDouble("distance_nm"))
|
||||||
|
.avgSpeed(rs.getDouble("avg_speed"))
|
||||||
|
.pointCount(rs.getInt("point_count"))
|
||||||
|
.nextBucket(rs.getTimestamp("next_bucket") != null ?
|
||||||
|
rs.getTimestamp("next_bucket").toLocalDateTime() : null)
|
||||||
|
.nextGeom(rs.getString("next_geom"))
|
||||||
|
.build();
|
||||||
|
|
||||||
|
vesselTracks.computeIfAbsent(vesselId, k -> new ArrayList<>()).add(segment);
|
||||||
|
}, Timestamp.valueOf(request.getStartTime()), Timestamp.valueOf(request.getEndTime()));
|
||||||
|
|
||||||
|
return vesselTracks;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박별 메트릭 계산 (전체 거리, 평균 속도)
|
||||||
|
*/
|
||||||
|
private Map<String, VesselMetrics> calculateVesselMetrics(
|
||||||
|
Map<String, List<VesselTrackSegment>> vesselTracks,
|
||||||
|
boolean includeInterBucketDistance) {
|
||||||
|
|
||||||
|
Map<String, VesselMetrics> metrics = new HashMap<>();
|
||||||
|
|
||||||
|
for (Map.Entry<String, List<VesselTrackSegment>> entry : vesselTracks.entrySet()) {
|
||||||
|
String vesselId = entry.getKey();
|
||||||
|
List<VesselTrackSegment> segments = entry.getValue();
|
||||||
|
|
||||||
|
double totalDistance = 0.0;
|
||||||
|
double totalTime = 0.0;
|
||||||
|
int totalPoints = 0;
|
||||||
|
|
||||||
|
for (int i = 0; i < segments.size(); i++) {
|
||||||
|
VesselTrackSegment segment = segments.get(i);
|
||||||
|
|
||||||
|
// 세그먼트 내 거리
|
||||||
|
totalDistance += segment.getDistanceNm();
|
||||||
|
totalPoints += segment.getPointCount();
|
||||||
|
|
||||||
|
// bucket 간 거리 계산 (옵션)
|
||||||
|
if (includeInterBucketDistance && segment.getNextBucket() != null && i < segments.size() - 1) {
|
||||||
|
double interBucketDistance = calculateInterBucketDistance(segment);
|
||||||
|
if (interBucketDistance > 0) {
|
||||||
|
totalDistance += interBucketDistance;
|
||||||
|
|
||||||
|
// 시간 계산 (bucket 간격)
|
||||||
|
double timeDiff = java.time.Duration.between(
|
||||||
|
segment.getTimeBucket(),
|
||||||
|
segment.getNextBucket()
|
||||||
|
).toMinutes() / 60.0; // hours
|
||||||
|
totalTime += timeDiff;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 전체 시간 계산 (첫 bucket ~ 마지막 bucket)
|
||||||
|
if (!segments.isEmpty()) {
|
||||||
|
LocalDateTime firstTime = segments.get(0).getTimeBucket();
|
||||||
|
LocalDateTime lastTime = segments.get(segments.size() - 1).getTimeBucket();
|
||||||
|
totalTime = java.time.Duration.between(firstTime, lastTime).toMinutes() / 60.0; // hours
|
||||||
|
}
|
||||||
|
|
||||||
|
// 평균 속도 계산
|
||||||
|
double avgSpeed = totalTime > 0 ? totalDistance / totalTime : 0.0;
|
||||||
|
|
||||||
|
metrics.put(vesselId, VesselMetrics.builder()
|
||||||
|
.vesselId(vesselId)
|
||||||
|
.totalDistance(totalDistance)
|
||||||
|
.avgSpeed(avgSpeed)
|
||||||
|
.totalPoints(totalPoints)
|
||||||
|
.segmentCount(segments.size())
|
||||||
|
.build());
|
||||||
|
}
|
||||||
|
|
||||||
|
return metrics;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* bucket 간 거리 계산 (마지막 점과 다음 첫 점 사이)
|
||||||
|
*/
|
||||||
|
private double calculateInterBucketDistance(VesselTrackSegment segment) {
|
||||||
|
try {
|
||||||
|
if (segment.getTrackGeom() == null || segment.getNextGeom() == null) {
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 빈 문자열 체크
|
||||||
|
String currentGeom = segment.getTrackGeom().trim();
|
||||||
|
String nextGeom = segment.getNextGeom().trim();
|
||||||
|
|
||||||
|
if (currentGeom.isEmpty() || nextGeom.isEmpty()) {
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// LINESTRING EMPTY 체크
|
||||||
|
if (currentGeom.equalsIgnoreCase("LINESTRING EMPTY") ||
|
||||||
|
nextGeom.equalsIgnoreCase("LINESTRING EMPTY")) {
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// LINESTRING M 형식 체크 (PostGIS의 M 값을 포함한 LineString)
|
||||||
|
if (currentGeom.toUpperCase().contains("LINESTRING M") && !currentGeom.contains("(")) {
|
||||||
|
log.debug("Invalid LINESTRING M format: {}", currentGeom.substring(0, Math.min(50, currentGeom.length())));
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
LineString currentLine = (LineString) wktReader.read(currentGeom);
|
||||||
|
LineString nextLine = (LineString) wktReader.read(nextGeom);
|
||||||
|
|
||||||
|
if (currentLine.isEmpty() || nextLine.isEmpty() ||
|
||||||
|
currentLine.getNumPoints() == 0 || nextLine.getNumPoints() == 0) {
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 현재 궤적의 마지막 점
|
||||||
|
Coordinate lastPoint = currentLine.getCoordinateN(currentLine.getNumPoints() - 1);
|
||||||
|
// 다음 궤적의 첫 점
|
||||||
|
Coordinate firstPoint = nextLine.getCoordinateN(0);
|
||||||
|
|
||||||
|
// 거리 계산 (Haversine formula)
|
||||||
|
return calculateHaversineDistance(
|
||||||
|
lastPoint.y, lastPoint.x, // lat, lon
|
||||||
|
firstPoint.y, firstPoint.x
|
||||||
|
);
|
||||||
|
|
||||||
|
} catch (ParseException e) {
|
||||||
|
// 상세한 디버그 정보 출력
|
||||||
|
String currentSample = segment.getTrackGeom() != null ?
|
||||||
|
segment.getTrackGeom().substring(0, Math.min(100, segment.getTrackGeom().length())) : "null";
|
||||||
|
String nextSample = segment.getNextGeom() != null ?
|
||||||
|
segment.getNextGeom().substring(0, Math.min(100, segment.getNextGeom().length())) : "null";
|
||||||
|
|
||||||
|
// WKB 형식인지 확인 (16진수로 시작하는 경우)
|
||||||
|
if (currentSample.matches("^[0-9A-Fa-f]+$")) {
|
||||||
|
log.error("Geometry is in WKB format, not WKT. Please use public.ST_AsText() in SQL query. Sample: {}",
|
||||||
|
currentSample.substring(0, Math.min(50, currentSample.length())));
|
||||||
|
} else {
|
||||||
|
log.warn("Failed to parse geometry: {} - Current: {}, Next: {}",
|
||||||
|
e.getMessage(), currentSample, nextSample);
|
||||||
|
}
|
||||||
|
return 0.0;
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Unexpected error in calculateInterBucketDistance: {}", e.getMessage(), e);
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Haversine 공식을 사용한 거리 계산 (nautical miles)
|
||||||
|
*/
|
||||||
|
private double calculateHaversineDistance(double lat1, double lon1, double lat2, double lon2) {
|
||||||
|
double R = 3440.065; // 지구 반경 (nautical miles)
|
||||||
|
double dLat = Math.toRadians(lat2 - lat1);
|
||||||
|
double dLon = Math.toRadians(lon2 - lon1);
|
||||||
|
|
||||||
|
double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
|
||||||
|
Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
|
||||||
|
Math.sin(dLon / 2) * Math.sin(dLon / 2);
|
||||||
|
|
||||||
|
double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
|
||||||
|
return R * c;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 필터 조건에 맞는 선박 선택
|
||||||
|
*/
|
||||||
|
private Set<String> filterVessels(Map<String, VesselMetrics> vesselMetrics, TrackQueryRequest request) {
|
||||||
|
return vesselMetrics.entrySet().stream()
|
||||||
|
.filter(entry -> {
|
||||||
|
VesselMetrics metrics = entry.getValue();
|
||||||
|
|
||||||
|
// 최소 거리 필터
|
||||||
|
if (request.getMinTotalDistance() != null &&
|
||||||
|
metrics.getTotalDistance() < request.getMinTotalDistance()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 최대 거리 필터
|
||||||
|
if (request.getMaxTotalDistance() != null &&
|
||||||
|
metrics.getTotalDistance() > request.getMaxTotalDistance()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 최소 속도 필터
|
||||||
|
if (request.getMinAvgSpeed() != null &&
|
||||||
|
metrics.getAvgSpeed() < request.getMinAvgSpeed()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 최대 속도 필터
|
||||||
|
if (request.getMaxAvgSpeed() != null &&
|
||||||
|
metrics.getAvgSpeed() > request.getMaxAvgSpeed()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
})
|
||||||
|
.map(Map.Entry::getKey)
|
||||||
|
.collect(Collectors.toSet());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 선박별 메트릭 정보를 반환 (디버깅/통계용)
|
||||||
|
*/
|
||||||
|
public Map<String, VesselMetrics> getVesselMetrics(TrackQueryRequest request, String tableName) {
|
||||||
|
Map<String, List<VesselTrackSegment>> vesselTracks = loadVesselTracks(request, tableName);
|
||||||
|
return calculateVesselMetrics(vesselTracks, request.getIncludeInterBucketDistance());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
private static class VesselTrackSegment {
|
||||||
|
private String vesselId;
|
||||||
|
private LocalDateTime timeBucket;
|
||||||
|
private String trackGeom;
|
||||||
|
private double distanceNm;
|
||||||
|
private double avgSpeed;
|
||||||
|
private int pointCount;
|
||||||
|
private LocalDateTime nextBucket;
|
||||||
|
private String nextGeom;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 시간 범위를 나타내는 내부 클래스
|
||||||
|
*/
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
public static class TimeRange {
|
||||||
|
private LocalDateTime start;
|
||||||
|
private LocalDateTime end;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
public static class VesselMetrics {
|
||||||
|
private String vesselId;
|
||||||
|
private double totalDistance;
|
||||||
|
private double avgSpeed;
|
||||||
|
private int totalPoints;
|
||||||
|
private int segmentCount;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,193 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service.optimization;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.TrackChunkResponse;
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.VesselTrackData;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.*;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
public class TrackStreamingOptimizer {
|
||||||
|
|
||||||
|
// 시간 범위별 최적 청크 크기
|
||||||
|
private static final Map<Integer, Integer> OPTIMAL_CHUNK_SIZES = Map.of(
|
||||||
|
1, 2000, // 1일 이내
|
||||||
|
7, 1000, // 7일 이내
|
||||||
|
30, 500, // 30일 이내
|
||||||
|
90, 250 // 90일 이내
|
||||||
|
);
|
||||||
|
|
||||||
|
// 병렬 처리 스레드 수
|
||||||
|
private static final Map<Integer, Integer> PARALLEL_THREADS = Map.of(
|
||||||
|
1, 2, // 1일 이내
|
||||||
|
7, 4, // 7일 이내
|
||||||
|
30, 8, // 30일 이내
|
||||||
|
90, 12 // 90일 이내
|
||||||
|
);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 조회 기간에 따른 최적 청크 크기 계산
|
||||||
|
*/
|
||||||
|
public int calculateOptimalChunkSize(long durationDays, int requestedChunkSize) {
|
||||||
|
if (requestedChunkSize > 0) {
|
||||||
|
return requestedChunkSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
return OPTIMAL_CHUNK_SIZES.entrySet().stream()
|
||||||
|
.filter(entry -> durationDays <= entry.getKey())
|
||||||
|
.map(Map.Entry::getValue)
|
||||||
|
.findFirst()
|
||||||
|
.orElse(100);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 병렬 처리 스레드 수 계산
|
||||||
|
*/
|
||||||
|
public int calculateParallelThreads(long durationDays) {
|
||||||
|
return PARALLEL_THREADS.entrySet().stream()
|
||||||
|
.filter(entry -> durationDays <= entry.getKey())
|
||||||
|
.map(Map.Entry::getValue)
|
||||||
|
.findFirst()
|
||||||
|
.orElse(16);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 트랙 데이터 우선순위 정렬
|
||||||
|
* - 속도가 높은 선박 우선
|
||||||
|
* - 이동 거리가 긴 선박 우선
|
||||||
|
* - 최근 데이터 우선
|
||||||
|
*/
|
||||||
|
public List<VesselTrackData> prioritizeTracks(List<VesselTrackData> tracks) {
|
||||||
|
return tracks.stream()
|
||||||
|
.sorted(Comparator
|
||||||
|
.comparing(VesselTrackData::getAvgSpeed, Comparator.reverseOrder())
|
||||||
|
.thenComparing(VesselTrackData::getDistanceNm, Comparator.reverseOrder())
|
||||||
|
.thenComparing(VesselTrackData::getEndTime, Comparator.reverseOrder()))
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 적응형 청크 생성
|
||||||
|
* - 네트워크 상태에 따라 청크 크기 동적 조정
|
||||||
|
* - 클라이언트 처리 속도에 따라 백프레셔 적용
|
||||||
|
*/
|
||||||
|
public CompletableFuture<TrackChunkResponse> createAdaptiveChunk(
|
||||||
|
String queryId,
|
||||||
|
List<VesselTrackData> tracks,
|
||||||
|
int chunkIndex,
|
||||||
|
int baseChunkSize,
|
||||||
|
NetworkMetrics networkMetrics) {
|
||||||
|
|
||||||
|
return CompletableFuture.supplyAsync(() -> {
|
||||||
|
// 네트워크 지연에 따른 청크 크기 조정
|
||||||
|
int adjustedChunkSize = adjustChunkSize(baseChunkSize, networkMetrics);
|
||||||
|
|
||||||
|
// 우선순위에 따라 정렬된 트랙 선택
|
||||||
|
List<VesselTrackData> prioritizedTracks = prioritizeTracks(tracks);
|
||||||
|
List<VesselTrackData> chunkTracks = prioritizedTracks.stream()
|
||||||
|
.limit(adjustedChunkSize)
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
TrackChunkResponse chunk = new TrackChunkResponse();
|
||||||
|
chunk.setQueryId(queryId);
|
||||||
|
chunk.setChunkIndex(chunkIndex);
|
||||||
|
chunk.setTracks(chunkTracks);
|
||||||
|
chunk.setIsLastChunk(false);
|
||||||
|
|
||||||
|
log.debug("Created adaptive chunk {} with {} tracks (base: {}, adjusted: {})",
|
||||||
|
chunkIndex, chunkTracks.size(), baseChunkSize, adjustedChunkSize);
|
||||||
|
|
||||||
|
return chunk;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 백프레셔 적용을 위한 청크 크기 조정
|
||||||
|
*/
|
||||||
|
private int adjustChunkSize(int baseSize, NetworkMetrics metrics) {
|
||||||
|
double latencyFactor = Math.min(1.0, 100.0 / metrics.getAverageLatencyMs());
|
||||||
|
double throughputFactor = Math.min(1.0, metrics.getThroughputKbps() / 1000.0);
|
||||||
|
|
||||||
|
double adjustmentFactor = (latencyFactor + throughputFactor) / 2.0;
|
||||||
|
return Math.max(100, (int)(baseSize * adjustmentFactor));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 스마트 캐싱 전략
|
||||||
|
* - 자주 조회되는 영역의 트랙 데이터 캐싱
|
||||||
|
* - TTL 기반 캐시 무효화
|
||||||
|
*/
|
||||||
|
public class SmartCache {
|
||||||
|
private final Map<String, CachedData> cache = new ConcurrentHashMap<>();
|
||||||
|
private final long TTL_MILLIS = TimeUnit.MINUTES.toMillis(5);
|
||||||
|
|
||||||
|
public Optional<List<VesselTrackData>> get(String cacheKey) {
|
||||||
|
CachedData cached = cache.get(cacheKey);
|
||||||
|
if (cached != null && !cached.isExpired()) {
|
||||||
|
return Optional.of(cached.getData());
|
||||||
|
}
|
||||||
|
cache.remove(cacheKey);
|
||||||
|
return Optional.empty();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void put(String cacheKey, List<VesselTrackData> data) {
|
||||||
|
cache.put(cacheKey, new CachedData(data));
|
||||||
|
}
|
||||||
|
|
||||||
|
private class CachedData {
|
||||||
|
private final List<VesselTrackData> data;
|
||||||
|
private final long timestamp;
|
||||||
|
|
||||||
|
CachedData(List<VesselTrackData> data) {
|
||||||
|
this.data = data;
|
||||||
|
this.timestamp = System.currentTimeMillis();
|
||||||
|
}
|
||||||
|
|
||||||
|
boolean isExpired() {
|
||||||
|
return System.currentTimeMillis() - timestamp > TTL_MILLIS;
|
||||||
|
}
|
||||||
|
|
||||||
|
List<VesselTrackData> getData() {
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 네트워크 메트릭
|
||||||
|
*/
|
||||||
|
public static class NetworkMetrics {
|
||||||
|
private final Queue<Long> latencies = new ConcurrentLinkedQueue<>();
|
||||||
|
private final Queue<Double> throughputs = new ConcurrentLinkedQueue<>();
|
||||||
|
private static final int MAX_SAMPLES = 100;
|
||||||
|
|
||||||
|
public void recordLatency(long latencyMs) {
|
||||||
|
latencies.offer(latencyMs);
|
||||||
|
if (latencies.size() > MAX_SAMPLES) {
|
||||||
|
latencies.poll();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void recordThroughput(double throughputKbps) {
|
||||||
|
throughputs.offer(throughputKbps);
|
||||||
|
if (throughputs.size() > MAX_SAMPLES) {
|
||||||
|
throughputs.poll();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public double getAverageLatencyMs() {
|
||||||
|
return latencies.isEmpty() ? 50.0 :
|
||||||
|
latencies.stream().mapToLong(Long::longValue).average().orElse(50.0);
|
||||||
|
}
|
||||||
|
|
||||||
|
public double getThroughputKbps() {
|
||||||
|
return throughputs.isEmpty() ? 1000.0 :
|
||||||
|
throughputs.stream().mapToDouble(Double::doubleValue).average().orElse(1000.0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,255 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service.query;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.websocket.dto.QueryStatusUpdate;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.concurrent.*;
|
||||||
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
import java.util.function.Consumer;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
public class CancellableQueryManager {
|
||||||
|
|
||||||
|
// 활성 쿼리 컨텍스트 관리
|
||||||
|
private final Map<String, QueryExecutionContext> activeQueries = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
// 쿼리 실행을 위한 스레드 풀
|
||||||
|
private final ExecutorService queryExecutor = Executors.newCachedThreadPool(r -> {
|
||||||
|
Thread thread = new Thread(r);
|
||||||
|
thread.setName("query-executor-" + thread.getId());
|
||||||
|
thread.setDaemon(true);
|
||||||
|
return thread;
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 취소 가능한 쿼리 실행
|
||||||
|
*/
|
||||||
|
public CompletableFuture<Void> executeQuery(
|
||||||
|
String queryId,
|
||||||
|
QueryTask queryTask,
|
||||||
|
Consumer<QueryStatusUpdate> statusConsumer) {
|
||||||
|
|
||||||
|
QueryExecutionContext context = new QueryExecutionContext(queryId, statusConsumer);
|
||||||
|
activeQueries.put(queryId, context);
|
||||||
|
|
||||||
|
return CompletableFuture.runAsync(() -> {
|
||||||
|
try {
|
||||||
|
// 쿼리 시작 알림
|
||||||
|
context.updateStatus("STARTED", "Query execution started", 0.0);
|
||||||
|
|
||||||
|
// 쿼리 실행
|
||||||
|
queryTask.execute(context);
|
||||||
|
|
||||||
|
// 완료 처리
|
||||||
|
if (!context.isCancelled()) {
|
||||||
|
context.updateStatus("COMPLETED", "Query completed successfully", 100.0);
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
if (context.isCancelled()) {
|
||||||
|
context.updateStatus("CANCELLED", "Query cancelled by user", context.getProgress());
|
||||||
|
} else {
|
||||||
|
log.error("Query {} failed: {}", queryId, e.getMessage(), e);
|
||||||
|
context.updateStatus("ERROR", "Query failed: " + e.getMessage(), context.getProgress());
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
activeQueries.remove(queryId);
|
||||||
|
context.cleanup();
|
||||||
|
}
|
||||||
|
}, queryExecutor);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 쿼리 취소
|
||||||
|
*/
|
||||||
|
public boolean cancelQuery(String queryId) {
|
||||||
|
QueryExecutionContext context = activeQueries.get(queryId);
|
||||||
|
if (context != null) {
|
||||||
|
context.cancel();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 모든 활성 쿼리 취소
|
||||||
|
*/
|
||||||
|
public void cancelAllQueries() {
|
||||||
|
activeQueries.values().forEach(QueryExecutionContext::cancel);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 쿼리 실행 컨텍스트
|
||||||
|
*/
|
||||||
|
public static class QueryExecutionContext {
|
||||||
|
private final String queryId;
|
||||||
|
private final Consumer<QueryStatusUpdate> statusConsumer;
|
||||||
|
private final AtomicBoolean cancelled = new AtomicBoolean(false);
|
||||||
|
private final Map<String, Statement> activeStatements = new ConcurrentHashMap<>();
|
||||||
|
private volatile double progress = 0.0;
|
||||||
|
|
||||||
|
public QueryExecutionContext(String queryId, Consumer<QueryStatusUpdate> statusConsumer) {
|
||||||
|
this.queryId = queryId;
|
||||||
|
this.statusConsumer = statusConsumer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 취소 여부 확인
|
||||||
|
*/
|
||||||
|
public boolean isCancelled() {
|
||||||
|
return cancelled.get();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 쿼리 취소
|
||||||
|
*/
|
||||||
|
public void cancel() {
|
||||||
|
if (cancelled.compareAndSet(false, true)) {
|
||||||
|
log.info("Cancelling query: {}", queryId);
|
||||||
|
|
||||||
|
// 모든 활성 Statement 취소
|
||||||
|
activeStatements.values().forEach(stmt -> {
|
||||||
|
try {
|
||||||
|
if (!stmt.isClosed()) {
|
||||||
|
stmt.cancel();
|
||||||
|
}
|
||||||
|
} catch (SQLException e) {
|
||||||
|
log.warn("Failed to cancel statement for query {}: {}", queryId, e.getMessage());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
updateStatus("CANCELLING", "Query cancellation requested", progress);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Statement 등록 (취소를 위해)
|
||||||
|
*/
|
||||||
|
public void registerStatement(String key, Statement statement) {
|
||||||
|
activeStatements.put(key, statement);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Statement 등록 해제
|
||||||
|
*/
|
||||||
|
public void unregisterStatement(String key) {
|
||||||
|
Statement stmt = activeStatements.remove(key);
|
||||||
|
if (stmt != null) {
|
||||||
|
try {
|
||||||
|
if (!stmt.isClosed()) {
|
||||||
|
stmt.close();
|
||||||
|
}
|
||||||
|
} catch (SQLException e) {
|
||||||
|
log.warn("Failed to close statement: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 진행률 업데이트
|
||||||
|
*/
|
||||||
|
public void updateProgress(double progress) {
|
||||||
|
this.progress = Math.min(100.0, Math.max(0.0, progress));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 진행률 조회
|
||||||
|
*/
|
||||||
|
public double getProgress() {
|
||||||
|
return progress;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 상태 업데이트
|
||||||
|
*/
|
||||||
|
public void updateStatus(String status, String message, double progress) {
|
||||||
|
updateProgress(progress);
|
||||||
|
QueryStatusUpdate update = new QueryStatusUpdate();
|
||||||
|
update.setQueryId(queryId);
|
||||||
|
update.setStatus(status);
|
||||||
|
update.setMessage(message);
|
||||||
|
update.setProgressPercentage(this.progress);
|
||||||
|
statusConsumer.accept(update);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 리소스 정리
|
||||||
|
*/
|
||||||
|
public void cleanup() {
|
||||||
|
activeStatements.keySet().forEach(this::unregisterStatement);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 취소 확인 포인트
|
||||||
|
* 주기적으로 호출하여 취소 여부 확인
|
||||||
|
*/
|
||||||
|
public void checkCancellation() throws CancellationException {
|
||||||
|
if (isCancelled()) {
|
||||||
|
throw new CancellationException("Query cancelled by user");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 쿼리 실행 태스크 인터페이스
|
||||||
|
*/
|
||||||
|
@FunctionalInterface
|
||||||
|
public interface QueryTask {
|
||||||
|
void execute(QueryExecutionContext context) throws Exception;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 청크 단위 쿼리 실행을 위한 헬퍼 메서드
|
||||||
|
*/
|
||||||
|
public void executeChunkedQuery(
|
||||||
|
QueryExecutionContext context,
|
||||||
|
Connection connection,
|
||||||
|
String sql,
|
||||||
|
ChunkProcessor processor,
|
||||||
|
int totalExpected) throws SQLException {
|
||||||
|
|
||||||
|
try (Statement stmt = connection.createStatement()) {
|
||||||
|
// 취소를 위해 Statement 등록
|
||||||
|
context.registerStatement("main-query", stmt);
|
||||||
|
|
||||||
|
// 스트리밍 모드 설정
|
||||||
|
stmt.setFetchSize(1000);
|
||||||
|
|
||||||
|
try (var rs = stmt.executeQuery(sql)) {
|
||||||
|
int processed = 0;
|
||||||
|
|
||||||
|
while (rs.next()) {
|
||||||
|
// 주기적으로 취소 확인
|
||||||
|
if (processed % 100 == 0) {
|
||||||
|
context.checkCancellation();
|
||||||
|
|
||||||
|
// 진행률 업데이트
|
||||||
|
double progress = totalExpected > 0 ?
|
||||||
|
(processed * 100.0 / totalExpected) : 0.0;
|
||||||
|
context.updateProgress(progress);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 청크 처리
|
||||||
|
processor.processRow(rs);
|
||||||
|
processed++;
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
context.unregisterStatement("main-query");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Chunk processor contract.
 * Invoked once per row while a chunked query is being streamed.
 */
@FunctionalInterface
public interface ChunkProcessor {

    /**
     * Processes the row the ResultSet cursor is currently positioned on.
     * Implementations must not advance the cursor themselves.
     *
     * @param rs result set positioned on the current row
     * @throws SQLException on JDBC read failure
     */
    void processRow(java.sql.ResultSet rs) throws SQLException;
}
|
||||||
|
}
|
||||||
@ -0,0 +1,304 @@
|
|||||||
|
package gc.mda.signal_batch.domain.vessel.service.simplification;
|
||||||
|
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.locationtech.jts.geom.*;
|
||||||
|
import org.locationtech.jts.simplify.DouglasPeuckerSimplifier;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 궤적 간소화 전략 구현
|
||||||
|
* 요청 범위와 기간에 따라 동적으로 간소화 레벨을 적용
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
public class TrackSimplificationStrategy {
|
||||||
|
|
||||||
|
private final GeometryFactory geometryFactory = new GeometryFactory(new PrecisionModel(), 4326);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 간소화 레벨 정의
|
||||||
|
*/
|
||||||
|
public enum SimplificationLevel {
|
||||||
|
NONE(1.0, 0.0), // 원본 (간소화 없음)
|
||||||
|
MINIMAL(0.9, 0.00001), // 최소 간소화 (90% 유지)
|
||||||
|
LIGHT(0.75, 0.0001), // 경량 간소화 (75% 유지)
|
||||||
|
MODERATE(0.5, 0.0005), // 중간 간소화 (50% 유지) - 0.001 -> 0.0005
|
||||||
|
HEAVY(0.25, 0.001), // 고도 간소화 (25% 유지) - 0.01 -> 0.001
|
||||||
|
VERY_HEAVY(0.2, 0.0015), // 매우 강한 간소화 (20% 유지)
|
||||||
|
EXTREME(0.1, 0.002); // 극도 간소화 (10% 유지) - 0.1 -> 0.002
|
||||||
|
|
||||||
|
private final double retentionRatio; // 유지 비율
|
||||||
|
private final double tolerance; // Douglas-Peucker 허용치
|
||||||
|
|
||||||
|
SimplificationLevel(double retentionRatio, double tolerance) {
|
||||||
|
this.retentionRatio = retentionRatio;
|
||||||
|
this.tolerance = tolerance;
|
||||||
|
}
|
||||||
|
|
||||||
|
public double getRetentionRatio() {
|
||||||
|
return retentionRatio;
|
||||||
|
}
|
||||||
|
|
||||||
|
public double getTolerance() {
|
||||||
|
return tolerance;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 요청 기간과 뷰포트 크기에 따른 간소화 레벨 결정
|
||||||
|
*/
|
||||||
|
public SimplificationLevel determineLevel(LocalDateTime startTime, LocalDateTime endTime,
|
||||||
|
Double minLon, Double maxLon,
|
||||||
|
Double minLat, Double maxLat) {
|
||||||
|
|
||||||
|
// 시간 범위 계산
|
||||||
|
Duration duration = Duration.between(startTime, endTime);
|
||||||
|
long hours = duration.toHours();
|
||||||
|
|
||||||
|
// 뷰포트 크기 계산
|
||||||
|
double viewportWidth = maxLon - minLon;
|
||||||
|
double viewportHeight = maxLat - minLat;
|
||||||
|
double viewportArea = viewportWidth * viewportHeight;
|
||||||
|
|
||||||
|
// 시간 범위별 기본 레벨
|
||||||
|
SimplificationLevel baseLevel;
|
||||||
|
if (hours <= 1) {
|
||||||
|
baseLevel = SimplificationLevel.NONE;
|
||||||
|
} else if (hours <= 6) {
|
||||||
|
baseLevel = SimplificationLevel.MINIMAL;
|
||||||
|
} else if (hours <= 24) {
|
||||||
|
baseLevel = SimplificationLevel.LIGHT;
|
||||||
|
} else if (hours <= 72) {
|
||||||
|
baseLevel = SimplificationLevel.MODERATE;
|
||||||
|
} else if (hours <= 168) { // 1주일
|
||||||
|
baseLevel = SimplificationLevel.HEAVY;
|
||||||
|
} else {
|
||||||
|
baseLevel = SimplificationLevel.EXTREME;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 뷰포트 크기에 따른 조정
|
||||||
|
if (viewportArea > 100) { // 매우 넓은 영역
|
||||||
|
baseLevel = adjustLevel(baseLevel, 1); // 한 단계 더 간소화
|
||||||
|
} else if (viewportArea < 1) { // 매우 좁은 영역
|
||||||
|
baseLevel = adjustLevel(baseLevel, -1); // 한 단계 덜 간소화
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Simplification level determined: {} ({}h, area: {})",
|
||||||
|
baseLevel, hours, String.format("%.2f", viewportArea));
|
||||||
|
|
||||||
|
return baseLevel;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 간소화 레벨 조정
|
||||||
|
*/
|
||||||
|
private SimplificationLevel adjustLevel(SimplificationLevel current, int adjustment) {
|
||||||
|
SimplificationLevel[] levels = SimplificationLevel.values();
|
||||||
|
int currentIndex = current.ordinal();
|
||||||
|
int newIndex = Math.max(0, Math.min(levels.length - 1, currentIndex + adjustment));
|
||||||
|
return levels[newIndex];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* LineStringM 궤적 간소화
|
||||||
|
*/
|
||||||
|
public String simplifyTrack(String wktLineString, SimplificationLevel level) {
|
||||||
|
if (level == SimplificationLevel.NONE) {
|
||||||
|
return wktLineString;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// WKT 파싱
|
||||||
|
LineString original = parseLineStringM(wktLineString);
|
||||||
|
if (original == null || original.getNumPoints() < 3) {
|
||||||
|
return wktLineString;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Douglas-Peucker 간소화
|
||||||
|
DouglasPeuckerSimplifier simplifier = new DouglasPeuckerSimplifier(original);
|
||||||
|
simplifier.setDistanceTolerance(level.getTolerance());
|
||||||
|
LineString simplified = (LineString) simplifier.getResultGeometry();
|
||||||
|
|
||||||
|
// 중요 포인트 보존 (시작점, 끝점, 방향 전환점)
|
||||||
|
LineString result = preserveKeyPoints(original, simplified, level);
|
||||||
|
|
||||||
|
// WKT로 변환
|
||||||
|
return convertToLineStringM(result, wktLineString);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error simplifying track: {}", e.getMessage());
|
||||||
|
return wktLineString;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 포인트 수 기반 간소화
|
||||||
|
*/
|
||||||
|
public String simplifyByPointCount(String wktLineString, int maxPoints) {
|
||||||
|
try {
|
||||||
|
LineString original = parseLineStringM(wktLineString);
|
||||||
|
if (original == null || original.getNumPoints() <= maxPoints) {
|
||||||
|
return wktLineString;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 유지할 포인트 비율 계산
|
||||||
|
double ratio = (double) maxPoints / original.getNumPoints();
|
||||||
|
|
||||||
|
// 적절한 간소화 레벨 찾기
|
||||||
|
SimplificationLevel level = SimplificationLevel.NONE;
|
||||||
|
for (SimplificationLevel l : SimplificationLevel.values()) {
|
||||||
|
if (l.getRetentionRatio() <= ratio) {
|
||||||
|
level = l;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return simplifyTrack(wktLineString, level);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error simplifying by point count: {}", e.getMessage());
|
||||||
|
return wktLineString;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 적응형 간소화 - 데이터 밀도에 따라 동적 조정
|
||||||
|
*/
|
||||||
|
public String adaptiveSimplify(String wktLineString, double targetSizeKB) {
|
||||||
|
try {
|
||||||
|
// 현재 크기 추정 (포인트당 약 24바이트)
|
||||||
|
LineString original = parseLineStringM(wktLineString);
|
||||||
|
if (original == null) return wktLineString;
|
||||||
|
|
||||||
|
double currentSizeKB = (original.getNumPoints() * 24.0) / 1024.0;
|
||||||
|
|
||||||
|
if (currentSizeKB <= targetSizeKB) {
|
||||||
|
return wktLineString;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 목표 포인트 수 계산
|
||||||
|
int targetPoints = (int) ((targetSizeKB * 1024) / 24);
|
||||||
|
return simplifyByPointCount(wktLineString, targetPoints);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error in adaptive simplification: {}", e.getMessage());
|
||||||
|
return wktLineString;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* LineStringM 파싱 (M값 포함)
|
||||||
|
*/
|
||||||
|
private LineString parseLineStringM(String wkt) {
|
||||||
|
if (wkt == null || wkt.isEmpty()) return null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// LINESTRING M(...) 형식 파싱
|
||||||
|
String coords = wkt.replace("LINESTRING M(", "")
|
||||||
|
.replace("LINESTRING(", "")
|
||||||
|
.replace(")", "")
|
||||||
|
.trim();
|
||||||
|
|
||||||
|
if (coords.isEmpty()) return null;
|
||||||
|
|
||||||
|
String[] points = coords.split(",");
|
||||||
|
List<Coordinate> coordinates = new ArrayList<>();
|
||||||
|
|
||||||
|
for (String point : points) {
|
||||||
|
String[] parts = point.trim().split("\\s+");
|
||||||
|
if (parts.length >= 2) {
|
||||||
|
double x = Double.parseDouble(parts[0]);
|
||||||
|
double y = Double.parseDouble(parts[1]);
|
||||||
|
// M값은 있으면 사용, 없으면 무시
|
||||||
|
double m = parts.length >= 3 ? Double.parseDouble(parts[2]) : Double.NaN;
|
||||||
|
Coordinate coord = new Coordinate(x, y);
|
||||||
|
coord.setM(m);
|
||||||
|
coordinates.add(coord);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (coordinates.size() < 2) return null;
|
||||||
|
|
||||||
|
return geometryFactory.createLineString(
|
||||||
|
coordinates.toArray(new Coordinate[0])
|
||||||
|
);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error parsing LineStringM: {}", e.getMessage());
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* LineString을 LineStringM WKT로 변환
|
||||||
|
*/
|
||||||
|
private String convertToLineStringM(LineString simplified, String originalWkt) {
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
|
||||||
|
// 원본이 M값을 가지고 있는지 확인
|
||||||
|
boolean hasM = originalWkt.contains("LINESTRING M");
|
||||||
|
|
||||||
|
if (hasM) {
|
||||||
|
sb.append("LINESTRING M(");
|
||||||
|
} else {
|
||||||
|
sb.append("LINESTRING(");
|
||||||
|
}
|
||||||
|
|
||||||
|
Coordinate[] coords = simplified.getCoordinates();
|
||||||
|
|
||||||
|
for (int i = 0; i < coords.length; i++) {
|
||||||
|
if (i > 0) sb.append(", ");
|
||||||
|
|
||||||
|
sb.append(String.format("%.6f %.6f", coords[i].x, coords[i].y));
|
||||||
|
|
||||||
|
// M값이 있으면 추가
|
||||||
|
if (hasM) {
|
||||||
|
double m = Double.isNaN(coords[i].getM()) ?
|
||||||
|
(double)i / (coords.length - 1) : coords[i].getM();
|
||||||
|
sb.append(String.format(" %.6f", m));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.append(")");
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 중요 포인트 보존
|
||||||
|
*/
|
||||||
|
private LineString preserveKeyPoints(LineString original, LineString simplified,
|
||||||
|
SimplificationLevel level) {
|
||||||
|
List<Coordinate> result = new ArrayList<>();
|
||||||
|
|
||||||
|
// 시작점과 끝점은 항상 보존
|
||||||
|
result.add(original.getCoordinateN(0));
|
||||||
|
|
||||||
|
// 간소화된 중간 포인트들
|
||||||
|
for (int i = 1; i < simplified.getNumPoints() - 1; i++) {
|
||||||
|
result.add(simplified.getCoordinateN(i));
|
||||||
|
}
|
||||||
|
|
||||||
|
// 끝점
|
||||||
|
result.add(original.getCoordinateN(original.getNumPoints() - 1));
|
||||||
|
|
||||||
|
return geometryFactory.createLineString(
|
||||||
|
result.toArray(new Coordinate[0])
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 배치 간소화 - 여러 궤적을 한번에 처리
|
||||||
|
*/
|
||||||
|
public List<String> batchSimplify(List<String> tracks, SimplificationLevel level) {
|
||||||
|
return tracks.parallelStream()
|
||||||
|
.map(track -> simplifyTrack(track, level))
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,45 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.scheduling.annotation.AsyncConfigurer;
|
||||||
|
import org.springframework.scheduling.annotation.EnableAsync;
|
||||||
|
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
|
||||||
|
|
||||||
|
import java.util.concurrent.Executor;
|
||||||
|
import java.util.concurrent.ThreadPoolExecutor;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@EnableAsync
|
||||||
|
public class AsyncConfig implements AsyncConfigurer {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
@Bean(name = "trackStreamingExecutor")
|
||||||
|
public Executor getAsyncExecutor() {
|
||||||
|
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
|
||||||
|
executor.setCorePoolSize(15);
|
||||||
|
executor.setMaxPoolSize(30);
|
||||||
|
executor.setQueueCapacity(500);
|
||||||
|
executor.setKeepAliveSeconds(40);
|
||||||
|
executor.setThreadNamePrefix("track-stream-");
|
||||||
|
// executor.setTaskDecorator(new MdcTaskDecorator());
|
||||||
|
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
|
||||||
|
executor.setWaitForTasksToCompleteOnShutdown(true);
|
||||||
|
executor.setAwaitTerminationSeconds(80);
|
||||||
|
executor.initialize();
|
||||||
|
return executor;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() {
|
||||||
|
return new AsyncUncaughtExceptionHandler() {
|
||||||
|
@Override
|
||||||
|
public void handleUncaughtException(Throwable ex, java.lang.reflect.Method method, Object... params) {
|
||||||
|
log.error("Exception in async method: {} with params: {}", method.getName(), params, ex);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,90 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import org.springframework.batch.core.configuration.JobRegistry;
|
||||||
|
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
|
||||||
|
import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
|
||||||
|
import org.springframework.batch.core.explore.JobExplorer;
|
||||||
|
import org.springframework.batch.core.explore.support.JobExplorerFactoryBean;
|
||||||
|
import org.springframework.batch.core.launch.JobLauncher;
|
||||||
|
import org.springframework.batch.core.launch.support.TaskExecutorJobLauncher;
|
||||||
|
import org.springframework.batch.core.repository.JobRepository;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.core.task.TaskExecutor;
|
||||||
|
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.util.concurrent.ThreadPoolExecutor;
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
@EnableBatchProcessing
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 처리 비활성화
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class BatchConfig {
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public TaskExecutor batchTaskExecutor() {
|
||||||
|
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
|
||||||
|
executor.setCorePoolSize(8);
|
||||||
|
executor.setMaxPoolSize(16);
|
||||||
|
executor.setQueueCapacity(100);
|
||||||
|
executor.setThreadNamePrefix("batch-");
|
||||||
|
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
|
||||||
|
executor.setWaitForTasksToCompleteOnShutdown(true);
|
||||||
|
executor.setAwaitTerminationSeconds(60);
|
||||||
|
executor.initialize();
|
||||||
|
return executor;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public TaskExecutor partitionTaskExecutor() {
|
||||||
|
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
|
||||||
|
executor.setCorePoolSize(24);
|
||||||
|
executor.setMaxPoolSize(48);
|
||||||
|
executor.setQueueCapacity(200);
|
||||||
|
executor.setThreadNamePrefix("partition-");
|
||||||
|
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
|
||||||
|
executor.setKeepAliveSeconds(60);
|
||||||
|
executor.setAllowCoreThreadTimeOut(true);
|
||||||
|
executor.initialize();
|
||||||
|
return executor;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JobLauncher asyncJobLauncher(JobRepository jobRepository) throws Exception {
|
||||||
|
TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher();
|
||||||
|
jobLauncher.setJobRepository(jobRepository);
|
||||||
|
jobLauncher.setTaskExecutor(batchTaskExecutor());
|
||||||
|
jobLauncher.afterPropertiesSet();
|
||||||
|
return jobLauncher;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JobLauncher syncJobLauncher(JobRepository jobRepository) throws Exception {
|
||||||
|
TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher();
|
||||||
|
jobLauncher.setJobRepository(jobRepository);
|
||||||
|
jobLauncher.afterPropertiesSet();
|
||||||
|
return jobLauncher;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor(JobRegistry jobRegistry) {
|
||||||
|
JobRegistryBeanPostProcessor postProcessor = new JobRegistryBeanPostProcessor();
|
||||||
|
postProcessor.setJobRegistry(jobRegistry);
|
||||||
|
return postProcessor;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JobExplorer jobExplorer(@Qualifier("batchDataSource") DataSource dataSource,
|
||||||
|
@Qualifier("batchTransactionManager") PlatformTransactionManager transactionManager) throws Exception {
|
||||||
|
JobExplorerFactoryBean factory = new JobExplorerFactoryBean();
|
||||||
|
factory.setDataSource(dataSource);
|
||||||
|
factory.setTransactionManager(transactionManager);
|
||||||
|
factory.afterPropertiesSet();
|
||||||
|
return factory.getObject();
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,60 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import com.github.benmanes.caffeine.cache.Caffeine;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.cache.CacheManager;
|
||||||
|
import org.springframework.cache.annotation.EnableCaching;
|
||||||
|
import org.springframework.cache.caffeine.CaffeineCacheManager;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
||||||
|
import java.util.concurrent.TimeUnit;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Caffeine Cache 설정
|
||||||
|
*
|
||||||
|
* 용도: 선박 최신 위치 정보를 메모리에 캐싱하여 DB 부하 감소 및 실시간성 향상
|
||||||
|
*
|
||||||
|
* 특징:
|
||||||
|
* - 최대 60분간 캐시 보관 (TTL)
|
||||||
|
* - API 요청 시 minutes 파라미터로 동적 필터링
|
||||||
|
* - 1분마다 자동 갱신 (VesselPositionCacheRefreshScheduler)
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@EnableCaching
|
||||||
|
public class CacheConfig {
|
||||||
|
|
||||||
|
@Value("${vessel.batch.cache.latest-position.ttl-minutes:60}")
|
||||||
|
private int cacheTtlMinutes;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.cache.latest-position.max-size:50000}")
|
||||||
|
private int cacheMaxSize;
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public CacheManager cacheManager() {
|
||||||
|
CaffeineCacheManager cacheManager = new CaffeineCacheManager("vesselLatestPositions");
|
||||||
|
cacheManager.setCaffeine(caffeineCacheBuilder());
|
||||||
|
|
||||||
|
log.info("Initialized Caffeine CacheManager with TTL={}min, MaxSize={}",
|
||||||
|
cacheTtlMinutes, cacheMaxSize);
|
||||||
|
|
||||||
|
return cacheManager;
|
||||||
|
}
|
||||||
|
|
||||||
|
private Caffeine<Object, Object> caffeineCacheBuilder() {
|
||||||
|
return Caffeine.newBuilder()
|
||||||
|
// TTL: 60분 후 자동 만료 (비활성 선박 제거)
|
||||||
|
.expireAfterWrite(cacheTtlMinutes, TimeUnit.MINUTES)
|
||||||
|
|
||||||
|
// 최대 선박 수 제한 (메모리 보호)
|
||||||
|
.maximumSize(cacheMaxSize)
|
||||||
|
|
||||||
|
// 통계 수집 활성화 (모니터링용)
|
||||||
|
.recordStats()
|
||||||
|
|
||||||
|
// 초기 용량 설정 (rehashing 최소화)
|
||||||
|
.initialCapacity(1000);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,8 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
@Configuration
|
||||||
|
public class DataSourceConfig {
|
||||||
|
|
||||||
|
}
|
||||||
@ -0,0 +1,24 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import lombok.Data;
|
||||||
|
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
@Profile("prod")
|
||||||
|
@ConfigurationProperties(prefix = "spring.datasource")
|
||||||
|
@Data
|
||||||
|
public class DataSourceConfigProperties {
|
||||||
|
private DatabaseProperties collect = new DatabaseProperties();
|
||||||
|
private DatabaseProperties query = new DatabaseProperties();
|
||||||
|
private DatabaseProperties batch = new DatabaseProperties();
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class DatabaseProperties {
|
||||||
|
private String jdbcUrl;
|
||||||
|
private String username;
|
||||||
|
private String password;
|
||||||
|
private String driverClassName = "org.postgresql.Driver";
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,153 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import com.zaxxer.hikari.HikariConfig;
|
||||||
|
import com.zaxxer.hikari.HikariDataSource;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Primary;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
|
||||||
|
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Configuration
|
||||||
|
@Profile("dev")
|
||||||
|
public class DevDataSourceConfig {
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@ConfigurationProperties(prefix = "spring.datasource.collect")
|
||||||
|
public HikariConfig collectHikariConfig() {
|
||||||
|
HikariConfig config = new HikariConfig();
|
||||||
|
// 여기서 기본값을 설정하면 yml 파일의 설정과 병합됨
|
||||||
|
config.setConnectionInitSql("SET TIME ZONE 'Asia/Seoul'; SET search_path TO signal, public;");
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public DataSource collectDataSource(@Qualifier("collectHikariConfig") HikariConfig hikariConfig) {
|
||||||
|
// HikariConfig는 이미 @ConfigurationProperties로 설정이 주입되어 있음
|
||||||
|
HikariDataSource dataSource = new HikariDataSource(hikariConfig);
|
||||||
|
|
||||||
|
log.info("Collect DataSource created:");
|
||||||
|
log.info(" - URL: {}", hikariConfig.getJdbcUrl());
|
||||||
|
log.info(" - Connection Init SQL: {}", hikariConfig.getConnectionInitSql());
|
||||||
|
log.info(" - Pool Name: {}", hikariConfig.getPoolName());
|
||||||
|
|
||||||
|
// PostGIS 타입 등록 (선택사항)
|
||||||
|
try {
|
||||||
|
PostGISConfig.registerPostGISTypes(dataSource);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("PostGIS type registration skipped: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "devQueryHikariConfig")
|
||||||
|
@ConfigurationProperties(prefix = "spring.datasource.query")
|
||||||
|
public HikariConfig devQueryHikariConfig() {
|
||||||
|
HikariConfig config = new HikariConfig();
|
||||||
|
config.setConnectionInitSql("SET TIME ZONE 'Asia/Seoul'; SET search_path TO signal, public;");
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public DataSource queryDataSource(@Qualifier("devQueryHikariConfig") HikariConfig hikariConfig) {
|
||||||
|
HikariDataSource dataSource = new HikariDataSource(hikariConfig);
|
||||||
|
|
||||||
|
log.info("Query DataSource created:");
|
||||||
|
log.info(" - URL: {}", hikariConfig.getJdbcUrl());
|
||||||
|
log.info(" - Connection Init SQL: {}", hikariConfig.getConnectionInitSql());
|
||||||
|
log.info(" - Pool Name: {}", hikariConfig.getPoolName());
|
||||||
|
|
||||||
|
// PostGIS 타입 등록 (선택사항)
|
||||||
|
try {
|
||||||
|
PostGISConfig.registerPostGISTypes(dataSource);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("PostGIS type registration skipped: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@ConfigurationProperties(prefix = "spring.datasource.batch")
|
||||||
|
public HikariConfig batchHikariConfig() {
|
||||||
|
HikariConfig config = new HikariConfig();
|
||||||
|
config.setConnectionInitSql("SET TIME ZONE 'Asia/Seoul'");
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Primary
|
||||||
|
public DataSource batchDataSource(@Qualifier("batchHikariConfig") HikariConfig hikariConfig) {
|
||||||
|
HikariDataSource dataSource = new HikariDataSource(hikariConfig);
|
||||||
|
|
||||||
|
log.info("Batch DataSource created:");
|
||||||
|
log.info(" - URL: {}", hikariConfig.getJdbcUrl());
|
||||||
|
log.info(" - Connection Init SQL: {}", hikariConfig.getConnectionInitSql());
|
||||||
|
log.info(" - Pool Name: {}", hikariConfig.getPoolName());
|
||||||
|
|
||||||
|
// PostGIS 타입 등록 (선택사항)
|
||||||
|
try {
|
||||||
|
PostGISConfig.registerPostGISTypes(dataSource);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("PostGIS type registration skipped: {}", e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public DataSource dataSource(@Qualifier("batchDataSource") DataSource batchDataSource) {
|
||||||
|
return batchDataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public PlatformTransactionManager transactionManager(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public PlatformTransactionManager queryTransactionManager(@Qualifier("queryDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Primary
|
||||||
|
public PlatformTransactionManager batchTransactionManager(@Qualifier("batchDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "collectJdbcTemplate")
|
||||||
|
public JdbcTemplate collectJdbcTemplate(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
|
||||||
|
jdbcTemplate.setFetchSize(10000);
|
||||||
|
jdbcTemplate.setQueryTimeout(300);
|
||||||
|
return jdbcTemplate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "queryJdbcTemplate")
|
||||||
|
public JdbcTemplate queryJdbcTemplate(@Qualifier("queryDataSource") DataSource dataSource) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
|
||||||
|
jdbcTemplate.setQueryTimeout(300);
|
||||||
|
return jdbcTemplate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "collectNamedJdbcTemplate")
|
||||||
|
public NamedParameterJdbcTemplate collectNamedJdbcTemplate(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
return new NamedParameterJdbcTemplate(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "queryNamedJdbcTemplate")
|
||||||
|
public NamedParameterJdbcTemplate queryNamedJdbcTemplate(@Qualifier("queryDataSource") DataSource dataSource) {
|
||||||
|
return new NamedParameterJdbcTemplate(dataSource);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,41 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||||
|
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||||
|
import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateTimeDeserializer;
|
||||||
|
import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Primary;
|
||||||
|
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Jackson 설정
|
||||||
|
* LocalDateTime 직렬화/역직렬화 설정
|
||||||
|
*/
|
||||||
|
@Configuration
|
||||||
|
public class JacksonConfig {
|
||||||
|
|
||||||
|
private static final DateTimeFormatter DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss");
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Primary
|
||||||
|
public ObjectMapper objectMapper() {
|
||||||
|
JavaTimeModule javaTimeModule = new JavaTimeModule();
|
||||||
|
|
||||||
|
// LocalDateTime 직렬화/역직렬화 설정
|
||||||
|
javaTimeModule.addSerializer(LocalDateTime.class,
|
||||||
|
new LocalDateTimeSerializer(DATETIME_FORMATTER));
|
||||||
|
javaTimeModule.addDeserializer(LocalDateTime.class,
|
||||||
|
new LocalDateTimeDeserializer(DATETIME_FORMATTER));
|
||||||
|
|
||||||
|
return Jackson2ObjectMapperBuilder.json()
|
||||||
|
.modules(javaTimeModule)
|
||||||
|
.featuresToDisable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,97 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import com.zaxxer.hikari.HikariDataSource;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.jdbc.DataSourceBuilder;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Primary;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
@Profile("local")
|
||||||
|
public class LocalDataSourceConfig {
|
||||||
|
|
||||||
|
private final DataSourceConfigProperties properties;
|
||||||
|
|
||||||
|
public LocalDataSourceConfig(DataSourceConfigProperties properties) {
|
||||||
|
this.properties = properties;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public DataSource collectDataSource() {
|
||||||
|
HikariDataSource dataSource = DataSourceBuilder.create()
|
||||||
|
.type(HikariDataSource.class)
|
||||||
|
.url(properties.getCollect().getJdbcUrl())
|
||||||
|
.username(properties.getCollect().getUsername())
|
||||||
|
.password(properties.getCollect().getPassword())
|
||||||
|
.driverClassName(properties.getCollect().getDriverClassName())
|
||||||
|
.build();
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public DataSource queryDataSource() {
|
||||||
|
HikariDataSource dataSource = DataSourceBuilder.create()
|
||||||
|
.type(HikariDataSource.class)
|
||||||
|
.url(properties.getQuery().getJdbcUrl())
|
||||||
|
.username(properties.getQuery().getUsername())
|
||||||
|
.password(properties.getQuery().getPassword())
|
||||||
|
.driverClassName(properties.getQuery().getDriverClassName())
|
||||||
|
.build();
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Primary
|
||||||
|
public DataSource batchDataSource() {
|
||||||
|
HikariDataSource dataSource = DataSourceBuilder.create()
|
||||||
|
.type(HikariDataSource.class)
|
||||||
|
.url(properties.getBatch().getJdbcUrl())
|
||||||
|
.username(properties.getBatch().getUsername())
|
||||||
|
.password(properties.getBatch().getPassword())
|
||||||
|
.driverClassName(properties.getBatch().getDriverClassName())
|
||||||
|
.build();
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Spring Batch가 찾는 기본 dataSource 빈
|
||||||
|
@Bean
|
||||||
|
public DataSource dataSource(@Qualifier("batchDataSource") DataSource batchDataSource) {
|
||||||
|
return batchDataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 나머지 빈들은 동일...
|
||||||
|
@Bean
|
||||||
|
public PlatformTransactionManager transactionManager(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Primary
|
||||||
|
public PlatformTransactionManager batchTransactionManager(@Qualifier("batchDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "collectJdbcTemplate")
|
||||||
|
public JdbcTemplate collectJdbcTemplate(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
|
||||||
|
jdbcTemplate.setFetchSize(10000);
|
||||||
|
jdbcTemplate.setQueryTimeout(300);
|
||||||
|
return jdbcTemplate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "queryJdbcTemplate")
|
||||||
|
public JdbcTemplate queryJdbcTemplate(@Qualifier("queryDataSource") DataSource dataSource) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
|
||||||
|
jdbcTemplate.setQueryTimeout(300);
|
||||||
|
return jdbcTemplate;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,172 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import jakarta.annotation.PostConstruct;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
 * Partition retention configuration.
 *
 * <p>Scope: partitioned tables in the query DB only.</p>
 * <ul>
 *   <li>Daily partitions: retention expressed in days</li>
 *   <li>Monthly partitions: retention expressed in months</li>
 *   <li>Unlimited retention: set the retention value to 0 or -1</li>
 * </ul>
 */
@Slf4j
@Data
@Component
@ConfigurationProperties(prefix = "vessel.batch.partition")
public class PartitionRetentionConfig {

    // Number of days of future partitions to pre-create.
    private int futureDays = 7;

    // Enables automatic partition creation.
    private boolean enableAutoManagement = true;

    // Enables automatic partition cleanup (dropping expired partitions).
    private boolean enableAutoCleanup = true;

    // Default retention applied when a table has no specific entry in `tables`.
    private DefaultRetention defaultRetention = new DefaultRetention();

    // Per-table retention settings. Key: table name, Value: TableRetention.
    private Map<String, TableRetention> tables = new HashMap<>();

    /**
     * Logs the effective retention configuration once at startup so operators
     * can confirm what the batch will create and delete.
     */
    @PostConstruct
    public void initialize() {
        log.info("========== Partition Retention Configuration ==========");
        log.info("Management Target: queryDB only");
        log.info("Auto Management: {}", enableAutoManagement);
        log.info("Auto Cleanup: {}", enableAutoCleanup);
        log.info("Future Days: {}", futureDays);
        log.info("");
        log.info("Default Retention:");
        log.info(" - Daily Partitions: {} days{}",
                defaultRetention.getDailyPartitionsRetentionDays(),
                defaultRetention.getDailyPartitionsRetentionDays() <= 0 ? " (unlimited)" : "");
        log.info(" - Monthly Partitions: {} months{}",
                defaultRetention.getMonthlyPartitionsRetentionMonths(),
                defaultRetention.getMonthlyPartitionsRetentionMonths() <= 0 ? " (unlimited)" : "");

        if (!tables.isEmpty()) {
            log.info("");
            log.info("Table-specific Retention:");
            tables.forEach((tableName, retention) -> {
                if (retention.getRetentionDays() != null) {
                    String status = retention.getRetentionDays() <= 0 ? " (unlimited)" : " days";
                    log.info(" - {}: {}{}", tableName, retention.getRetentionDays(), status);
                }
                if (retention.getRetentionMonths() != null) {
                    String status = retention.getRetentionMonths() <= 0 ? " (unlimited)" : " months";
                    log.info(" - {}: {}{}", tableName, retention.getRetentionMonths(), status);
                }
            });
        }
        log.info("=======================================================");
    }

    /**
     * Returns the retention period (in days) for a daily-partitioned table.
     * Falls back to the default when the table has no specific setting.
     *
     * @param tableName table name
     * @return retention in days; a value of 0 or less means unlimited retention
     */
    public int getRetentionDays(String tableName) {
        TableRetention config = tables.get(tableName);
        if (config != null && config.getRetentionDays() != null) {
            return config.getRetentionDays();
        }
        return defaultRetention.getDailyPartitionsRetentionDays();
    }

    /**
     * Returns the retention period (in months) for a monthly-partitioned table.
     * Falls back to the default when the table has no specific setting.
     *
     * @param tableName table name
     * @return retention in months; a value of 0 or less means unlimited retention
     */
    public int getRetentionMonths(String tableName) {
        TableRetention config = tables.get(tableName);
        if (config != null && config.getRetentionMonths() != null) {
            return config.getRetentionMonths();
        }
        return defaultRetention.getMonthlyPartitionsRetentionMonths();
    }

    /**
     * Checks whether a daily-partitioned table's retention is unlimited.
     */
    public boolean isUnlimitedRetentionDays(String tableName) {
        return getRetentionDays(tableName) <= 0;
    }

    /**
     * Checks whether a monthly-partitioned table's retention is unlimited.
     */
    public boolean isUnlimitedRetentionMonths(String tableName) {
        return getRetentionMonths(tableName) <= 0;
    }

    /**
     * Default retention settings.
     */
    @Data
    public static class DefaultRetention {
        // Default retention for daily partitions, in days. 0 or less: unlimited.
        private int dailyPartitionsRetentionDays = 7;

        // Default retention for monthly partitions, in months. 0 or less: unlimited.
        private int monthlyPartitionsRetentionMonths = 2;
    }

    /**
     * Per-table retention settings.
     */
    @Data
    public static class TableRetention {
        // Retention for daily partitions, in days.
        // null: use the default; 0 or less: unlimited; positive: keep that many days.
        private Integer retentionDays;

        // Retention for monthly partitions, in months.
        // null: use the default; 0 or less: unlimited; positive: keep that many months.
        private Integer retentionMonths;
    }
}
|
||||||
@ -0,0 +1,44 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import lombok.Data;
|
||||||
|
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
||||||
|
/**
 * Performance optimization settings, bound from
 * {@code vessel.batch.optimization.*} properties.
 */
@Data
@Configuration
@ConfigurationProperties(prefix = "vessel.batch.optimization")
public class PerformanceOptimizationProperties {

    // Master switch plus individual feature toggles; all enabled by default.
    private boolean enabled = true;
    private boolean dynamicChunkSizing = true;
    private boolean memoryOptimization = true;
    private boolean cacheOptimization = true;
    private boolean threadPoolOptimization = true;

    // Nested setting groups, each with its own defaults.
    private ChunkSettings chunk = new ChunkSettings();
    private MemorySettings memory = new MemorySettings();
    private CacheSettings cache = new CacheSettings();

    /** Bounds and step factor for dynamic chunk-size adjustment. */
    @Data
    public static class ChunkSettings {
        private int minSize = 1000;
        private int maxSize = 20000;
        // Fractional adjustment applied per resize step (0.2 = 20%).
        private double adjustmentFactor = 0.2;
    }

    /** Memory usage thresholds, expressed as percentages. */
    @Data
    public static class MemorySettings {
        private int warningThreshold = 70;
        private int criticalThreshold = 85;
        private int optimizationThreshold = 80;
    }

    /** Cache tuning: minimum acceptable hit rate (%) and boundary-cache size. */
    @Data
    public static class CacheSettings {
        private int minHitRate = 70;
        private int areaBoundarySize = 5000;
    }
}
|
||||||
@ -0,0 +1,38 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.Statement;
|
||||||
|
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
public class PostGISConfig {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PostGIS 관련 설정
|
||||||
|
*/
|
||||||
|
public static void registerPostGISTypes(DataSource dataSource) {
|
||||||
|
try (Connection conn = dataSource.getConnection();
|
||||||
|
Statement stmt = conn.createStatement()) {
|
||||||
|
|
||||||
|
// search_path는 connection-init-sql에서 설정하므로 여기서는 하지 않음
|
||||||
|
// 각 datasource별로 다른 search_path가 필요할 수 있기 때문
|
||||||
|
|
||||||
|
// bytea 출력 형식 설정 (geometry 타입 처리를 위해)
|
||||||
|
stmt.execute("SET bytea_output = 'hex'");
|
||||||
|
|
||||||
|
// PostGIS 함수들이 사용 가능한지 간단히 테스트
|
||||||
|
// 실패해도 무시 - 실제 쿼리에서 PostGIS 함수 사용 시 오류 발생
|
||||||
|
try {
|
||||||
|
stmt.execute("SELECT 1 WHERE EXISTS (SELECT 1 FROM pg_proc WHERE proname = 'st_makepoint')");
|
||||||
|
} catch (Exception ignored) {
|
||||||
|
// PostGIS 함수 확인 실패 - 무시
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
// 연결 실패 시에만 경고
|
||||||
|
System.err.println("Warning: Failed to configure PostGIS settings: " + e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,125 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import com.zaxxer.hikari.HikariDataSource;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.jdbc.DataSourceBuilder;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Primary;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
|
||||||
|
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
@Profile("prod")
|
||||||
|
public class ProdDataSourceConfig {
|
||||||
|
|
||||||
|
private final DataSourceConfigProperties properties;
|
||||||
|
|
||||||
|
public ProdDataSourceConfig(DataSourceConfigProperties properties) {
|
||||||
|
this.properties = properties;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public DataSource collectDataSource() {
|
||||||
|
HikariDataSource dataSource = DataSourceBuilder.create()
|
||||||
|
.type(HikariDataSource.class)
|
||||||
|
.url(properties.getCollect().getJdbcUrl())
|
||||||
|
.username(properties.getCollect().getUsername())
|
||||||
|
.password(properties.getCollect().getPassword())
|
||||||
|
.driverClassName(properties.getCollect().getDriverClassName())
|
||||||
|
.build();
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public DataSource queryDataSource() {
|
||||||
|
System.out.println("========================================");
|
||||||
|
System.out.println("!!! CREATING queryDataSource !!!");
|
||||||
|
System.out.println("URL: " + properties.getQuery().getJdbcUrl());
|
||||||
|
System.out.println("Username: " + properties.getQuery().getUsername());
|
||||||
|
System.out.println("========================================");
|
||||||
|
|
||||||
|
HikariDataSource dataSource = DataSourceBuilder.create()
|
||||||
|
.type(HikariDataSource.class)
|
||||||
|
.url(properties.getQuery().getJdbcUrl())
|
||||||
|
.username(properties.getQuery().getUsername())
|
||||||
|
.password(properties.getQuery().getPassword())
|
||||||
|
.driverClassName(properties.getQuery().getDriverClassName())
|
||||||
|
.build();
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Primary
|
||||||
|
public DataSource batchDataSource() {
|
||||||
|
System.out.println("========================================");
|
||||||
|
System.out.println("!!! CREATING batchDataSource (PRIMARY) !!!");
|
||||||
|
System.out.println("URL: " + properties.getBatch().getJdbcUrl());
|
||||||
|
System.out.println("Username: " + properties.getBatch().getUsername());
|
||||||
|
System.out.println("========================================");
|
||||||
|
|
||||||
|
HikariDataSource dataSource = DataSourceBuilder.create()
|
||||||
|
.type(HikariDataSource.class)
|
||||||
|
.url(properties.getBatch().getJdbcUrl())
|
||||||
|
.username(properties.getBatch().getUsername())
|
||||||
|
.password(properties.getBatch().getPassword())
|
||||||
|
.driverClassName(properties.getBatch().getDriverClassName())
|
||||||
|
.build();
|
||||||
|
return dataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Spring Batch가 찾는 기본 dataSource 빈
|
||||||
|
@Bean
|
||||||
|
public DataSource dataSource(@Qualifier("batchDataSource") DataSource batchDataSource) {
|
||||||
|
return batchDataSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transaction Manager 설정
|
||||||
|
@Bean
|
||||||
|
public PlatformTransactionManager transactionManager(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "queryTransactionManager")
|
||||||
|
public PlatformTransactionManager queryTransactionManager(@Qualifier("queryDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
@Primary
|
||||||
|
public PlatformTransactionManager batchTransactionManager(@Qualifier("batchDataSource") DataSource dataSource) {
|
||||||
|
return new DataSourceTransactionManager(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
// JdbcTemplate 빈 설정
|
||||||
|
@Bean(name = "collectJdbcTemplate")
|
||||||
|
public JdbcTemplate collectJdbcTemplate(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
|
||||||
|
jdbcTemplate.setFetchSize(10000);
|
||||||
|
jdbcTemplate.setQueryTimeout(300);
|
||||||
|
return jdbcTemplate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "queryJdbcTemplate")
|
||||||
|
public JdbcTemplate queryJdbcTemplate(@Qualifier("queryDataSource") DataSource dataSource) {
|
||||||
|
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
|
||||||
|
jdbcTemplate.setQueryTimeout(300);
|
||||||
|
return jdbcTemplate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "collectNamedJdbcTemplate")
|
||||||
|
public NamedParameterJdbcTemplate collectNamedJdbcTemplate(@Qualifier("collectDataSource") DataSource dataSource) {
|
||||||
|
return new NamedParameterJdbcTemplate(dataSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean(name = "queryNamedJdbcTemplate")
|
||||||
|
public NamedParameterJdbcTemplate queryNamedJdbcTemplate(@Qualifier("queryDataSource") DataSource dataSource) {
|
||||||
|
return new NamedParameterJdbcTemplate(dataSource);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,90 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import com.zaxxer.hikari.HikariDataSource;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
|
||||||
|
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Primary;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
|
||||||
|
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
|
||||||
|
import org.springframework.transaction.PlatformTransactionManager;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
|
||||||
|
/**
 * DataSource configuration for the query-only (read-side) server.
 * - Active only under the "query" profile
 * - Uses the spring.datasource settings from application-query.yml
 *
 * <p>On this server every collect* bean is an alias of the single query
 * DataSource, so code written against the collect DB transparently reads
 * from the query DB instead.</p>
 */
@Slf4j
@Configuration
@Profile("query")
public class QueryDataSourceConfig {

    // Binds spring.datasource.* (url, username, password, driver) for the single DB.
    @Bean
    @Primary
    @ConfigurationProperties("spring.datasource")
    public DataSourceProperties dataSourceProperties() {
        return new DataSourceProperties();
    }

    /**
     * Primary (and only) DataSource on the query server; Hikari pool settings
     * come from spring.datasource.hikari.*.
     */
    @Bean
    @Primary
    @ConfigurationProperties("spring.datasource.hikari")
    public DataSource dataSource(DataSourceProperties properties) {
        log.info("Creating primary DataSource for query-only server");
        HikariDataSource dataSource = properties.initializeDataSourceBuilder()
                .type(HikariDataSource.class)
                .build();
        log.info("Query DataSource configured: {}", properties.getUrl());
        return dataSource;
    }

    // "queryDataSource" is just a named alias of the primary DataSource.
    @Bean(name = "queryDataSource")
    public DataSource queryDataSource(@Qualifier("dataSource") DataSource dataSource) {
        log.info("Aliasing primary DataSource as queryDataSource");
        return dataSource;
    }

    /** JdbcTemplate for the query DB; 300 s query timeout. */
    @Bean(name = "queryJdbcTemplate")
    public JdbcTemplate queryJdbcTemplate(@Qualifier("dataSource") DataSource dataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        jdbcTemplate.setQueryTimeout(300);
        log.info("QueryJdbcTemplate created for query-only server");
        return jdbcTemplate;
    }

    /** Named-parameter template for the query DB. */
    @Bean(name = "queryNamedJdbcTemplate")
    public NamedParameterJdbcTemplate queryNamedJdbcTemplate(@Qualifier("dataSource") DataSource dataSource) {
        return new NamedParameterJdbcTemplate(dataSource);
    }

    /** Primary transaction manager, bound to the single query DataSource. */
    @Bean(name = "queryTransactionManager")
    @Primary
    public PlatformTransactionManager queryTransactionManager(@Qualifier("dataSource") DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    // Alias for code that expects collectJdbcTemplate (uses the query DB here).
    @Bean(name = "collectJdbcTemplate")
    public JdbcTemplate collectJdbcTemplate(@Qualifier("queryJdbcTemplate") JdbcTemplate queryJdbcTemplate) {
        log.info("CollectJdbcTemplate aliased to QueryJdbcTemplate (query-only mode)");
        return queryJdbcTemplate;
    }

    // Alias for code that expects collectNamedJdbcTemplate.
    @Bean(name = "collectNamedJdbcTemplate")
    public NamedParameterJdbcTemplate collectNamedJdbcTemplate(@Qualifier("queryNamedJdbcTemplate") NamedParameterJdbcTemplate queryNamedJdbcTemplate) {
        return queryNamedJdbcTemplate;
    }

    // Alias for code that expects collectDataSource.
    @Bean(name = "collectDataSource")
    public DataSource collectDataSource(@Qualifier("dataSource") DataSource dataSource) {
        log.info("CollectDataSource aliased to primary DataSource (query-only mode)");
        return dataSource;
    }
}
|
||||||
@ -0,0 +1,39 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.retry.annotation.EnableRetry;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.retry.backoff.ExponentialBackOffPolicy;
|
||||||
|
import org.springframework.retry.policy.SimpleRetryPolicy;
|
||||||
|
import org.springframework.retry.support.RetryTemplate;
|
||||||
|
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
@EnableRetry
|
||||||
|
public class RetryConfig {
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public RetryTemplate retryTemplate() {
|
||||||
|
RetryTemplate retryTemplate = new RetryTemplate();
|
||||||
|
|
||||||
|
// 재시도 정책
|
||||||
|
Map<Class<? extends Throwable>, Boolean> retryableExceptions = new HashMap<>();
|
||||||
|
retryableExceptions.put(org.springframework.dao.ConcurrencyFailureException.class, true);
|
||||||
|
retryableExceptions.put(org.springframework.dao.CannotAcquireLockException.class, true);
|
||||||
|
retryableExceptions.put(org.springframework.dao.TransientDataAccessResourceException.class, true);
|
||||||
|
|
||||||
|
SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(3, retryableExceptions);
|
||||||
|
retryTemplate.setRetryPolicy(retryPolicy);
|
||||||
|
|
||||||
|
// Backoff 정책
|
||||||
|
ExponentialBackOffPolicy backOffPolicy = new ExponentialBackOffPolicy();
|
||||||
|
backOffPolicy.setInitialInterval(100);
|
||||||
|
backOffPolicy.setMaxInterval(5000);
|
||||||
|
backOffPolicy.setMultiplier(2);
|
||||||
|
retryTemplate.setBackOffPolicy(backOffPolicy);
|
||||||
|
|
||||||
|
return retryTemplate;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,28 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.context.annotation.Profile;
|
||||||
|
import org.springframework.scheduling.TaskScheduler;
|
||||||
|
import org.springframework.scheduling.annotation.EnableScheduling;
|
||||||
|
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
@EnableScheduling
|
||||||
|
@Profile("!query") // query 프로파일에서는 배치 스케줄러 비활성화
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class SchedulerConfig {
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public TaskScheduler taskScheduler() {
|
||||||
|
ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
|
||||||
|
scheduler.setPoolSize(1); // 중복 실행 방지를 위해 1로 설정
|
||||||
|
scheduler.setThreadNamePrefix("batch-scheduler-");
|
||||||
|
scheduler.setAwaitTerminationSeconds(60);
|
||||||
|
scheduler.setWaitForTasksToCompleteOnShutdown(true);
|
||||||
|
scheduler.setRejectedExecutionHandler(new java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy());
|
||||||
|
scheduler.initialize();
|
||||||
|
return scheduler;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,128 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.OpenAPIDefinition;
|
||||||
|
import io.swagger.v3.oas.annotations.info.Contact;
|
||||||
|
import io.swagger.v3.oas.annotations.info.Info;
|
||||||
|
import io.swagger.v3.oas.annotations.info.License;
|
||||||
|
import io.swagger.v3.oas.annotations.servers.Server;
|
||||||
|
import org.springdoc.core.models.GroupedOpenApi;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
|
||||||
|
// OpenAPI/Swagger configuration: top-level API metadata plus one grouped
// API per functional area so the Swagger UI stays navigable.
@Configuration
@OpenAPIDefinition(
    info = @Info(
        title = "선박 항적 집계 및 조회 시스템 API",
        version = "2.0.0",
        description = """
            실시간 선박 위치 데이터를 계층적으로 집계하여 빠른 항적 조회 성능을 제공하는 시스템

            ## 핵심 기능
            ### 1. 계층적 항적 데이터 집계
            - **5분 단위 집계**: 실시간 선박 위치를 5분마다 LineStringM 형식의 항적으로 변환
            - **1시간 단위 집계**: 5분 데이터를 시간별로 병합 (매시 10분 실행)
            - **1일 단위 집계**: 시간별 데이터를 일별로 병합 (매일 01:00 실행)

            ### 2. 공간 기반 항적 관리
            - **해구별 집계**: 대해구(0.5°×0.5°) 및 소해구(대해구 3×3 분할) 단위 항적 저장
            - **사용자 정의 영역**: 임의 폴리곤 영역별 항적 집계
            - **타일 기반 조회**: 줌 레벨에 따른 동적 데이터 간소화

            ### 3. 비정상 항적 검출 및 필터링
            - **실시간 검출**: 물리적 불가능 항적 자동 필터링 (속도 100knots, 거리 10nm/5분 초과)
            - **항공기 예외처리**: sig_src_cd='000019' 항공기는 300knots/30nm 기준 적용
            - **분리 저장**: 비정상 항적은 별도 테이블(t_abnormal_tracks)에 보관

            ### 4. WebSocket 기반 대용량 항적 스트리밍
            - **STOMP over WebSocket**: 실시간 양방향 통신
            - **청크 스트리밍**: 대용량 데이터를 작은 단위로 분할 전송
            - **적응형 간소화**: 조회 범위와 기간에 따른 자동 데이터 최적화
            - **병렬 처리**: Daily → Hourly → 5min 순서로 병렬 조회

            ### 5. 실시간 모니터링 대시보드
            - **GIS 시각화**: deck.gl 기반 해구/영역별 선박 분포 표시
            - **항적 애니메이션**: 시간 흐름에 따른 선박 이동 재생
            - **성능 모니터링**: 배치 Job 실행 상태, 처리 통계, 시스템 리소스 실시간 확인

            ## 데이터 구조
            - **항적 형식**: PostGIS LineStringM (X: 경도, Y: 위도, M: 시간)
            - **위치 정보**: JSONB {lat, lon, time, sog}
            - **집계 통계**: vessel_count, total_distance, avg_speed

            ## 시스템 아키텍처
            ```
            CollectDB (실시간) → Spring Batch (집계) → QueryDB (조회)
                                  ↓
                              BatchDB (메타)
            ```
            """,
        contact = @Contact(
            name = "Signal Batch Team",
            email = "signal-batch@mda.gc"
        ),
        license = @License(
            name = "Internal Use Only"
        )
    ),
    servers = {
        @Server(url = "http://10.26.252.48:8090", description = "Development Server (QueryDB)"),
        @Server(url = "http://10.26.252.39:8090", description = "Production Server"),
        @Server(url = "http://localhost:8090", description = "Local Development Server")
    }
)
public class SwaggerConfig {

    // Track query endpoints (tracks, sea-grid, areas, passages, vessels).
    @Bean
    public GroupedOpenApi trackApi() {
        return GroupedOpenApi.builder()
                .group("1-track-api")
                .displayName("항적 조회 API")
                .pathsToMatch("/api/v1/tracks/**", "/api/v1/haegu/**", "/api/v1/areas/**", "/api/v1/passages/**", "/api/v1/vessels/**")
                .build();
    }

    // Abnormal-track detection endpoints.
    @Bean
    public GroupedOpenApi abnormalTrackApi() {
        return GroupedOpenApi.builder()
                .group("2-abnormal-track-api")
                .displayName("비정상 항적 검출 API")
                .pathsToMatch("/api/v1/abnormal-tracks/**")
                .build();
    }

    // Tile-aggregation endpoints (both versioned and legacy paths).
    @Bean
    public GroupedOpenApi tileApi() {
        return GroupedOpenApi.builder()
                .group("3-tile-api")
                .displayName("타일 집계 API")
                .pathsToMatch("/api/v1/tiles/**", "/api/tiles/**")
                .build();
    }

    // Performance optimization endpoints.
    @Bean
    public GroupedOpenApi performanceApi() {
        return GroupedOpenApi.builder()
                .group("4-performance-api")
                .displayName("성능 최적화 API")
                .pathsToMatch("/api/v1/performance/**")
                .build();
    }

    // Administrative endpoints.
    @Bean
    public GroupedOpenApi adminApi() {
        return GroupedOpenApi.builder()
                .group("5-admin-api")
                .displayName("관리자 API")
                .pathsToMatch("/admin/**")
                .build();
    }

    // Monitoring endpoints (custom monitor pages, actuator, websocket status).
    @Bean
    public GroupedOpenApi monitoringApi() {
        return GroupedOpenApi.builder()
                .group("6-monitoring-api")
                .displayName("모니터링 API")
                .pathsToMatch("/monitor/**", "/actuator/**", "/api/websocket/**")
                .build();
    }
}
|
||||||
@ -0,0 +1,41 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.web.servlet.config.annotation.CorsRegistry;
|
||||||
|
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
|
||||||
|
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
|
||||||
|
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
|
||||||
|
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
public class WebMvcConfig implements WebMvcConfigurer {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void addResourceHandlers(ResourceHandlerRegistry registry) {
|
||||||
|
// 로컬 타일 리소스 매핑 (대안 방법)
|
||||||
|
registry.addResourceHandler("/tiles/**")
|
||||||
|
.addResourceLocations("file:///devdata/MAPS/")
|
||||||
|
.setCachePeriod(86400); // 24시간 캐시
|
||||||
|
|
||||||
|
// 기본 정적 리소스
|
||||||
|
registry.addResourceHandler("/static/**")
|
||||||
|
.addResourceLocations("classpath:/static/");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void addCorsMappings(CorsRegistry registry) {
|
||||||
|
// CORS 설정 (필요시)
|
||||||
|
registry.addMapping("/api/**")
|
||||||
|
.allowedOrigins("*")
|
||||||
|
.allowedMethods("GET", "POST", "PUT", "DELETE")
|
||||||
|
.allowedHeaders("*")
|
||||||
|
.maxAge(3600);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void addViewControllers(ViewControllerRegistry registry) {
|
||||||
|
// 루트 경로를 관리자 페이지로 리다이렉트
|
||||||
|
registry.addRedirectViewController("/", "/admin/batch-admin.html");
|
||||||
|
registry.addRedirectViewController("/index.html", "/admin/batch-admin.html");
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,223 @@
|
|||||||
|
package gc.mda.signal_batch.global.config;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.global.websocket.interceptor.TrackQueryInterceptor;
|
||||||
|
import gc.mda.signal_batch.monitoring.interceptor.WebSocketUsageLoggingInterceptor;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import org.springframework.context.annotation.Bean;
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.http.server.ServerHttpRequest;
|
||||||
|
import org.springframework.messaging.converter.MappingJackson2MessageConverter;
|
||||||
|
import org.springframework.messaging.converter.MessageConverter;
|
||||||
|
import org.springframework.messaging.simp.config.ChannelRegistration;
|
||||||
|
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
|
||||||
|
import org.springframework.scheduling.TaskScheduler;
|
||||||
|
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
|
||||||
|
import org.springframework.web.socket.WebSocketHandler;
|
||||||
|
import org.springframework.web.socket.config.annotation.*;
|
||||||
|
import org.springframework.web.socket.server.HandshakeInterceptor;
|
||||||
|
import org.springframework.web.socket.server.standard.ServletServerContainerFactoryBean;
|
||||||
|
import org.springframework.web.socket.server.support.DefaultHandshakeHandler;
|
||||||
|
import org.springframework.http.server.ServerHttpResponse;
|
||||||
|
import org.springframework.http.server.ServletServerHttpRequest;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import java.security.Principal;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
@EnableWebSocketMessageBroker
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class WebSocketStompConfig implements WebSocketMessageBrokerConfigurer {
|
||||||
|
|
||||||
|
private final TrackQueryInterceptor trackQueryInterceptor;
|
||||||
|
private final WebSocketUsageLoggingInterceptor webSocketUsageLoggingInterceptor;
|
||||||
|
private final ObjectMapper objectMapper; // JacksonConfig에서 생성된 ObjectMapper 주입
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public ServletServerContainerFactoryBean createWebSocketContainer() {
|
||||||
|
ServletServerContainerFactoryBean container = new ServletServerContainerFactoryBean();
|
||||||
|
container.setMaxTextMessageBufferSize(256 * 1024 * 1024); // 256MB로 증가
|
||||||
|
container.setMaxBinaryMessageBufferSize(256 * 1024 * 1024); // 256MB로 증가
|
||||||
|
container.setMaxSessionIdleTimeout(60000L);
|
||||||
|
return container;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void registerStompEndpoints(StompEndpointRegistry registry) {
|
||||||
|
// 1. 네이티브 WebSocket 엔드포인트 (프론트엔드 @stomp/stompjs 지원)
|
||||||
|
registry.addEndpoint("/ws-tracks")
|
||||||
|
.setHandshakeHandler(new CustomHandshakeHandler())
|
||||||
|
.addInterceptors(new ClientIpHandshakeInterceptor())
|
||||||
|
.setAllowedOriginPatterns("*");
|
||||||
|
|
||||||
|
// 2. SockJS 폴백 엔드포인트 (레거시 브라우저 및 기존 클라이언트 지원)
|
||||||
|
registry.addEndpoint("/ws-tracks")
|
||||||
|
.setHandshakeHandler(new CustomHandshakeHandler())
|
||||||
|
.addInterceptors(new ClientIpHandshakeInterceptor())
|
||||||
|
.setAllowedOriginPatterns("*")
|
||||||
|
.withSockJS()
|
||||||
|
.setClientLibraryUrl("/static/libs/js/sockjs.min.js")
|
||||||
|
.setStreamBytesLimit(100 * 1024 * 1024) // 100MB로 증가
|
||||||
|
.setHttpMessageCacheSize(1000)
|
||||||
|
.setDisconnectDelay(30 * 1000)
|
||||||
|
.setWebSocketEnabled(true)
|
||||||
|
.setSuppressCors(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void configureMessageBroker(MessageBrokerRegistry registry) {
|
||||||
|
// 인메모리 메시지 브로커 설정
|
||||||
|
registry.enableSimpleBroker("/topic", "/queue")
|
||||||
|
.setTaskScheduler(messageBrokerTaskScheduler())
|
||||||
|
.setHeartbeatValue(new long[]{10000, 10000}); // 서버/클라이언트 하트비트
|
||||||
|
|
||||||
|
// 애플리케이션 destination prefix
|
||||||
|
registry.setApplicationDestinationPrefixes("/app");
|
||||||
|
|
||||||
|
// 사용자별 destination prefix
|
||||||
|
registry.setUserDestinationPrefix("/user");
|
||||||
|
|
||||||
|
// 캐시 크기 제한
|
||||||
|
registry.setCacheLimit(2048); // 1024 -> 2048로 증가
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void configureWebSocketTransport(WebSocketTransportRegistration registration) {
|
||||||
|
registration
|
||||||
|
.setMessageSizeLimit(50 * 1024 * 1024) // 50MB로 증가
|
||||||
|
.setSendBufferSizeLimit(256 * 1024 * 1024) // 256MB로 증가
|
||||||
|
.setSendTimeLimit(120 * 1000) // 120초로 증가
|
||||||
|
.setTimeToFirstMessage(30 * 1000); // 첫 메시지까지 30초
|
||||||
|
|
||||||
|
// 에러 핸들러 등록은 별도로 처리
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void configureClientInboundChannel(ChannelRegistration registration) {
|
||||||
|
registration
|
||||||
|
.interceptors(webSocketUsageLoggingInterceptor, trackQueryInterceptor)
|
||||||
|
.taskExecutor()
|
||||||
|
.corePoolSize(10)
|
||||||
|
.maxPoolSize(20)
|
||||||
|
.queueCapacity(100);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void configureClientOutboundChannel(ChannelRegistration registration) {
|
||||||
|
registration
|
||||||
|
.taskExecutor()
|
||||||
|
.corePoolSize(20) // 10 -> 20로 증가
|
||||||
|
.maxPoolSize(40) // 20 -> 40로 증가
|
||||||
|
.queueCapacity(5000); // 1000 -> 5000로 증가
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean configureMessageConverters(List<MessageConverter> messageConverters) {
|
||||||
|
// Use the ObjectMapper from JacksonConfig which already has JavaTimeModule configured
|
||||||
|
MappingJackson2MessageConverter converter = new MappingJackson2MessageConverter();
|
||||||
|
converter.setObjectMapper(objectMapper);
|
||||||
|
converter.setPrettyPrint(false);
|
||||||
|
|
||||||
|
// Clear default converters and add our custom one
|
||||||
|
messageConverters.clear();
|
||||||
|
messageConverters.add(converter);
|
||||||
|
|
||||||
|
return true; // Don't add default converters
|
||||||
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public TaskScheduler messageBrokerTaskScheduler() {
|
||||||
|
ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
|
||||||
|
scheduler.setPoolSize(4);
|
||||||
|
scheduler.setThreadNamePrefix("wsMsgBroker-");
|
||||||
|
scheduler.setAwaitTerminationSeconds(60);
|
||||||
|
scheduler.setWaitForTasksToCompleteOnShutdown(true);
|
||||||
|
scheduler.initialize();
|
||||||
|
return scheduler;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 세션별 고유 ID 생성을 위한 HandshakeHandler
|
||||||
|
private static class CustomHandshakeHandler extends DefaultHandshakeHandler {
|
||||||
|
@Override
|
||||||
|
protected Principal determineUser(ServerHttpRequest request,
|
||||||
|
WebSocketHandler wsHandler,
|
||||||
|
Map<String, Object> attributes) {
|
||||||
|
return new StompPrincipal(UUID.randomUUID().toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static class StompPrincipal implements Principal {
|
||||||
|
private final String name;
|
||||||
|
|
||||||
|
public StompPrincipal(String name) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 클라이언트 IP 추출을 위한 HandshakeInterceptor
|
||||||
|
private static class ClientIpHandshakeInterceptor implements HandshakeInterceptor {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean beforeHandshake(ServerHttpRequest request, ServerHttpResponse response,
|
||||||
|
WebSocketHandler wsHandler, Map<String, Object> attributes) {
|
||||||
|
// HTTP 요청에서 클라이언트 IP 추출
|
||||||
|
String clientIp = extractClientIp(request);
|
||||||
|
attributes.put("CLIENT_IP", clientIp);
|
||||||
|
|
||||||
|
// User-Agent 추출
|
||||||
|
if (request instanceof ServletServerHttpRequest) {
|
||||||
|
HttpServletRequest servletRequest = ((ServletServerHttpRequest) request).getServletRequest();
|
||||||
|
String userAgent = servletRequest.getHeader("User-Agent");
|
||||||
|
attributes.put("USER_AGENT", userAgent);
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void afterHandshake(ServerHttpRequest request, ServerHttpResponse response,
|
||||||
|
WebSocketHandler wsHandler, Exception exception) {
|
||||||
|
// 핸드셰이크 완료 후 처리 (필요시 사용)
|
||||||
|
}
|
||||||
|
|
||||||
|
private String extractClientIp(ServerHttpRequest request) {
|
||||||
|
if (request instanceof ServletServerHttpRequest) {
|
||||||
|
HttpServletRequest servletRequest = ((ServletServerHttpRequest) request).getServletRequest();
|
||||||
|
|
||||||
|
// X-Forwarded-For 헤더 확인 (프록시/로드밸런서 환경)
|
||||||
|
String xForwardedFor = servletRequest.getHeader("X-Forwarded-For");
|
||||||
|
if (xForwardedFor != null && !xForwardedFor.isEmpty()) {
|
||||||
|
return xForwardedFor.split(",")[0].trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
// X-Real-IP 헤더 확인
|
||||||
|
String xRealIp = servletRequest.getHeader("X-Real-IP");
|
||||||
|
if (xRealIp != null && !xRealIp.isEmpty()) {
|
||||||
|
return xRealIp;
|
||||||
|
}
|
||||||
|
|
||||||
|
// X-Original-Forwarded-For 헤더 확인
|
||||||
|
String xOriginalFor = servletRequest.getHeader("X-Original-Forwarded-For");
|
||||||
|
if (xOriginalFor != null && !xOriginalFor.isEmpty()) {
|
||||||
|
return xOriginalFor.split(",")[0].trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 기본 Remote Address
|
||||||
|
return servletRequest.getRemoteAddr();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServletServerHttpRequest가 아닌 경우 기본값
|
||||||
|
return "unknown";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,427 @@
|
|||||||
|
package gc.mda.signal_batch.global.tool;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class BatchDiagnosticTool {
|
||||||
|
|
||||||
|
@Qualifier("collectJdbcTemplate")
|
||||||
|
private final JdbcTemplate collectJdbcTemplate;
|
||||||
|
|
||||||
|
@Qualifier("queryJdbcTemplate")
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 종합 진단 실행
|
||||||
|
*/
|
||||||
|
public DiagnosticReport runFullDiagnostic() {
|
||||||
|
log.info("Starting batch system diagnostic...");
|
||||||
|
|
||||||
|
DiagnosticReport report = new DiagnosticReport();
|
||||||
|
report.setTimestamp(LocalDateTime.now());
|
||||||
|
|
||||||
|
// 1. 데이터베이스 상태 확인
|
||||||
|
report.setDatabaseHealth(checkDatabaseHealth());
|
||||||
|
|
||||||
|
// 2. 파티션 상태 확인
|
||||||
|
report.setPartitionStatus(checkPartitionStatus());
|
||||||
|
|
||||||
|
// 3. 성능 메트릭 수집
|
||||||
|
report.setPerformanceMetrics(collectPerformanceMetrics());
|
||||||
|
|
||||||
|
// 4. 데이터 무결성 검사
|
||||||
|
report.setDataIntegrity(checkDataIntegrity());
|
||||||
|
|
||||||
|
// 5. 시스템 리소스 확인
|
||||||
|
report.setSystemResources(checkSystemResources());
|
||||||
|
|
||||||
|
// 6. 권장사항 생성
|
||||||
|
report.setRecommendations(generateRecommendations(report));
|
||||||
|
|
||||||
|
log.info("Diagnostic completed");
|
||||||
|
return report;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 데이터베이스 상태 확인
|
||||||
|
*/
|
||||||
|
private DatabaseHealth checkDatabaseHealth() {
|
||||||
|
DatabaseHealth health = new DatabaseHealth();
|
||||||
|
|
||||||
|
// 연결 상태
|
||||||
|
health.setCollectDbConnected(testConnection(collectJdbcTemplate));
|
||||||
|
health.setQueryDbConnected(testConnection(queryJdbcTemplate));
|
||||||
|
|
||||||
|
// 활성 연결 수
|
||||||
|
health.setActiveConnections(getActiveConnections());
|
||||||
|
|
||||||
|
// 느린 쿼리
|
||||||
|
health.setSlowQueries(getSlowQueries());
|
||||||
|
|
||||||
|
// 테이블 크기
|
||||||
|
health.setTableSizes(getTableSizes());
|
||||||
|
|
||||||
|
// Lock 상황
|
||||||
|
health.setLockInfo(getLockInfo());
|
||||||
|
|
||||||
|
return health;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 파티션 상태 확인
|
||||||
|
*/
|
||||||
|
private PartitionStatus checkPartitionStatus() {
|
||||||
|
PartitionStatus status = new PartitionStatus();
|
||||||
|
|
||||||
|
String sql = """
|
||||||
|
SELECT
|
||||||
|
tablename,
|
||||||
|
pg_size_pretty(pg_total_relation_size(schemaname||'.'||tablename)) as size,
|
||||||
|
pg_total_relation_size(schemaname||'.'||tablename) as size_bytes
|
||||||
|
FROM pg_tables
|
||||||
|
WHERE schemaname = 'signal'
|
||||||
|
AND tablename LIKE 'sig_test_%'
|
||||||
|
ORDER BY tablename
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<Map<String, Object>> partitions = collectJdbcTemplate.queryForList(sql);
|
||||||
|
|
||||||
|
status.setTotalPartitions(partitions.size());
|
||||||
|
status.setPartitionDetails(partitions);
|
||||||
|
|
||||||
|
// 미래 파티션 확인
|
||||||
|
LocalDate tomorrow = LocalDate.now().plusDays(1);
|
||||||
|
String tomorrowPartition = "sig_test_" +
|
||||||
|
tomorrow.format(DateTimeFormatter.BASIC_ISO_DATE);
|
||||||
|
|
||||||
|
status.setHasFuturePartitions(
|
||||||
|
partitions.stream().anyMatch(p -> p.get("tablename").equals(tomorrowPartition))
|
||||||
|
);
|
||||||
|
|
||||||
|
// 가장 큰 파티션
|
||||||
|
partitions.stream()
|
||||||
|
.max(Comparator.comparing(p -> (Long) p.get("size_bytes")))
|
||||||
|
.ifPresent(p -> status.setLargestPartition(p));
|
||||||
|
|
||||||
|
return status;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 성능 메트릭 수집
|
||||||
|
*/
|
||||||
|
private PerformanceMetrics collectPerformanceMetrics() {
|
||||||
|
PerformanceMetrics metrics = new PerformanceMetrics();
|
||||||
|
|
||||||
|
// 처리 속도
|
||||||
|
Map<String, Object> throughput = queryJdbcTemplate.queryForMap("""
|
||||||
|
SELECT
|
||||||
|
COUNT(*) as records_last_hour,
|
||||||
|
COUNT(*) / 3600.0 as records_per_second
|
||||||
|
FROM signal.t_vessel_latest_position
|
||||||
|
WHERE last_update > NOW() - INTERVAL '1 hour'
|
||||||
|
""");
|
||||||
|
|
||||||
|
metrics.setRecordsLastHour(((Number) throughput.get("records_last_hour")).longValue());
|
||||||
|
metrics.setRecordsPerSecond(((Number) throughput.get("records_per_second")).doubleValue());
|
||||||
|
|
||||||
|
// 인덱스 효율성
|
||||||
|
List<Map<String, Object>> indexStats = collectJdbcTemplate.queryForList("""
|
||||||
|
SELECT
|
||||||
|
indexrelname,
|
||||||
|
idx_scan,
|
||||||
|
idx_tup_read,
|
||||||
|
idx_tup_fetch,
|
||||||
|
pg_size_pretty(pg_relation_size(indexrelid)) as size
|
||||||
|
FROM pg_stat_user_indexes
|
||||||
|
WHERE schemaname = 'signal'
|
||||||
|
AND idx_scan > 0
|
||||||
|
ORDER BY idx_scan DESC
|
||||||
|
LIMIT 10
|
||||||
|
""");
|
||||||
|
|
||||||
|
metrics.setIndexEfficiency(indexStats);
|
||||||
|
|
||||||
|
// 캐시 히트율
|
||||||
|
Map<String, Object> cacheStats = collectJdbcTemplate.queryForMap("""
|
||||||
|
SELECT
|
||||||
|
sum(heap_blks_read) as heap_read,
|
||||||
|
sum(heap_blks_hit) as heap_hit,
|
||||||
|
CASE WHEN sum(heap_blks_hit) + sum(heap_blks_read) > 0 THEN
|
||||||
|
sum(heap_blks_hit)::float / (sum(heap_blks_hit) + sum(heap_blks_read))
|
||||||
|
ELSE 0 END as cache_hit_ratio
|
||||||
|
FROM pg_statio_user_tables
|
||||||
|
""");
|
||||||
|
|
||||||
|
metrics.setCacheHitRatio(((Number) cacheStats.get("cache_hit_ratio")).doubleValue());
|
||||||
|
|
||||||
|
return metrics;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 데이터 무결성 검사
|
||||||
|
*/
|
||||||
|
private DataIntegrity checkDataIntegrity() {
|
||||||
|
DataIntegrity integrity = new DataIntegrity();
|
||||||
|
|
||||||
|
// 중복 데이터 확인
|
||||||
|
Long duplicates = collectJdbcTemplate.queryForObject("""
|
||||||
|
SELECT COUNT(*) FROM (
|
||||||
|
SELECT sig_src_cd, target_id, message_time, COUNT(*)
|
||||||
|
FROM signal.sig_test
|
||||||
|
WHERE message_time > NOW() - INTERVAL '1 day'
|
||||||
|
AND sig_src_cd != '000005'
|
||||||
|
AND length(target_id) > 5
|
||||||
|
GROUP BY sig_src_cd, target_id, message_time
|
||||||
|
HAVING COUNT(*) > 1
|
||||||
|
) dup
|
||||||
|
""", Long.class);
|
||||||
|
|
||||||
|
integrity.setDuplicateRecords(duplicates);
|
||||||
|
|
||||||
|
// 누락된 시간대 확인
|
||||||
|
List<String> missingHours = collectJdbcTemplate.queryForList("""
|
||||||
|
WITH hours AS (
|
||||||
|
SELECT generate_series(
|
||||||
|
NOW() - INTERVAL '24 hours',
|
||||||
|
NOW(),
|
||||||
|
INTERVAL '1 hour'
|
||||||
|
) as hour
|
||||||
|
)
|
||||||
|
SELECT TO_CHAR(h.hour, 'YYYY-MM-DD HH24:00') as missing_hour
|
||||||
|
FROM hours h
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT DATE_TRUNC('hour', message_time) as data_hour
|
||||||
|
FROM signal.sig_test
|
||||||
|
WHERE message_time > NOW() - INTERVAL '24 hours'
|
||||||
|
GROUP BY DATE_TRUNC('hour', message_time)
|
||||||
|
) d ON h.hour = d.data_hour
|
||||||
|
WHERE d.data_hour IS NULL
|
||||||
|
""", String.class);
|
||||||
|
|
||||||
|
integrity.setMissingTimeRanges(missingHours);
|
||||||
|
|
||||||
|
// 데이터 지연 확인
|
||||||
|
LocalDateTime latestData = collectJdbcTemplate.queryForObject(
|
||||||
|
"SELECT MAX(message_time) FROM signal.sig_test",
|
||||||
|
LocalDateTime.class
|
||||||
|
);
|
||||||
|
|
||||||
|
if (latestData != null) {
|
||||||
|
long delayMinutes = java.time.Duration.between(latestData, LocalDateTime.now()).toMinutes();
|
||||||
|
integrity.setDataDelayMinutes(delayMinutes);
|
||||||
|
}
|
||||||
|
|
||||||
|
return integrity;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 시스템 리소스 확인
|
||||||
|
*/
|
||||||
|
private SystemResources checkSystemResources() {
|
||||||
|
SystemResources resources = new SystemResources();
|
||||||
|
|
||||||
|
Runtime runtime = Runtime.getRuntime();
|
||||||
|
|
||||||
|
// JVM 메모리
|
||||||
|
resources.setMaxMemory(runtime.maxMemory());
|
||||||
|
resources.setTotalMemory(runtime.totalMemory());
|
||||||
|
resources.setFreeMemory(runtime.freeMemory());
|
||||||
|
resources.setUsedMemory(runtime.totalMemory() - runtime.freeMemory());
|
||||||
|
|
||||||
|
// CPU
|
||||||
|
resources.setAvailableProcessors(runtime.availableProcessors());
|
||||||
|
|
||||||
|
// 스레드
|
||||||
|
resources.setActiveThreads(Thread.activeCount());
|
||||||
|
|
||||||
|
// 디스크 공간 (데이터베이스)
|
||||||
|
Map<String, Object> diskSpace = collectJdbcTemplate.queryForMap("""
|
||||||
|
SELECT
|
||||||
|
pg_database_size(current_database()) as db_size,
|
||||||
|
pg_size_pretty(pg_database_size(current_database())) as db_size_pretty
|
||||||
|
""");
|
||||||
|
|
||||||
|
resources.setDatabaseSize((Long) diskSpace.get("db_size"));
|
||||||
|
resources.setDatabaseSizePretty((String) diskSpace.get("db_size_pretty"));
|
||||||
|
|
||||||
|
return resources;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 권장사항 생성
|
||||||
|
*/
|
||||||
|
private List<String> generateRecommendations(DiagnosticReport report) {
|
||||||
|
List<String> recommendations = new ArrayList<>();
|
||||||
|
|
||||||
|
// 메모리 사용률 확인
|
||||||
|
SystemResources resources = report.getSystemResources();
|
||||||
|
double memoryUsage = (double) resources.getUsedMemory() / resources.getMaxMemory();
|
||||||
|
if (memoryUsage > 0.8) {
|
||||||
|
recommendations.add("High memory usage detected (" +
|
||||||
|
String.format("%.1f%%", memoryUsage * 100) +
|
||||||
|
"). Consider increasing heap size or optimizing memory usage.");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 캐시 히트율 확인
|
||||||
|
PerformanceMetrics metrics = report.getPerformanceMetrics();
|
||||||
|
if (metrics.getCacheHitRatio() < 0.9) {
|
||||||
|
recommendations.add("Low cache hit ratio (" +
|
||||||
|
String.format("%.1f%%", metrics.getCacheHitRatio() * 100) +
|
||||||
|
"). Consider increasing shared_buffers or optimizing queries.");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 데이터 지연 확인
|
||||||
|
DataIntegrity integrity = report.getDataIntegrity();
|
||||||
|
if (integrity.getDataDelayMinutes() > 30) {
|
||||||
|
recommendations.add("Data processing delay detected (" +
|
||||||
|
integrity.getDataDelayMinutes() + " minutes). Check job execution status.");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 파티션 확인
|
||||||
|
PartitionStatus partitionStatus = report.getPartitionStatus();
|
||||||
|
if (!partitionStatus.isHasFuturePartitions()) {
|
||||||
|
recommendations.add("No future partitions found. Run partition creation job.");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 중복 데이터 확인
|
||||||
|
if (integrity.getDuplicateRecords() > 0) {
|
||||||
|
recommendations.add("Found " + integrity.getDuplicateRecords() +
|
||||||
|
" duplicate records. Review data ingestion process.");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 느린 쿼리 확인
|
||||||
|
DatabaseHealth dbHealth = report.getDatabaseHealth();
|
||||||
|
if (!dbHealth.getSlowQueries().isEmpty()) {
|
||||||
|
recommendations.add("Found " + dbHealth.getSlowQueries().size() +
|
||||||
|
" slow queries. Consider query optimization or index creation.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return recommendations;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper 메소드들
|
||||||
|
private boolean testConnection(JdbcTemplate jdbcTemplate) {
|
||||||
|
try {
|
||||||
|
jdbcTemplate.queryForObject("SELECT 1", Integer.class);
|
||||||
|
return true;
|
||||||
|
} catch (Exception e) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private int getActiveConnections() {
|
||||||
|
return collectJdbcTemplate.queryForObject(
|
||||||
|
"SELECT COUNT(*) FROM pg_stat_activity WHERE state = 'active'",
|
||||||
|
Integer.class
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<Map<String, Object>> getSlowQueries() {
|
||||||
|
return collectJdbcTemplate.queryForList("""
|
||||||
|
SELECT
|
||||||
|
query,
|
||||||
|
mean_exec_time,
|
||||||
|
calls,
|
||||||
|
total_exec_time
|
||||||
|
FROM pg_stat_statements
|
||||||
|
WHERE mean_exec_time > 1000
|
||||||
|
ORDER BY mean_exec_time DESC
|
||||||
|
LIMIT 10
|
||||||
|
""");
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<Map<String, Object>> getTableSizes() {
|
||||||
|
return collectJdbcTemplate.queryForList("""
|
||||||
|
SELECT
|
||||||
|
schemaname,
|
||||||
|
tablename,
|
||||||
|
pg_size_pretty(pg_total_relation_size(schemaname||'.'||tablename)) as size
|
||||||
|
FROM pg_tables
|
||||||
|
WHERE schemaname = 'signal'
|
||||||
|
ORDER BY pg_total_relation_size(schemaname||'.'||tablename) DESC
|
||||||
|
LIMIT 10
|
||||||
|
""");
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<Map<String, Object>> getLockInfo() {
|
||||||
|
return collectJdbcTemplate.queryForList("""
|
||||||
|
SELECT
|
||||||
|
pid,
|
||||||
|
locktype,
|
||||||
|
mode,
|
||||||
|
granted
|
||||||
|
FROM pg_locks
|
||||||
|
WHERE locktype NOT IN ('virtualxid', 'relation')
|
||||||
|
LIMIT 20
|
||||||
|
""");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 리포트 클래스들
|
||||||
|
@lombok.Data
|
||||||
|
public static class DiagnosticReport {
|
||||||
|
private LocalDateTime timestamp;
|
||||||
|
private DatabaseHealth databaseHealth;
|
||||||
|
private PartitionStatus partitionStatus;
|
||||||
|
private PerformanceMetrics performanceMetrics;
|
||||||
|
private DataIntegrity dataIntegrity;
|
||||||
|
private SystemResources systemResources;
|
||||||
|
private List<String> recommendations;
|
||||||
|
}
|
||||||
|
|
||||||
|
@lombok.Data
|
||||||
|
public static class DatabaseHealth {
|
||||||
|
private boolean collectDbConnected;
|
||||||
|
private boolean queryDbConnected;
|
||||||
|
private int activeConnections;
|
||||||
|
private List<Map<String, Object>> slowQueries;
|
||||||
|
private List<Map<String, Object>> tableSizes;
|
||||||
|
private List<Map<String, Object>> lockInfo;
|
||||||
|
}
|
||||||
|
|
||||||
|
@lombok.Data
|
||||||
|
public static class PartitionStatus {
|
||||||
|
private int totalPartitions;
|
||||||
|
private boolean hasFuturePartitions;
|
||||||
|
private Map<String, Object> largestPartition;
|
||||||
|
private List<Map<String, Object>> partitionDetails;
|
||||||
|
}
|
||||||
|
|
||||||
|
@lombok.Data
|
||||||
|
public static class PerformanceMetrics {
|
||||||
|
private long recordsLastHour;
|
||||||
|
private double recordsPerSecond;
|
||||||
|
private double cacheHitRatio;
|
||||||
|
private List<Map<String, Object>> indexEfficiency;
|
||||||
|
}
|
||||||
|
|
||||||
|
@lombok.Data
|
||||||
|
public static class DataIntegrity {
|
||||||
|
private long duplicateRecords;
|
||||||
|
private List<String> missingTimeRanges;
|
||||||
|
private long dataDelayMinutes;
|
||||||
|
}
|
||||||
|
|
||||||
|
@lombok.Data
|
||||||
|
public static class SystemResources {
|
||||||
|
private long maxMemory;
|
||||||
|
private long totalMemory;
|
||||||
|
private long freeMemory;
|
||||||
|
private long usedMemory;
|
||||||
|
private int availableProcessors;
|
||||||
|
private int activeThreads;
|
||||||
|
private long databaseSize;
|
||||||
|
private String databaseSizePretty;
|
||||||
|
}
|
||||||
|
}
|
||||||
158
src/main/java/gc/mda/signal_batch/global/util/BatchUtils.java
Normal file
158
src/main/java/gc/mda/signal_batch/global/util/BatchUtils.java
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
package gc.mda.signal_batch.global.util;
|
||||||
|
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.core.JobExecution;
|
||||||
|
import org.springframework.batch.core.JobParameters;
|
||||||
|
import org.springframework.batch.core.JobParametersBuilder;
|
||||||
|
import org.springframework.batch.core.StepExecution;
|
||||||
|
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.time.*;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.Collection;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
|
||||||
|
public class BatchUtils {
|
||||||
|
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
private static final DateTimeFormatter DATETIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 기본 Job 파라미터 생성
|
||||||
|
*/
|
||||||
|
public JobParameters createJobParameters(LocalDateTime startTime, LocalDateTime endTime) {
|
||||||
|
return new JobParametersBuilder()
|
||||||
|
.addLocalDateTime("startTime", startTime)
|
||||||
|
.addLocalDateTime("endTime", endTime)
|
||||||
|
.addLong("executionTime", System.currentTimeMillis())
|
||||||
|
.addLong("tileLevel", 1L) // 기본값 1 (소해구도)
|
||||||
|
.addLong("timeBucketMinutes", 5L) // 5분 단위로 변경 (배치 주기와 일치)
|
||||||
|
.toJobParameters();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 일별 처리용 Job 파라미터 생성
|
||||||
|
*/
|
||||||
|
public JobParameters createDailyJobParameters(LocalDate processingDate) {
|
||||||
|
return new JobParametersBuilder()
|
||||||
|
.addLocalDate("processingDate", processingDate)
|
||||||
|
.addLocalDateTime("startTime", processingDate.atStartOfDay())
|
||||||
|
.addLocalDateTime("endTime", processingDate.plusDays(1).atStartOfDay())
|
||||||
|
.addLong("executionTime", System.currentTimeMillis())
|
||||||
|
.toJobParameters();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Job 실행 결과 요약
|
||||||
|
*/
|
||||||
|
public String getJobExecutionSummary(JobExecution jobExecution) {
|
||||||
|
StringBuilder summary = new StringBuilder();
|
||||||
|
summary.append(String.format("Job: %s, Status: %s\n",
|
||||||
|
jobExecution.getJobInstance().getJobName(),
|
||||||
|
jobExecution.getStatus()
|
||||||
|
));
|
||||||
|
|
||||||
|
// 방법 1: ZoneOffset 사용
|
||||||
|
LocalDateTime startTime = jobExecution.getStartTime();
|
||||||
|
LocalDateTime endTime = jobExecution.getEndTime();
|
||||||
|
|
||||||
|
if (startTime != null && endTime != null) {
|
||||||
|
// LocalDateTime을 Instant로 변환 시 시스템 기본 시간대 사용
|
||||||
|
Instant startInstant = startTime.atZone(ZoneId.systemDefault()).toInstant();
|
||||||
|
Instant endInstant = endTime.atZone(ZoneId.systemDefault()).toInstant();
|
||||||
|
|
||||||
|
Duration duration = Duration.between(startInstant, endInstant);
|
||||||
|
|
||||||
|
summary.append(String.format("Duration: %d minutes %d seconds\n",
|
||||||
|
duration.toMinutes(), duration.getSeconds() % 60
|
||||||
|
));
|
||||||
|
} else if (startTime != null) {
|
||||||
|
// endTime이 null인 경우 (실행 중)
|
||||||
|
Instant startInstant = startTime.atZone(ZoneId.systemDefault()).toInstant();
|
||||||
|
Duration duration = Duration.between(startInstant, Instant.now());
|
||||||
|
|
||||||
|
summary.append(String.format("Running for: %d minutes %d seconds\n",
|
||||||
|
duration.toMinutes(), duration.getSeconds() % 60
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// 또는 방법 2: LocalDateTime 직접 사용 (더 간단함)
|
||||||
|
/*
|
||||||
|
LocalDateTime startTime = jobExecution.getStartTime();
|
||||||
|
LocalDateTime endTime = jobExecution.getEndTime() != null ?
|
||||||
|
jobExecution.getEndTime() : LocalDateTime.now();
|
||||||
|
|
||||||
|
if (startTime != null) {
|
||||||
|
Duration duration = Duration.between(startTime, endTime);
|
||||||
|
summary.append(String.format("Duration: %d minutes %d seconds\n",
|
||||||
|
duration.toMinutes(), duration.getSeconds() % 60
|
||||||
|
));
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
Collection<StepExecution> stepExecutions = jobExecution.getStepExecutions();
|
||||||
|
for (StepExecution stepExecution : stepExecutions) {
|
||||||
|
summary.append(String.format(" Step: %s, Status: %s, Read: %d, Write: %d, Skip: %d\n",
|
||||||
|
stepExecution.getStepName(),
|
||||||
|
stepExecution.getStatus(),
|
||||||
|
stepExecution.getReadCount(),
|
||||||
|
stepExecution.getWriteCount(),
|
||||||
|
stepExecution.getSkipCount()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
return summary.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 파티션 테이블명 생성
|
||||||
|
*/
|
||||||
|
public String generatePartitionName(String baseTableName, LocalDate date) {
|
||||||
|
return baseTableName + "_" + date.format(DateTimeFormatter.ofPattern("yyMMdd"));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 처리 속도 계산 (records/sec)
|
||||||
|
*/
|
||||||
|
public double calculateThroughput(long recordCount, Duration duration) {
|
||||||
|
if (duration.getSeconds() == 0) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
return (double) recordCount / duration.getSeconds();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 메모리 사용량 로깅
|
||||||
|
*/
|
||||||
|
public void logMemoryUsage(String context) {
|
||||||
|
Runtime runtime = Runtime.getRuntime();
|
||||||
|
long maxMemory = runtime.maxMemory() / 1024 / 1024;
|
||||||
|
long totalMemory = runtime.totalMemory() / 1024 / 1024;
|
||||||
|
long freeMemory = runtime.freeMemory() / 1024 / 1024;
|
||||||
|
long usedMemory = totalMemory - freeMemory;
|
||||||
|
|
||||||
|
log.info("{} - Memory usage: Used: {}MB, Free: {}MB, Total: {}MB, Max: {}MB",
|
||||||
|
context, usedMemory, freeMemory, totalMemory, maxMemory);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 재시도 가능 여부 판단
|
||||||
|
*/
|
||||||
|
public boolean isRetryableException(Exception e) {
|
||||||
|
String message = e.getMessage();
|
||||||
|
if (message == null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return message.contains("Connection") ||
|
||||||
|
message.contains("Timeout") ||
|
||||||
|
message.contains("Lock") ||
|
||||||
|
message.contains("Deadlock");
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,418 @@
|
|||||||
|
package gc.mda.signal_batch.global.util;
|
||||||
|
|
||||||
|
import gc.mda.signal_batch.domain.vessel.model.VesselLatestPosition;
|
||||||
|
import gc.mda.signal_batch.domain.gis.model.TileStatistics;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
import org.springframework.retry.annotation.Backoff;
|
||||||
|
import org.springframework.retry.annotation.Retryable;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
import org.springframework.transaction.annotation.Isolation;
|
||||||
|
import org.springframework.transaction.annotation.Propagation;
|
||||||
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
|
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
|
||||||
|
|
||||||
|
@Slf4j
|
||||||
|
@Component
|
||||||
|
public class ConcurrentUpdateManager {
|
||||||
|
|
||||||
|
private final JdbcTemplate queryJdbcTemplate;
|
||||||
|
|
||||||
|
public ConcurrentUpdateManager(@Qualifier("queryJdbcTemplate") JdbcTemplate queryJdbcTemplate) {
|
||||||
|
this.queryJdbcTemplate = queryJdbcTemplate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Value("${vessel.batch.lock.timeout:10}")
|
||||||
|
private int lockTimeoutSeconds;
|
||||||
|
|
||||||
|
@Value("${vessel.batch.lock.max-retry:3}")
|
||||||
|
private int maxRetryAttempts;
|
||||||
|
|
||||||
|
// 락 통계 관리
|
||||||
|
private final Map<String, LockStatistics> lockStats = new ConcurrentHashMap<>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Advisory Lock을 사용한 최신 위치 업데이트
|
||||||
|
*/
|
||||||
|
@Retryable(
|
||||||
|
value = {Exception.class},
|
||||||
|
maxAttempts = 3,
|
||||||
|
backoff = @Backoff(delay = 100, maxDelay = 1000, multiplier = 2)
|
||||||
|
)
|
||||||
|
@Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRED)
|
||||||
|
public int updateLatestPositionWithLock(VesselLatestPosition position) {
|
||||||
|
String lockKey = position.getSigSrcCd() + ":" + position.getTargetId();
|
||||||
|
long lockId = generateLockId(lockKey);
|
||||||
|
|
||||||
|
LocalDateTime startTime = LocalDateTime.now();
|
||||||
|
LockStatistics stats = lockStats.computeIfAbsent(lockKey, k -> new LockStatistics());
|
||||||
|
stats.attempts.incrementAndGet();
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Advisory Lock 획득 시도
|
||||||
|
Boolean lockAcquired = queryJdbcTemplate.queryForObject(
|
||||||
|
"SELECT pg_try_advisory_lock(?)",
|
||||||
|
Boolean.class,
|
||||||
|
lockId
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!lockAcquired) {
|
||||||
|
stats.failures.incrementAndGet();
|
||||||
|
log.debug("Failed to acquire lock for vessel: {}", lockKey);
|
||||||
|
|
||||||
|
// 대기 후 재시도
|
||||||
|
Thread.sleep(50);
|
||||||
|
return updateLatestPositionWithoutLock(position);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 락 획득 성공 - 업데이트 수행
|
||||||
|
// 방법 1: queryForList 사용 (권장)
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_vessel_latest_position (
|
||||||
|
sig_src_cd, target_id, lat, lon, geom,
|
||||||
|
sog, cog, heading, ship_nm, ship_ty,
|
||||||
|
last_update, update_count, created_at
|
||||||
|
) VALUES (
|
||||||
|
?, ?, ?, ?, public.ST_SetSRID(public.ST_MakePoint(?, ?), 4326),
|
||||||
|
?, ?, ?, ?, ?,
|
||||||
|
?, 1, CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
ON CONFLICT (sig_src_cd, target_id) DO UPDATE SET
|
||||||
|
lat = EXCLUDED.lat,
|
||||||
|
lon = EXCLUDED.lon,
|
||||||
|
geom = EXCLUDED.geom,
|
||||||
|
sog = EXCLUDED.sog,
|
||||||
|
cog = EXCLUDED.cog,
|
||||||
|
heading = EXCLUDED.heading,
|
||||||
|
ship_nm = COALESCE(EXCLUDED.ship_nm, t_vessel_latest_position.ship_nm),
|
||||||
|
ship_ty = COALESCE(EXCLUDED.ship_ty, t_vessel_latest_position.ship_ty),
|
||||||
|
last_update = EXCLUDED.last_update,
|
||||||
|
update_count = t_vessel_latest_position.update_count + 1
|
||||||
|
WHERE EXCLUDED.last_update > t_vessel_latest_position.last_update
|
||||||
|
RETURNING update_count
|
||||||
|
""";
|
||||||
|
|
||||||
|
List<Integer> results = queryJdbcTemplate.queryForList(sql,
|
||||||
|
new Object[]{
|
||||||
|
position.getSigSrcCd(),
|
||||||
|
position.getTargetId(),
|
||||||
|
position.getLat(),
|
||||||
|
position.getLon(),
|
||||||
|
position.getLon(),
|
||||||
|
position.getLat(),
|
||||||
|
position.getSog(),
|
||||||
|
position.getCog(),
|
||||||
|
position.getHeading(),
|
||||||
|
position.getShipNm(),
|
||||||
|
position.getShipTy(),
|
||||||
|
Timestamp.valueOf(position.getLastUpdate())
|
||||||
|
},
|
||||||
|
Integer.class
|
||||||
|
);
|
||||||
|
|
||||||
|
// 결과 확인 - 빈 리스트면 업데이트 안됨 (이미 최신 데이터)
|
||||||
|
int updateResult = results.isEmpty() ? 0 : 1;
|
||||||
|
|
||||||
|
if (updateResult == 0) {
|
||||||
|
log.debug("Skipped update for vessel {} - existing data is newer", lockKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
stats.successes.incrementAndGet();
|
||||||
|
Duration duration = Duration.between(startTime, LocalDateTime.now());
|
||||||
|
stats.totalDuration.addAndGet((int) duration.toMillis());
|
||||||
|
|
||||||
|
return updateResult;
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
stats.errors.incrementAndGet();
|
||||||
|
log.error("Error updating vessel position: {}", lockKey, e);
|
||||||
|
throw new RuntimeException("Failed to update vessel position", e);
|
||||||
|
|
||||||
|
} finally {
|
||||||
|
// Advisory Lock 해제
|
||||||
|
try {
|
||||||
|
queryJdbcTemplate.update("SELECT pg_advisory_unlock(?)", lockId);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to release advisory lock: {}", lockId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 락 없이 업데이트 (Fallback) - 수정 버전
|
||||||
|
*/
|
||||||
|
private int updateLatestPositionWithoutLock(VesselLatestPosition position) {
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_vessel_latest_position (
|
||||||
|
sig_src_cd, target_id, lat, lon, geom,
|
||||||
|
sog, cog, heading, ship_nm, ship_ty,
|
||||||
|
last_update, update_count, created_at
|
||||||
|
) VALUES (
|
||||||
|
?, ?, ?, ?, public.ST_SetSRID(public.ST_MakePoint(?, ?), 4326),
|
||||||
|
?, ?, ?, ?, ?,
|
||||||
|
?, 1, CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
ON CONFLICT (sig_src_cd, target_id) DO UPDATE SET
|
||||||
|
lat = EXCLUDED.lat,
|
||||||
|
lon = EXCLUDED.lon,
|
||||||
|
geom = EXCLUDED.geom,
|
||||||
|
sog = EXCLUDED.sog,
|
||||||
|
cog = EXCLUDED.cog,
|
||||||
|
heading = EXCLUDED.heading,
|
||||||
|
ship_nm = COALESCE(EXCLUDED.ship_nm, t_vessel_latest_position.ship_nm),
|
||||||
|
ship_ty = COALESCE(EXCLUDED.ship_ty, t_vessel_latest_position.ship_ty),
|
||||||
|
last_update = EXCLUDED.last_update,
|
||||||
|
update_count = t_vessel_latest_position.update_count + 1
|
||||||
|
WHERE EXCLUDED.last_update > t_vessel_latest_position.last_update
|
||||||
|
""";
|
||||||
|
|
||||||
|
return queryJdbcTemplate.update(sql,
|
||||||
|
position.getSigSrcCd(),
|
||||||
|
position.getTargetId(),
|
||||||
|
position.getLat(),
|
||||||
|
position.getLon(),
|
||||||
|
position.getLon(),
|
||||||
|
position.getLat(),
|
||||||
|
position.getSog(),
|
||||||
|
position.getCog(),
|
||||||
|
position.getHeading(),
|
||||||
|
position.getShipNm(),
|
||||||
|
position.getShipTy(),
|
||||||
|
Timestamp.valueOf(position.getLastUpdate())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* 배치 업데이트 with Row-Level Locking
|
||||||
|
*/
|
||||||
|
@Transactional(isolation = Isolation.READ_COMMITTED)
|
||||||
|
public void batchUpdateWithRowLock(List<VesselLatestPosition> positions) {
|
||||||
|
// 선박별로 정렬하여 데드락 방지
|
||||||
|
positions.sort(Comparator.comparing(p -> p.getSigSrcCd() + p.getTargetId()));
|
||||||
|
|
||||||
|
String lockSql = """
|
||||||
|
SELECT 1 FROM signal.t_vessel_latest_position
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
FOR UPDATE NOWAIT
|
||||||
|
""";
|
||||||
|
|
||||||
|
String updateSql = """
|
||||||
|
UPDATE signal.t_vessel_latest_position SET
|
||||||
|
lat = ?, lon = ?, geom = public.ST_SetSRID(public.ST_MakePoint(?, ?), 4326),
|
||||||
|
sog = ?, cog = ?, heading = ?,
|
||||||
|
ship_nm = COALESCE(?, ship_nm),
|
||||||
|
ship_ty = COALESCE(?, ship_ty),
|
||||||
|
last_update = ?,
|
||||||
|
update_count = update_count + 1
|
||||||
|
WHERE sig_src_cd = ? AND target_id = ?
|
||||||
|
AND ? > last_update
|
||||||
|
""";
|
||||||
|
|
||||||
|
String insertSql = """
|
||||||
|
INSERT INTO signal.t_vessel_latest_position (
|
||||||
|
sig_src_cd, target_id, lat, lon, geom,
|
||||||
|
sog, cog, heading, ship_nm, ship_ty,
|
||||||
|
last_update, update_count, created_at
|
||||||
|
) VALUES (
|
||||||
|
?, ?, ?, ?, public.ST_SetSRID(public.ST_MakePoint(?, ?), 4326),
|
||||||
|
?, ?, ?, ?, ?,
|
||||||
|
?, 1, CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
""";
|
||||||
|
|
||||||
|
for (VesselLatestPosition position : positions) {
|
||||||
|
try {
|
||||||
|
// Row lock 시도
|
||||||
|
List<Integer> locked = queryJdbcTemplate.queryForList(
|
||||||
|
lockSql, Integer.class,
|
||||||
|
position.getSigSrcCd(), position.getTargetId()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!locked.isEmpty()) {
|
||||||
|
// 업데이트
|
||||||
|
int updated = queryJdbcTemplate.update(updateSql,
|
||||||
|
position.getLat(), position.getLon(),
|
||||||
|
position.getLon(), position.getLat(),
|
||||||
|
position.getSog(), position.getCog(), position.getHeading(),
|
||||||
|
position.getShipNm(), position.getShipTy(),
|
||||||
|
Timestamp.valueOf(position.getLastUpdate()),
|
||||||
|
position.getSigSrcCd(), position.getTargetId(),
|
||||||
|
Timestamp.valueOf(position.getLastUpdate())
|
||||||
|
);
|
||||||
|
|
||||||
|
if (updated == 0) {
|
||||||
|
log.debug("Skipped outdated update for vessel: {}:{}",
|
||||||
|
position.getSigSrcCd(), position.getTargetId());
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// 신규 삽입
|
||||||
|
queryJdbcTemplate.update(insertSql,
|
||||||
|
position.getSigSrcCd(), position.getTargetId(),
|
||||||
|
position.getLat(), position.getLon(),
|
||||||
|
position.getLon(), position.getLat(),
|
||||||
|
position.getSog(), position.getCog(), position.getHeading(),
|
||||||
|
position.getShipNm(), position.getShipTy(),
|
||||||
|
Timestamp.valueOf(position.getLastUpdate())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Failed to update vessel position: {}:{}",
|
||||||
|
position.getSigSrcCd(), position.getTargetId(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 타일 통계 병합 업데이트
|
||||||
|
*/
|
||||||
|
@Transactional(isolation = Isolation.READ_COMMITTED)
|
||||||
|
public void mergeTileStatistics(List<TileStatistics> statistics) {
|
||||||
|
String sql = """
|
||||||
|
INSERT INTO signal.t_tile_summary (
|
||||||
|
tile_id, tile_level, time_bucket, vessel_count,
|
||||||
|
unique_vessels, total_points, avg_sog, max_sog,
|
||||||
|
vessel_density, created_at
|
||||||
|
) VALUES (
|
||||||
|
?, ?, ?, ?, ?::jsonb, ?, ?, ?, ?, ?
|
||||||
|
)
|
||||||
|
ON CONFLICT (tile_id, time_bucket) DO UPDATE SET
|
||||||
|
vessel_count = t_tile_summary.vessel_count + EXCLUDED.vessel_count,
|
||||||
|
unique_vessels = t_tile_summary.unique_vessels || EXCLUDED.unique_vessels,
|
||||||
|
total_points = t_tile_summary.total_points + EXCLUDED.total_points,
|
||||||
|
avg_sog = (t_tile_summary.avg_sog * t_tile_summary.total_points +
|
||||||
|
EXCLUDED.avg_sog * EXCLUDED.total_points) /
|
||||||
|
(t_tile_summary.total_points + EXCLUDED.total_points),
|
||||||
|
max_sog = GREATEST(t_tile_summary.max_sog, EXCLUDED.max_sog),
|
||||||
|
vessel_density = (t_tile_summary.vessel_count + EXCLUDED.vessel_count) /
|
||||||
|
(SELECT area FROM signal.t_grid_tiles WHERE tile_id = t_tile_summary.tile_id)
|
||||||
|
""";
|
||||||
|
|
||||||
|
queryJdbcTemplate.batchUpdate(sql, statistics, 100, (ps, stat) -> {
|
||||||
|
ps.setString(1, stat.getTileId());
|
||||||
|
ps.setInt(2, stat.getTileLevel());
|
||||||
|
ps.setTimestamp(3, java.sql.Timestamp.valueOf(stat.getTimeBucket()));
|
||||||
|
ps.setInt(4, stat.getVesselCount());
|
||||||
|
ps.setString(5, convertToJson(stat.getUniqueVessels()));
|
||||||
|
ps.setLong(6, stat.getTotalPoints());
|
||||||
|
ps.setBigDecimal(7, stat.getAvgSog());
|
||||||
|
ps.setBigDecimal(8, stat.getMaxSog());
|
||||||
|
ps.setBigDecimal(9, stat.getVesselDensity());
|
||||||
|
ps.setTimestamp(10, java.sql.Timestamp.valueOf(LocalDateTime.now()));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 락 ID 생성
|
||||||
|
*/
|
||||||
|
private long generateLockId(String key) {
|
||||||
|
// PostgreSQL advisory lock은 bigint 사용
|
||||||
|
return Math.abs(key.hashCode());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* JSON 변환
|
||||||
|
*/
|
||||||
|
private String convertToJson(Object obj) {
|
||||||
|
try {
|
||||||
|
return new com.fasterxml.jackson.databind.ObjectMapper()
|
||||||
|
.writeValueAsString(obj);
|
||||||
|
} catch (Exception e) {
|
||||||
|
return "{}";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 락 통계 조회
|
||||||
|
*/
|
||||||
|
public Map<String, Map<String, Object>> getLockStatistics() {
|
||||||
|
Map<String, Map<String, Object>> result = new HashMap<>();
|
||||||
|
|
||||||
|
lockStats.forEach((key, stats) -> {
|
||||||
|
Map<String, Object> statMap = new HashMap<>();
|
||||||
|
statMap.put("attempts", stats.attempts.get());
|
||||||
|
statMap.put("successes", stats.successes.get());
|
||||||
|
statMap.put("failures", stats.failures.get());
|
||||||
|
statMap.put("errors", stats.errors.get());
|
||||||
|
|
||||||
|
if (stats.attempts.get() > 0) {
|
||||||
|
statMap.put("successRate",
|
||||||
|
(double) stats.successes.get() / stats.attempts.get());
|
||||||
|
statMap.put("avgDurationMs",
|
||||||
|
stats.totalDuration.get() / stats.successes.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
result.put(key, statMap);
|
||||||
|
});
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 현재 락 상태 모니터링
|
||||||
|
*/
|
||||||
|
public List<Map<String, Object>> getCurrentLocks() {
|
||||||
|
String sql = """
|
||||||
|
SELECT
|
||||||
|
pid,
|
||||||
|
locktype,
|
||||||
|
database,
|
||||||
|
relation::regclass,
|
||||||
|
mode,
|
||||||
|
granted,
|
||||||
|
EXTRACT(EPOCH FROM (NOW() - query_start)) as duration_seconds
|
||||||
|
FROM pg_locks l
|
||||||
|
JOIN pg_stat_activity a ON l.pid = a.pid
|
||||||
|
WHERE l.locktype IN ('advisory', 'relation', 'tuple')
|
||||||
|
AND a.application_name LIKE '%vessel-batch%'
|
||||||
|
ORDER BY duration_seconds DESC
|
||||||
|
""";
|
||||||
|
|
||||||
|
return queryJdbcTemplate.queryForList(sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 데드락 모니터링
|
||||||
|
*/
|
||||||
|
public List<Map<String, Object>> getDeadlockInfo() {
|
||||||
|
String sql = """
|
||||||
|
SELECT
|
||||||
|
pid,
|
||||||
|
usename,
|
||||||
|
application_name,
|
||||||
|
client_addr,
|
||||||
|
query_start,
|
||||||
|
state,
|
||||||
|
wait_event_type,
|
||||||
|
wait_event,
|
||||||
|
query
|
||||||
|
FROM pg_stat_activity
|
||||||
|
WHERE wait_event_type = 'Lock'
|
||||||
|
AND state != 'idle'
|
||||||
|
ORDER BY query_start
|
||||||
|
""";
|
||||||
|
|
||||||
|
return queryJdbcTemplate.queryForList(sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 락 통계 클래스
|
||||||
|
*/
|
||||||
|
private static class LockStatistics {
|
||||||
|
final AtomicInteger attempts = new AtomicInteger();
|
||||||
|
final AtomicInteger successes = new AtomicInteger();
|
||||||
|
final AtomicInteger failures = new AtomicInteger();
|
||||||
|
final AtomicInteger errors = new AtomicInteger();
|
||||||
|
final AtomicInteger totalDuration = new AtomicInteger();
|
||||||
|
}
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
불러오는 중...
Reference in New Issue
Block a user