feat: Add V2 REST API with WebSocket-compatible responses

- Add GisControllerV2/GisServiceV2 for CompactVesselTrack responses
- Add nationalCode and shipKindCode fields to REST API responses
- Add flexible DateTime parsing support (multiple formats)
- Add TrackConverter utility for track data conversion
- Update SwaggerConfig with V2 API endpoints and unified tags
- Update ProdDataSourceConfig for prod-mpr profile support
- Enhance Swagger documentation for all DTOs
This commit is contained in:
HeungTak Lee 2026-01-20 13:38:31 +09:00
부모 2a708b3318
커밋 89482d854f
58개의 변경된 파일, 5018개의 추가 그리고 1868개의 삭제

파일 보기

@ -8,6 +8,7 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.scheduling.annotation.Scheduled;
@ -40,6 +41,7 @@ import java.util.List;
*/
@Slf4j
@Component
@Profile("!query") // query 프로파일에서는 캐시 갱신 스케줄러 비활성화
@RequiredArgsConstructor
@ConditionalOnProperty(name = "vessel.batch.cache.latest-position.enabled", havingValue = "true", matchIfMissing = false)
public class VesselPositionCacheRefreshScheduler {
@ -117,7 +119,7 @@ public class VesselPositionCacheRefreshScheduler {
message_time as last_update
FROM signal.sig_test
WHERE message_time >= ? AND message_time < ?
AND sig_src_cd NOT IN ('000004', '000005')
AND sig_src_cd != '000005'
AND length(target_id) > 5
AND lat BETWEEN -90 AND 90
AND lon BETWEEN -180 AND 180
@ -141,10 +143,10 @@ public class VesselPositionCacheRefreshScheduler {
private void logCacheStats() {
try {
VesselLatestPositionCache.CacheStats stats = cache.getStats();
log.info("Cache Stats - Size: {}, HitRate: {:.2f}%, MissRate: {:.2f}%, Hits: {}, Misses: {}",
log.info("Cache Stats - Size: {}, HitRate: {}%, MissRate: {}%, Hits: {}, Misses: {}",
stats.currentSize(),
stats.hitRate(),
stats.missRate(),
String.format("%.2f", stats.hitRate()),
String.format("%.2f", stats.missRate()),
stats.hitCount(),
stats.missCount());
} catch (Exception e) {

파일 보기

@ -2,6 +2,8 @@ package gc.mda.signal_batch.batch.job;
import gc.mda.signal_batch.domain.vessel.model.VesselData;
import gc.mda.signal_batch.domain.vessel.model.VesselTrack;
import gc.mda.signal_batch.domain.vessel.dto.VesselBucketPositionDto;
import gc.mda.signal_batch.domain.vessel.service.VesselPreviousBucketCache;
import gc.mda.signal_batch.batch.processor.VesselTrackProcessor;
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector;
import gc.mda.signal_batch.batch.processor.AbnormalTrackDetector.AbnormalDetectionResult;
@ -56,6 +58,10 @@ public class VesselTrackStepConfig {
private final TrackClippingUtils trackClippingUtils;
private final AbnormalTrackDetector abnormalTrackDetector;
private final AbnormalTrackWriter abnormalTrackWriter;
private final VesselPreviousBucketCache previousBucketCache;
// 현재 처리 중인 버킷의 종료 위치 저장 (캐시 업데이트용)
private final Map<String, VesselBucketPositionDto> currentBucketEndPositions = new HashMap<>();
public VesselTrackStepConfig(
JobRepository jobRepository,
@ -66,7 +72,8 @@ public class VesselTrackStepConfig {
VesselTrackBulkWriter vesselTrackBulkWriter,
TrackClippingUtils trackClippingUtils,
AbnormalTrackDetector abnormalTrackDetector,
AbnormalTrackWriter abnormalTrackWriter) {
AbnormalTrackWriter abnormalTrackWriter,
VesselPreviousBucketCache previousBucketCache) {
this.jobRepository = jobRepository;
this.transactionManager = transactionManager;
this.queryDataSource = queryDataSource;
@ -76,6 +83,7 @@ public class VesselTrackStepConfig {
this.trackClippingUtils = trackClippingUtils;
this.abnormalTrackDetector = abnormalTrackDetector;
this.abnormalTrackWriter = abnormalTrackWriter;
this.previousBucketCache = previousBucketCache;
}
@Value("${vessel.batch.chunk-size:1000}")
@ -113,41 +121,112 @@ public class VesselTrackStepConfig {
if (tracks == null || tracks.isEmpty()) {
return null;
}
// 2. 강화된 비정상 궤적 필터링
// 2. 이전 버킷 위치 조회 (캐시 + DB Fallback)
List<String> vesselKeys = tracks.stream()
.map(track -> track.getSigSrcCd() + ":" + track.getTargetId())
.distinct()
.collect(Collectors.toList());
Map<String, VesselBucketPositionDto> previousPositions =
previousBucketCache.getBatch(vesselKeys);
// 3. 강화된 비정상 궤적 필터링 (버킷 + 버킷 점프 검출)
List<VesselTrack> filteredTracks = new ArrayList<>();
for (VesselTrack track : tracks) {
boolean isAbnormal = false;
String abnormalReason = "";
// 선박/항공기 구분
boolean isAircraft = "000019".equals(track.getSigSrcCd());
double speedLimit = isAircraft ? 300.0 : 100.0; // 항공기 300, 선박 100
double distanceLimit = isAircraft ? 30.0 : 10.0; // 항공기 30nm, 선박 10nm
// 평균속도 체크
// 버킷 평균속도 체크
if (track.getAvgSpeed() != null && track.getAvgSpeed().doubleValue() >= speedLimit) {
isAbnormal = true;
abnormalReason = "within_bucket_speed";
}
// 5분간 이동거리 체크
// 버킷 이동거리 체크
if (track.getDistanceNm() != null && track.getDistanceNm().doubleValue() >= distanceLimit) {
isAbnormal = true;
abnormalReason = "within_bucket_distance";
}
// 버킷 점프 검출 (NEW!)
if (!isAbnormal && track.getStartPosition() != null) {
String vesselKey = track.getSigSrcCd() + ":" + track.getTargetId();
VesselBucketPositionDto prevPosition = previousPositions.get(vesselKey);
if (prevPosition != null) {
double jumpDistance = calculateDistance(
prevPosition.getEndLat(), prevPosition.getEndLon(),
track.getStartPosition().getLat(), track.getStartPosition().getLon()
);
// 위성 AIS는 2시간, 일반 신호는 15분 범위 체크
boolean isSatellite = "000016".equals(track.getSigSrcCd());
double maxGapMinutes = isSatellite ? 120.0 : 15.0;
double expectedMaxDistance = isAircraft ? (maxGapMinutes / 60.0 * 300.0) : (maxGapMinutes / 60.0 * 50.0);
if (jumpDistance > expectedMaxDistance) {
isAbnormal = true;
abnormalReason = "bucket_to_bucket_jump";
log.warn("버킷 간 점프 검출: vessel={}, jump_distance={}nm, threshold={}nm, " +
"prev_pos=[{},{}], curr_pos=[{},{}]",
vesselKey,
String.format("%.1f", jumpDistance),
String.format("%.1f", expectedMaxDistance),
String.format("%.4f", prevPosition.getEndLat()),
String.format("%.4f", prevPosition.getEndLon()),
String.format("%.4f", track.getStartPosition().getLat()),
String.format("%.4f", track.getStartPosition().getLon()));
}
}
}
if (isAbnormal) {
log.warn("5분 비정상 궤적 감지: vessel={}, avg_speed={}, distance={}",
track.getVesselKey(), track.getAvgSpeed(), track.getDistanceNm());
saveAbnormalTrack(track);
log.warn("비정상 궤적 감지 [{}]: vessel={}, avg_speed={}, distance={}",
abnormalReason, track.getVesselKey(), track.getAvgSpeed(), track.getDistanceNm());
saveAbnormalTrack(track, abnormalReason);
} else {
filteredTracks.add(track);
// 정상 궤적의 종료 위치 저장 (캐시 업데이트용)
if (track.getEndPosition() != null) {
String vesselKey = track.getSigSrcCd() + ":" + track.getTargetId();
currentBucketEndPositions.put(vesselKey, VesselBucketPositionDto.builder()
.sigSrcCd(track.getSigSrcCd())
.targetId(track.getTargetId())
.endLon(track.getEndPosition().getLon())
.endLat(track.getEndPosition().getLat())
.endTime(track.getEndPosition().getTime())
.build());
}
}
}
return filteredTracks.isEmpty() ? null : filteredTracks;
};
}
/**
 * Great-circle distance between two lat/lon points, in nautical miles,
 * computed with the haversine formula.
 */
private double calculateDistance(double lat1, double lon1, double lat2, double lon2) {
final double EARTH_RADIUS_NM = 3440.065; // mean Earth radius in nautical miles
double phi1 = Math.toRadians(lat1);
double phi2 = Math.toRadians(lat2);
double halfDLat = Math.toRadians(lat2 - lat1) / 2;
double halfDLon = Math.toRadians(lon2 - lon1) / 2;
// haversine term: sin²(Δφ/2) + cosφ1·cosφ2·sin²(Δλ/2)
double haversine = Math.sin(halfDLat) * Math.sin(halfDLat) +
Math.cos(phi1) * Math.cos(phi2) *
Math.sin(halfDLon) * Math.sin(halfDLon);
double centralAngle = 2 * Math.atan2(Math.sqrt(haversine), Math.sqrt(1 - haversine));
return EARTH_RADIUS_NM * centralAngle;
}
private void saveAbnormalTrack(VesselTrack track) {
private void saveAbnormalTrack(VesselTrack track, String abnormalReason) {
try {
// Job 이름 설정
abnormalTrackWriter.setJobName("vesselTrackAggregationJob");
@ -220,7 +299,23 @@ public class VesselTrackStepConfig {
@Bean
@StepScope
public ItemWriter<List<VesselTrack>> vesselTrackWriter() {
return vesselTrackBulkWriter;
// Bulk Writer + 캐시 업데이트를 래핑
return chunk -> {
// 1. 기존 Writer로 DB 저장
vesselTrackBulkWriter.write(chunk);
// 2. 캐시 업데이트 (현재 버킷 종료 위치)
if (!currentBucketEndPositions.isEmpty()) {
List<VesselBucketPositionDto> positions = new ArrayList<>(currentBucketEndPositions.values());
previousBucketCache.putAll(positions);
log.debug("Updated previous bucket cache with {} vessel positions",
currentBucketEndPositions.size());
// 다음 청크를 위해 초기화
currentBucketEndPositions.clear();
}
};
}
@Bean

파일 보기

@ -7,6 +7,7 @@ import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.support.PostgresPagingQueryProvider;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
@ -32,6 +33,9 @@ public class VesselDataReader {
private final DataSource collectDataSource;
private final JdbcTemplate collectJdbcTemplate;
@Value("${vessel.filter.zero-coordinates.enabled:false}")
private boolean filterZeroCoordinates;
private static final DateTimeFormatter PARTITION_FORMATTER = DateTimeFormatter.ofPattern("yyMMdd");
public VesselDataReader(
@ -44,6 +48,17 @@ public class VesselDataReader {
@PostConstruct
public void init() {
logDataSourceInfo();
log.info("Zero coordinates filter enabled: {}", filterZeroCoordinates);
}
/**
 * Builds the SQL predicate that excludes positions near (0, 0) — likely bogus
 * GPS fixes — when the zero-coordinates filter is enabled via configuration.
 * Returns an empty string (no extra predicate) when the filter is disabled.
 */
private String getZeroCoordinatesFilter() {
return filterZeroCoordinates
? "AND NOT (lat BETWEEN -1 AND 1 AND lon BETWEEN -1 AND 1) "
: "";
}
/**
@ -88,19 +103,20 @@ public class VesselDataReader {
log.info("Using table: {}", tableName);
// 최신 위치만 가져오는 SQL - DISTINCT ON 사용
String sql = """
String sql = String.format("""
SELECT DISTINCT ON (sig_src_cd, target_id)
message_time, real_time, sig_src_cd, target_id,
lat, lon, sog, cog, heading, ship_nm, ship_ty, rot, posacc,
sensor_id, base_st_id, mode, gps_sttus, battery_sttus,
vts_cd, mmsi, vpass_id, ship_no
FROM signal.%s
WHERE message_time >= ? AND message_time < ?
WHERE message_time >= ? AND message_time < ?
AND sig_src_cd != '000005'
AND lat BETWEEN -90 AND 90
AND lat BETWEEN -90 AND 90
AND lon BETWEEN -180 AND 180
%s
ORDER BY sig_src_cd, target_id, message_time DESC
""".formatted(tableName);
""", tableName, getZeroCoordinatesFilter());
reader.setSql(sql);
@ -181,6 +197,7 @@ public class VesselDataReader {
sql.append("vts_cd, mmsi, vpass_id, ship_no ");
sql.append("FROM signal.").append(tableName).append(" ");
sql.append("WHERE message_time >= ? AND message_time < ? AND sig_src_cd != '000005' ");
sql.append(getZeroCoordinatesFilter());
sql.append("ORDER BY message_time, sig_src_cd, target_id");
reader.setSql(sql.toString());
@ -218,7 +235,10 @@ public class VesselDataReader {
"vts_cd, mmsi, vpass_id, ship_no ");
queryProvider.setFromClause("FROM signal." + tableName);
queryProvider.setWhereClause("WHERE message_time >= :startTime AND message_time < :endTime and sig_src_cd != '000005'");
String whereClause = "WHERE message_time >= :startTime AND message_time < :endTime and sig_src_cd != '000005' "
+ getZeroCoordinatesFilter();
queryProvider.setWhereClause(whereClause);
Map<String, Order> sortKeys = new HashMap<>();
sortKeys.put("message_time", Order.ASCENDING);

파일 보기

@ -108,28 +108,90 @@ public class VesselTrackBulkWriter implements ItemWriter<List<VesselTrack>> {
}
}
// track_geom만 사용하는 단순화된 COPY
// 임시 테이블 + MERGE 패턴을 사용한 Bulk Upsert
private void bulkInsertTracks(List<VesselTrack> tracks, String tableName) throws Exception {
try (Connection conn = queryDataSource.getConnection()) {
BaseConnection baseConn = conn.unwrap(BaseConnection.class);
CopyManager copyManager = new CopyManager(baseConn);
String copySql = String.format("""
COPY %s (
sig_src_cd, target_id, time_bucket, track_geom,
distance_nm, avg_speed, max_speed, point_count,
start_position, end_position
) FROM STDIN
""", tableName);
StringWriter writer = new StringWriter();
for (VesselTrack track : tracks) {
writer.write(formatTrackLine(track));
writer.write('\n');
conn.setAutoCommit(false);
try {
BaseConnection baseConn = conn.unwrap(BaseConnection.class);
CopyManager copyManager = new CopyManager(baseConn);
// 1. 임시 테이블 생성 (UNLOGGED for performance)
String tempTableName = "temp_vessel_tracks_" + Thread.currentThread().getId();
try (var stmt = conn.createStatement()) {
stmt.execute(String.format("""
CREATE TEMP TABLE IF NOT EXISTS %s (
sig_src_cd VARCHAR(10),
target_id VARCHAR(30),
time_bucket TIMESTAMP,
track_geom GEOMETRY,
distance_nm NUMERIC,
avg_speed NUMERIC,
max_speed NUMERIC,
point_count INTEGER,
start_position JSONB,
end_position JSONB
) ON COMMIT DROP
""", tempTableName));
// 임시 테이블 비우기 (이전 실행에서 남은 데이터 제거)
stmt.execute("TRUNCATE " + tempTableName);
}
// 2. COPY로 임시 테이블에 bulk insert
String copySql = String.format("""
COPY %s (
sig_src_cd, target_id, time_bucket, track_geom,
distance_nm, avg_speed, max_speed, point_count,
start_position, end_position
) FROM STDIN
""", tempTableName);
StringWriter writer = new StringWriter();
for (VesselTrack track : tracks) {
writer.write(formatTrackLine(track));
writer.write('\n');
}
long rowsCopied = copyManager.copyIn(copySql, new StringReader(writer.toString()));
// 3. 임시 테이블에서 최종 테이블로 UPSERT
String upsertSql = String.format("""
INSERT INTO %s (
sig_src_cd, target_id, time_bucket, track_geom,
distance_nm, avg_speed, max_speed, point_count,
start_position, end_position
)
SELECT
sig_src_cd, target_id, time_bucket, track_geom,
distance_nm, avg_speed, max_speed, point_count,
start_position, end_position
FROM %s
ON CONFLICT (sig_src_cd, target_id, time_bucket)
DO UPDATE SET
track_geom = EXCLUDED.track_geom,
distance_nm = EXCLUDED.distance_nm,
avg_speed = EXCLUDED.avg_speed,
max_speed = EXCLUDED.max_speed,
point_count = EXCLUDED.point_count,
start_position = EXCLUDED.start_position,
end_position = EXCLUDED.end_position
""", tableName, tempTableName);
int rowsUpserted;
try (var stmt = conn.createStatement()) {
rowsUpserted = stmt.executeUpdate(upsertSql);
}
conn.commit();
log.info("Bulk upserted {} vessel tracks to {} (copied: {}, upserted: {})",
tracks.size(), tableName, rowsCopied, rowsUpserted);
} catch (Exception e) {
conn.rollback();
throw e;
}
long rowsInserted = copyManager.copyIn(copySql, new StringReader(writer.toString()));
log.info("Bulk inserted {} vessel tracks to {} (v2 only)", rowsInserted, tableName);
}
}

파일 보기

@ -1,5 +1,8 @@
package gc.mda.signal_batch.domain.debug;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
@ -17,6 +20,7 @@ import java.util.*;
@Slf4j
@RestController
@RequestMapping("/api/debug")
@Tag(name = "시간 분석 디버그 API", description = "항적 데이터의 시간 정보 및 Unix timestamp 분석 API")
public class DebugTimeController {
private final DataSource queryDataSource;
@ -26,11 +30,12 @@ public class DebugTimeController {
}
@GetMapping("/time-analysis")
@Operation(summary = "시간 데이터 분석", description = "특정 선박의 항적 데이터에서 시간 정보(time_bucket, Unix timestamp)를 상세 분석합니다. DB 서버 시간, 최근 데이터, 시간 차이 분석을 포함합니다")
public Map<String, Object> analyzeTimeData(
@RequestParam(defaultValue = "000001") String sigSrcCd,
@RequestParam(defaultValue = "440331240") String targetId,
@RequestParam(defaultValue = "2025-08-26T08:02:59") String startTime,
@RequestParam(defaultValue = "2025-08-27T08:02:59") String endTime) {
@Parameter(description = "신호 소스 코드 (기본: 000001)") @RequestParam(defaultValue = "000001") String sigSrcCd,
@Parameter(description = "선박 ID (기본: 440331240)") @RequestParam(defaultValue = "440331240") String targetId,
@Parameter(description = "시작 시간 (형식: yyyy-MM-ddTHH:mm:ss)") @RequestParam(defaultValue = "2025-08-26T08:02:59") String startTime,
@Parameter(description = "종료 시간 (형식: yyyy-MM-ddTHH:mm:ss)") @RequestParam(defaultValue = "2025-08-27T08:02:59") String endTime) {
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
Map<String, Object> result = new HashMap<>();

파일 보기

@ -23,7 +23,7 @@ import java.util.Map;
@RestController
@RequestMapping("/api/v1")
@RequiredArgsConstructor
@Tag(name = "항적 조회 API", description = "해구 및 영역별 선박 항적 조회 및 통계 API")
@Tag(name = "항적 조회 API V1", description = "해구 및 영역별 선박 항적 조회 및 통계 API (WKT 응답)")
public class GisController {
private final GisService gisService;

파일 보기

@ -0,0 +1,207 @@
package gc.mda.signal_batch.domain.gis.controller;
import gc.mda.signal_batch.domain.gis.dto.GisBoundaryResponse;
import gc.mda.signal_batch.domain.vessel.dto.CompactVesselTrack;
import gc.mda.signal_batch.domain.vessel.dto.VesselStatsResponse;
import gc.mda.signal_batch.domain.vessel.dto.VesselTracksRequest;
import gc.mda.signal_batch.domain.vessel.dto.RecentVesselPositionDto;
import gc.mda.signal_batch.domain.gis.service.GisService;
import gc.mda.signal_batch.domain.gis.service.GisServiceV2;
import gc.mda.signal_batch.domain.vessel.service.VesselPositionService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
 * GIS API V2 — same response structure as the WebSocket API (CompactVesselTrack).
 *
 * Differences from V1:
 * - Track queries return CompactVesselTrack (merged per vessel, array based)
 * - Supports the integrated-vessel filtering option
 * - Includes vessel metadata (shipName, shipType, nationalCode)
 */
@Slf4j
@RestController
@RequestMapping("/api/v2")
@RequiredArgsConstructor
@Tag(name = "항적 조회 API V2", description = "해구/영역별 선박 항적 조회 API (WebSocket 호환 CompactVesselTrack 응답)")
public class GisControllerV2 {
// V1 service — reused as-is for boundary/stats endpoints and the POST track lookup.
private final GisService gisService;
// V2 service — produces the CompactVesselTrack responses for haegu/area track queries.
private final GisServiceV2 gisServiceV2;
private final VesselPositionService vesselPositionService;
// All haegu (sea-grid) boundaries; delegates to the V1 service unchanged.
@GetMapping("/haegu/boundaries")
@Operation(
summary = "해구 경계 조회",
description = "모든 해구의 경계 정보를 GeoJSON 형식으로 반환합니다."
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공",
content = @Content(array = @ArraySchema(schema = @Schema(implementation = GisBoundaryResponse.class))))
})
public List<GisBoundaryResponse> getHaeguBoundaries() {
return gisService.getHaeguBoundaries();
}
// Per-haegu vessel statistics over the trailing time window (minutes).
@GetMapping("/haegu/vessel-stats")
@Operation(
summary = "해구별 선박 통계",
description = "지정된 시간 범위 내의 해구별 선박 통계를 조회합니다."
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공")
})
public Map<Integer, VesselStatsResponse> getHaeguVesselStats(
@Parameter(description = "조회 시간 범위 (분)", example = "60")
@RequestParam(defaultValue = "60") int minutes) {
return gisService.getHaeguVesselStats(minutes);
}
// User-defined area boundaries; delegates to the V1 service unchanged.
@GetMapping("/areas/boundaries")
@Operation(
summary = "사용자 정의 영역 경계 조회",
description = "모든 사용자 정의 영역의 경계 정보를 GeoJSON 형식으로 반환합니다."
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공",
content = @Content(array = @ArraySchema(schema = @Schema(implementation = GisBoundaryResponse.class))))
})
public List<GisBoundaryResponse> getAreaBoundaries() {
return gisService.getAreaBoundaries();
}
// Per-area vessel statistics over the trailing time window (minutes).
@GetMapping("/areas/vessel-stats")
@Operation(
summary = "영역별 선박 통계",
description = "지정된 시간 범위 내의 영역별 선박 통계를 조회합니다."
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공")
})
public Map<String, VesselStatsResponse> getAreaVesselStats(
@Parameter(description = "조회 시간 범위 (분)", example = "60")
@RequestParam(defaultValue = "60") int minutes) {
return gisService.getAreaVesselStats(minutes);
}
// V2 track query for one haegu cell; returns merged-per-vessel compact tracks.
@GetMapping("/tracks/haegu/{haeguNo}")
@Operation(
summary = "해구별 선박 항적 조회",
description = """
특정 해구 선박 항적을 CompactVesselTrack 형식으로 반환합니다.
**V1과의 차이점:**
- 선박별로 병합된 단일 객체 반환 (V1은 세그먼트별 분리)
- geometry: [[lon, lat], ...] 배열 형태 (V1은 WKT)
- timestamps, speeds 배열 포함
- 선박 정보(shipName, shipType, nationalCode) 포함
- 통합선박 필터링 지원
"""
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공",
content = @Content(array = @ArraySchema(schema = @Schema(implementation = CompactVesselTrack.class))))
})
public List<CompactVesselTrack> getHaeguTracks(
@Parameter(description = "해구 번호", example = "1", required = true)
@PathVariable Integer haeguNo,
@Parameter(description = "조회 시간 범위 (분, 최대 1440)", example = "60")
@RequestParam(defaultValue = "60") int minutes,
@Parameter(description = "통합선박 필터링 (0: 미적용, 1: 적용)", example = "0",
schema = @Schema(allowableValues = {"0", "1"}))
@RequestParam(defaultValue = "0") String isIntegration) {
// "1" enables integrated-vessel filtering; any other value disables it.
boolean filterByIntegration = "1".equals(isIntegration);
return gisServiceV2.getHaeguTracks(haeguNo, minutes, filterByIntegration);
}
// V2 track query for one user-defined area; same response shape as the haegu endpoint.
@GetMapping("/tracks/area/{areaId}")
@Operation(
summary = "영역별 선박 항적 조회",
description = """
특정 영역 선박 항적을 CompactVesselTrack 형식으로 반환합니다.
**V1과의 차이점:**
- 선박별로 병합된 단일 객체 반환 (V1은 세그먼트별 분리)
- geometry: [[lon, lat], ...] 배열 형태 (V1은 WKT)
- timestamps, speeds 배열 포함
- 선박 정보(shipName, shipType, nationalCode) 포함
- 통합선박 필터링 지원
"""
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공",
content = @Content(array = @ArraySchema(schema = @Schema(implementation = CompactVesselTrack.class))))
})
public List<CompactVesselTrack> getAreaTracks(
@Parameter(description = "영역 ID", example = "AREA_001", required = true)
@PathVariable String areaId,
@Parameter(description = "조회 시간 범위 (분, 최대 1440)", example = "60")
@RequestParam(defaultValue = "60") int minutes,
@Parameter(description = "통합선박 필터링 (0: 미적용, 1: 적용)", example = "0",
schema = @Schema(allowableValues = {"0", "1"}))
@RequestParam(defaultValue = "0") String isIntegration) {
boolean filterByIntegration = "1".equals(isIntegration);
return gisServiceV2.getAreaTracks(areaId, minutes, filterByIntegration);
}
// Track lookup for an explicit list of vessels supplied in the request body.
@PostMapping("/tracks/vessels")
@Operation(
summary = "선박별 항적 조회",
description = """
지정된 선박 목록의 항적을 시간 범위 내에서 조회합니다.
**응답 구조:** CompactVesselTrack (WebSocket과 동일)
- 선박별 병합된 단일 객체
- geometry/timestamps/speeds 배열 포함
- 선박 메타정보 포함
"""
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공",
content = @Content(array = @ArraySchema(schema = @Schema(implementation = CompactVesselTrack.class))))
})
public List<CompactVesselTrack> getVesselTracks(
@io.swagger.v3.oas.annotations.parameters.RequestBody(
description = "선박 항적 조회 요청",
required = true,
content = @Content(schema = @Schema(implementation = VesselTracksRequest.class))
)
@RequestBody VesselTracksRequest request) {
// NOTE(review): this V2 endpoint delegates to the V1 GisService rather than
// gisServiceV2. The declared return type is CompactVesselTrack, so GisService
// presumably already produces the compact form here — confirm this is intended.
return gisService.getVesselTracks(request);
}
// Latest position of every vessel updated within the last `minutes` minutes.
@GetMapping("/vessels/recent-positions")
@Operation(
summary = "최근 위치 업데이트된 선박 조회",
description = "지정된 시간(분) 이내에 위치가 업데이트된 모든 선박의 최신 위치 정보를 반환합니다."
)
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "조회 성공",
content = @Content(array = @ArraySchema(schema = @Schema(implementation = RecentVesselPositionDto.class)))),
@ApiResponse(responseCode = "400", description = "잘못된 파라미터 (minutes: 1~1440)")
})
public List<RecentVesselPositionDto> getRecentVesselPositions(
@Parameter(description = "조회 시간 범위 (분, 1~1440)", example = "5")
@RequestParam(defaultValue = "5") int minutes) {
// Reject out-of-range windows (documented 1~1440) before hitting the service.
if (minutes <= 0 || minutes > 1440) {
throw new IllegalArgumentException("Minutes must be between 1 and 1440");
}
return vesselPositionService.getRecentVesselPositions(minutes);
}
}

파일 보기

@ -1,27 +1,39 @@
package gc.mda.signal_batch.domain.gis.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Data;
/**
 * Boundary descriptor returned by the GIS boundary endpoints, covering both
 * haegu (sea-grid) cells and user-defined areas. Per the field Schema
 * descriptions, haeguNo is populated for haegu queries and areaId/areaName
 * for area queries — presumably mutually exclusive; confirm against GisService.
 */
@Data
@Builder
@Schema(description = "해구/영역 경계 정보")
public class GisBoundaryResponse {
// Sea-grid (haegu) cell number, present for haegu queries (roughly 1..200 per the Schema text).
@JsonProperty("haegu_no")
@Schema(description = "해구 번호 (해구 조회 시, 1~약 200)", example = "93")
private Integer haeguNo;
// User-defined area identifier, present for area queries.
@JsonProperty("area_id")
@Schema(description = "영역 ID (영역 조회 시)", example = "AREA_YEOSU_01")
private String areaId;
// Human-readable area name.
@JsonProperty("area_name")
@Schema(description = "영역명", example = "여수 연안")
private String areaName;
// Boundary geometry serialized as a GeoJSON Polygon string (not a parsed object).
@JsonProperty("geom_json")
@Schema(
description = "GeoJSON 형식의 geometry (Polygon)",
example = "{\"type\":\"Polygon\",\"coordinates\":[[[127.0,34.2],[127.1,34.2],[127.1,34.3],[127.0,34.3],[127.0,34.2]]]}"
)
private String geomJson;
// Centroid latitude of the boundary.
@JsonProperty("center_lat")
@Schema(description = "중심점 위도", example = "34.22")
private Double centerLat;
// Centroid longitude of the boundary.
@JsonProperty("center_lon")
@Schema(description = "중심점 경도", example = "127.05")
private Double centerLon;
}

파일 보기

@ -1,5 +1,8 @@
package gc.mda.signal_batch.domain.gis.dto;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import gc.mda.signal_batch.global.config.FlexibleLocalDateTimeDeserializer;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
@ -11,8 +14,31 @@ import java.time.LocalDateTime;
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Schema(description = "타일 집계 조회 요청")
public class TileAggregationRequest {
// Start of the query window (inclusive). Accepts multiple textual formats via
// FlexibleLocalDateTimeDeserializer — per the Schema text: ISO 8601 (with or
// without trailing 'Z'), "yyyy-MM-dd HH:mm:ss", and compact "yyyyMMddHHmmss".
@Schema(
description = """
조회 시작 시간.
지원 형식: ISO 8601 (2026-01-20T00:00:00, 2026-01-20T00:00:00Z),
표준 형식 (2026-01-20 00:00:00), 압축 형식 (20260120000000)
""",
example = "2026-01-20T00:00:00"
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime fromDate;
// End of the query window; same flexible parsing as fromDate.
@Schema(
description = """
조회 종료 시간.
지원 형식: ISO 8601 (2026-01-20T23:59:59, 2026-01-20T23:59:59Z),
표준 형식 (2026-01-20 23:59:59), 압축 형식 (20260120235959)
""",
example = "2026-01-20T23:59:59"
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime toDate;
// Tile identifier — example suggests "zoom_x_y" form; confirm against the tile service.
@Schema(description = "타일 ID", example = "12_3456_7890")
private String tileId;
}

파일 보기

@ -8,6 +8,8 @@ import gc.mda.signal_batch.domain.vessel.dto.CompactVesselTrack;
import gc.mda.signal_batch.domain.vessel.dto.IntegrationVessel;
import gc.mda.signal_batch.domain.vessel.service.IntegrationVesselService;
import gc.mda.signal_batch.global.util.IntegrationSignalConstants;
import gc.mda.signal_batch.global.util.NationalCodeUtil;
import gc.mda.signal_batch.global.util.ShipKindCodeConverter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
@ -568,11 +570,22 @@ public class GisService {
// Get vessel info
Map<String, String> vesselInfo = getVesselInfo(vessel.getSigSrcCd(), vessel.getTargetId());
String shipName = vesselInfo.get("ship_name");
String shipType = vesselInfo.get("ship_type");
// Calculate nationalCode (same as WebSocket)
String nationalCode = NationalCodeUtil.calculateNationalCode(
vessel.getSigSrcCd(), vessel.getTargetId());
// Calculate shipKindCode (same as WebSocket - using name pattern matching for buoy/net detection)
String shipKindCode = ShipKindCodeConverter.getShipKindCodeWithNamePattern(
vessel.getSigSrcCd(), shipType, shipName, vessel.getTargetId());
return CompactVesselTrack.builder()
.vesselId(vesselId)
.sigSrcCd(vessel.getSigSrcCd())
.targetId(vessel.getTargetId())
.nationalCode(nationalCode)
.geometry(geometry)
.timestamps(timestamps)
.speeds(speeds)
@ -580,9 +593,9 @@ public class GisService {
.avgSpeed(avgSpeed)
.maxSpeed(maxSpeed)
.pointCount(geometry.size())
.shipName(vesselInfo.get("ship_name"))
.shipType(vesselInfo.get("ship_type"))
.shipKindCode(null) // Not available in current schema
.shipName(shipName)
.shipType(shipType)
.shipKindCode(shipKindCode)
.build();
}

파일 보기

@ -0,0 +1,370 @@
package gc.mda.signal_batch.domain.gis.service;
import gc.mda.signal_batch.domain.vessel.dto.CompactVesselTrack;
import gc.mda.signal_batch.domain.vessel.dto.TrackResponse;
import gc.mda.signal_batch.domain.vessel.dto.IntegrationVessel;
import gc.mda.signal_batch.domain.vessel.service.IntegrationVesselService;
import gc.mda.signal_batch.global.util.IntegrationSignalConstants;
import gc.mda.signal_batch.global.util.TrackConverter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* GIS 서비스 V2 - CompactVesselTrack 기반 응답
* WebSocket API와 동일한 응답 구조 제공
*/
@Slf4j
@Service
public class GisServiceV2 {
private final DataSource queryDataSource;
private final IntegrationVesselService integrationVesselService;
// 선박 정보 캐시 (TTL: 1시간)
private final ConcurrentHashMap<String, VesselInfoCache> vesselInfoCache = new ConcurrentHashMap<>();
private static final long VESSEL_CACHE_TTL = 3600_000; // 1시간
public GisServiceV2(@Qualifier("queryDataSource") DataSource queryDataSource,
IntegrationVesselService integrationVesselService) {
this.queryDataSource = queryDataSource;
this.integrationVesselService = integrationVesselService;
}
/**
 * Vessel track lookup for a single haegu (sea-grid) cell, returning the V2
 * CompactVesselTrack shape.
 *
 * Windows up to 60 minutes are served from the 5-minute track table alone.
 * Longer windows (capped at 1440 minutes for the historical part) combine the
 * hourly rollup table for the portion before the current hour with the
 * 5-minute table from the current hour onward. Raw rows are merged per vessel
 * by TrackConverter and optionally filtered to integrated vessels.
 *
 * Fix vs. the original: SQL is now executed with bind parameters instead of
 * String.format-interpolated literals. This removes the injection-prone
 * pattern and the fragile LocalDateTime.toString() literal formatting
 * (which omits ":ss" when seconds are zero). LocalDateTime arguments bind
 * through JDBC 4.2 setObject, which the PostgreSQL driver supports.
 *
 * @param haeguNo             haegu cell number used in the EXISTS filter
 * @param minutes             lookback window in minutes
 * @param filterByIntegration when true and the integration service is enabled,
 *                            keep only integrated vessels
 * @return merged per-vessel compact tracks; empty list when nothing matched
 */
public List<CompactVesselTrack> getHaeguTracks(Integer haeguNo, int minutes, boolean filterByIntegration) {
JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
List<TrackResponse> rawTracks = new ArrayList<>();
LocalDateTime now = LocalDateTime.now();
LocalDateTime startTime = now.minusMinutes(minutes);
if (minutes > 60) {
LocalDateTime currentHour = now.withMinute(0).withSecond(0).withNano(0);
if (minutes <= 1440) {
// Historical portion (start of window up to the current hour) from the hourly rollup.
String hourlySql = """
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
public.ST_AsText(t.track_geom) as track_geom,
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
FROM signal.t_vessel_tracks_hourly t
WHERE EXISTS (
SELECT 1 FROM signal.t_grid_vessel_tracks g
WHERE g.sig_src_cd = t.sig_src_cd
AND g.target_id = t.target_id
AND g.haegu_no = ?
AND g.time_bucket >= ?
)
AND t.time_bucket >= ?
AND t.time_bucket < ?
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
""";
rawTracks.addAll(jdbcTemplate.query(hourlySql, this::mapTrackResponse,
haeguNo, startTime, startTime, currentHour));
}
// Recent portion (current hour onward) from the 5-minute table.
String recentSql = """
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
public.ST_AsText(t.track_geom) as track_geom,
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
FROM signal.t_vessel_tracks_5min t
WHERE EXISTS (
SELECT 1 FROM signal.t_grid_vessel_tracks g
WHERE g.sig_src_cd = t.sig_src_cd
AND g.target_id = t.target_id
AND g.haegu_no = ?
AND g.time_bucket >= ?
)
AND t.time_bucket >= ?
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
""";
rawTracks.addAll(jdbcTemplate.query(recentSql, this::mapTrackResponse,
haeguNo, startTime, currentHour));
} else {
// Short windows use only the 5-minute table. NOW() - (? * INTERVAL '1 minute')
// preserves the original DB-clock semantics while still binding the value.
String sql = """
SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
public.ST_AsText(t.track_geom) as track_geom,
t.distance_nm, t.avg_speed, t.max_speed, t.point_count
FROM signal.t_vessel_tracks_5min t
WHERE EXISTS (
SELECT 1 FROM signal.t_grid_vessel_tracks g
WHERE g.sig_src_cd = t.sig_src_cd
AND g.target_id = t.target_id
AND g.haegu_no = ?
AND g.time_bucket >= NOW() - (? * INTERVAL '1 minute')
)
AND t.time_bucket >= NOW() - (? * INTERVAL '1 minute')
ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
""";
rawTracks = jdbcTemplate.query(sql, this::mapTrackResponse, haeguNo, minutes, minutes);
}
// Merge per vessel into the WebSocket-compatible compact form.
List<CompactVesselTrack> result = TrackConverter.convert(rawTracks, this::getVesselInfo);
// Optional integrated-vessel filtering.
if (filterByIntegration && integrationVesselService.isEnabled()) {
result = filterByIntegration(result);
}
log.debug("V2 API: Fetched {} compact tracks for haegu {} in last {} minutes",
result.size(), haeguNo, minutes);
return result;
}
/**
 * Area-scoped vessel track lookup (V2 - returns CompactVesselTrack).
 *
 * <p>Windows of one hour or less read only the 5-minute rollup table; longer windows
 * (up to 24 h) combine the hourly rollup (history before the current hour) with the
 * 5-minute rollup (current hour onwards).
 *
 * <p>All user-supplied values ({@code areaId}, {@code minutes}) and the computed
 * timestamps are bound as JDBC parameters instead of being interpolated into the SQL
 * text, so a crafted areaId cannot inject SQL.
 *
 * @param areaId              user-defined area identifier
 * @param minutes             lookback window in minutes
 * @param filterByIntegration apply integrated-vessel priority filtering when enabled
 * @return merged compact tracks for vessels observed in the area
 */
public List<CompactVesselTrack> getAreaTracks(String areaId, int minutes, boolean filterByIntegration) {
    JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
    List<TrackResponse> rawTracks = new ArrayList<>();
    LocalDateTime now = LocalDateTime.now();
    LocalDateTime startTime = now.minusMinutes(minutes);
    if (minutes > 60) {
        LocalDateTime currentHour = now.withMinute(0).withSecond(0).withNano(0);
        if (minutes <= 1440) {
            // Historical portion: hourly rollup up to (but excluding) the current hour.
            String hourlySql = """
                    SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
                        public.ST_AsText(t.track_geom) as track_geom,
                        t.distance_nm, t.avg_speed, t.max_speed, t.point_count
                    FROM signal.t_vessel_tracks_hourly t
                    WHERE EXISTS (
                        SELECT 1 FROM signal.t_area_vessel_tracks a
                        WHERE a.sig_src_cd = t.sig_src_cd
                        AND a.target_id = t.target_id
                        AND a.area_id = ?
                        AND a.time_bucket >= ?
                    )
                    AND t.time_bucket >= ?
                    AND t.time_bucket < ?
                    ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
                    """;
            rawTracks.addAll(jdbcTemplate.query(hourlySql, this::mapTrackResponse,
                    areaId, startTime, startTime, currentHour));
        }
        // Recent portion: 5-minute rollup from the start of the current hour onwards.
        String recentSql = """
                SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
                    public.ST_AsText(t.track_geom) as track_geom,
                    t.distance_nm, t.avg_speed, t.max_speed, t.point_count
                FROM signal.t_vessel_tracks_5min t
                WHERE EXISTS (
                    SELECT 1 FROM signal.t_area_vessel_tracks a
                    WHERE a.sig_src_cd = t.sig_src_cd
                    AND a.target_id = t.target_id
                    AND a.area_id = ?
                    AND a.time_bucket >= ?
                )
                AND t.time_bucket >= ?
                ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
                """;
        rawTracks.addAll(jdbcTemplate.query(recentSql, this::mapTrackResponse,
                areaId, startTime, currentHour));
    } else {
        // Windows of one hour or less come entirely from the 5-minute rollup.
        // NOW() is evaluated on the DB side, as in the original query; the bound
        // integer scales a one-minute interval (PostgreSQL interval arithmetic).
        String sql = """
                SELECT DISTINCT t.sig_src_cd, t.target_id, t.time_bucket,
                    public.ST_AsText(t.track_geom) as track_geom,
                    t.distance_nm, t.avg_speed, t.max_speed, t.point_count
                FROM signal.t_vessel_tracks_5min t
                WHERE EXISTS (
                    SELECT 1 FROM signal.t_area_vessel_tracks a
                    WHERE a.sig_src_cd = t.sig_src_cd
                    AND a.target_id = t.target_id
                    AND a.area_id = ?
                    AND a.time_bucket >= NOW() - INTERVAL '1 minute' * ?
                )
                AND t.time_bucket >= NOW() - INTERVAL '1 minute' * ?
                ORDER BY t.sig_src_cd, t.target_id, t.time_bucket
                """;
        rawTracks = jdbcTemplate.query(sql, this::mapTrackResponse, areaId, minutes, minutes);
    }
    // Merge per-bucket WKT segments into per-vessel compact tracks.
    List<CompactVesselTrack> result = TrackConverter.convert(rawTracks, this::getVesselInfo);
    // Optional integrated-vessel priority filtering.
    if (filterByIntegration && integrationVesselService.isEnabled()) {
        result = filterByIntegration(result);
    }
    log.debug("V2 API: Fetched {} compact tracks for area {} in last {} minutes",
            result.size(), areaId, minutes);
    return result;
}
/**
 * Row mapper for the track rollup queries: converts a single JDBC row into a
 * {@code TrackResponse}.
 */
private TrackResponse mapTrackResponse(ResultSet rs, int rowNum) throws SQLException {
    // Read every column into a named local first so the mapping between column
    // and DTO field is explicit.
    String sigSrcCd = rs.getString("sig_src_cd");
    String targetId = rs.getString("target_id");
    LocalDateTime timeBucket = rs.getObject("time_bucket", LocalDateTime.class);
    String trackGeom = rs.getString("track_geom");
    BigDecimal distanceNm = rs.getBigDecimal("distance_nm");
    BigDecimal avgSpeed = rs.getBigDecimal("avg_speed");
    BigDecimal maxSpeed = rs.getBigDecimal("max_speed");
    int pointCount = rs.getInt("point_count");

    return TrackResponse.builder()
            .sigSrcCd(sigSrcCd)
            .targetId(targetId)
            .timeBucket(timeBucket)
            .trackGeom(trackGeom)
            .distanceNm(distanceNm)
            .avgSpeed(avgSpeed)
            .maxSpeed(maxSpeed)
            .pointCount(pointCount)
            .build();
}
/**
 * Looks up a vessel's name/type, serving from the in-memory TTL cache when possible.
 *
 * <p>On a cache miss the latest-position table is queried; missing or null columns
 * fall back to "-". Failed lookups (including "no row found") are not cached, so
 * unknown vessels will be re-queried on every call — acceptable for now, but worth
 * revisiting if lookup volume grows.
 *
 * @param sigSrcCd signal-source code
 * @param targetId target id (e.g. MMSI)
 * @return vessel name/type, with "-" placeholders when unknown
 */
private TrackConverter.VesselInfo getVesselInfo(String sigSrcCd, String targetId) {
    String cacheKey = sigSrcCd + "_" + targetId;
    VesselInfoCache cached = vesselInfoCache.get(cacheKey);
    if (cached != null && !cached.isExpired()) {
        return new TrackConverter.VesselInfo(cached.shipName, cached.shipType);
    }
    // Cache miss (or expired entry): fall back to the DB.
    JdbcTemplate jdbcTemplate = new JdbcTemplate(queryDataSource);
    try {
        String sql = """
            SELECT ship_nm, ship_ty
            FROM signal.t_vessel_latest_position
            WHERE sig_src_cd = ? AND target_id = ?
            LIMIT 1
            """;
        Map<String, Object> result = jdbcTemplate.queryForMap(sql, sigSrcCd, targetId);
        Object shipNm = result.get("ship_nm");
        Object shipTy = result.get("ship_ty");
        String shipName = shipNm != null ? shipNm.toString() : "-";
        String shipType = shipTy != null ? shipTy.toString() : "-";
        // Store the successful lookup for subsequent calls.
        vesselInfoCache.put(cacheKey, new VesselInfoCache(shipName, shipType));
        return new TrackConverter.VesselInfo(shipName, shipType);
    } catch (Exception e) {
        // queryForMap throws EmptyResultDataAccessException for unknown vessels, which
        // is expected; log at debug (instead of swallowing silently) so genuine DB
        // failures are still diagnosable.
        log.debug("Vessel info lookup failed for {}:{} - {}", sigSrcCd, targetId, e.getMessage());
        return new TrackConverter.VesselInfo("-", "-");
    }
}
/**
 * Integrated-vessel filtering: collapses tracks of the same physical vessel, reported
 * by multiple signal sources, down to the highest-priority source.
 *
 * <p>Steps:
 * <ol>
 *   <li>Resolve integration info once per distinct (sigSrcCd, targetId) pair.</li>
 *   <li>Group tracks by integration sequence; vessels with no integration record get a
 *       unique negative placeholder sequence so each forms its own group.</li>
 *   <li>For each group with integration info, keep only the tracks of the
 *       highest-priority signal source present; tag every surviving track with its
 *       integration id.</li>
 * </ol>
 *
 * @param tracks tracks to filter (returned unchanged when null or empty)
 * @return filtered tracks with {@code integrationTargetId} populated
 */
private List<CompactVesselTrack> filterByIntegration(List<CompactVesselTrack> tracks) {
    if (tracks == null || tracks.isEmpty()) {
        return tracks;
    }
    // 1. Resolve integration info for every distinct vessel (one lookup per pair).
    Map<String, IntegrationVessel> vesselIntegrations = new HashMap<>();
    for (CompactVesselTrack track : tracks) {
        String key = track.getSigSrcCd() + "_" + track.getTargetId();
        if (!vesselIntegrations.containsKey(key)) {
            IntegrationVessel integration = integrationVesselService.findByVessel(
                track.getSigSrcCd(), track.getTargetId()
            );
            vesselIntegrations.put(key, integration);
        }
    }
    // 2. Group by integration sequence. tempSeq counts downward so each vessel
    //    without an integration record lands in its own (negative-keyed) group.
    Map<Long, List<CompactVesselTrack>> groupedByIntegration = new HashMap<>();
    Map<Long, IntegrationVessel> integrationMap = new HashMap<>();
    long tempSeq = -1;
    for (CompactVesselTrack track : tracks) {
        String key = track.getSigSrcCd() + "_" + track.getTargetId();
        IntegrationVessel integration = vesselIntegrations.get(key);
        Long seq;
        if (integration != null) {
            seq = integration.getIntgrSeq();
            integrationMap.putIfAbsent(seq, integration);
        } else {
            seq = tempSeq--;
        }
        groupedByIntegration.computeIfAbsent(seq, k -> new ArrayList<>()).add(track);
    }
    // 3. Per group, keep only the highest-priority signal source.
    List<CompactVesselTrack> result = new ArrayList<>();
    for (Map.Entry<Long, List<CompactVesselTrack>> entry : groupedByIntegration.entrySet()) {
        Long seq = entry.getKey();
        List<CompactVesselTrack> groupTracks = entry.getValue();
        if (seq < 0) {
            // Standalone vessel (no integration record): keep all its tracks, tagged
            // with a synthetic "solo" integration id.
            CompactVesselTrack firstTrack = groupTracks.get(0);
            String soloIntegrationId = IntegrationSignalConstants.generateSoloIntegrationId(
                firstTrack.getSigSrcCd(),
                firstTrack.getTargetId()
            );
            groupTracks.forEach(t -> t.setIntegrationTargetId(soloIntegrationId));
            result.addAll(groupTracks);
        } else {
            // Integrated vessel: select the highest-priority source among those
            // actually present in this result set.
            IntegrationVessel integration = integrationMap.get(seq);
            Set<String> existingSigSrcCds = groupTracks.stream()
                .map(CompactVesselTrack::getSigSrcCd)
                .collect(Collectors.toSet());
            String selectedSigSrcCd = integrationVesselService.selectHighestPriorityFromExisting(existingSigSrcCds);
            List<CompactVesselTrack> selectedTracks = groupTracks.stream()
                .filter(t -> t.getSigSrcCd().equals(selectedSigSrcCd))
                .collect(Collectors.toList());
            String integrationId = integration.generateIntegrationId();
            selectedTracks.forEach(t -> t.setIntegrationTargetId(integrationId));
            result.addAll(selectedTracks);
        }
    }
    log.info("[INTEGRATION_FILTER] V2 API - Filtered {} tracks to {} tracks", tracks.size(), result.size());
    return result;
}
/**
 * Immutable TTL-based cache entry for vessel name/type lookups.
 *
 * <p>Fields are {@code final}: an entry is written once at creation time and only
 * ever read afterwards, so there is no reason to leave them mutable.
 */
private static class VesselInfoCache {
    final String shipName;
    final String shipType;
    final long cacheTime; // creation time, epoch millis

    VesselInfoCache(String shipName, String shipType) {
        this.shipName = shipName;
        this.shipType = shipType;
        this.cacheTime = System.currentTimeMillis();
    }

    /** @return {@code true} once this entry is older than {@code VESSEL_CACHE_TTL}. */
    boolean isExpired() {
        return System.currentTimeMillis() - cacheTime > VESSEL_CACHE_TTL;
    }
}
}

파일 보기

@ -1,5 +1,7 @@
package gc.mda.signal_batch.domain.passage.dto;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import gc.mda.signal_batch.global.config.FlexibleLocalDateTimeDeserializer;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Builder;
@ -17,13 +19,31 @@ import java.util.List;
@AllArgsConstructor
@Schema(description = "순차 구역 통과 조회 요청")
public class SequentialPassageRequest {
@NotNull(message = "조회 시작 시간은 필수입니다")
@Schema(description = "조회 시작 시간", example = "2025-08-01T00:00:00", required = true)
@Schema(
description = """
조회 시작 시간.
지원 형식: ISO 8601 (2025-08-01T00:00:00, 2025-08-01T00:00:00Z),
표준 형식 (2025-08-01 00:00:00), 압축 형식 (20250801000000)
""",
example = "2025-08-01T00:00:00",
required = true
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime startTime;
@NotNull(message = "조회 종료 시간은 필수입니다")
@Schema(description = "조회 종료 시간", example = "2025-08-07T23:59:59", required = true)
@Schema(
description = """
조회 종료 시간.
지원 형식: ISO 8601 (2025-08-07T23:59:59, 2025-08-07T23:59:59Z),
표준 형식 (2025-08-07 23:59:59), 압축 형식 (20250807235959)
""",
example = "2025-08-07T23:59:59",
required = true
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime endTime;
@Schema(description = "조회 유형 (GRID: 해구, AREA: 사용자정의구역)", example = "GRID", required = true)

파일 보기

@ -44,11 +44,11 @@ public class SequentialAreaTrackingService {
sig_src_cd,
target_id,
haegu_no,
FIRpublic.ST_VALUE(time_bucket) OVER (
FIRST_VALUE(time_bucket) OVER (
PARTITION BY sig_src_cd, target_id, haegu_no
ORDER BY time_bucket
) as entry_time,
LApublic.ST_VALUE(time_bucket) OVER (
LAST_VALUE(time_bucket) OVER (
PARTITION BY sig_src_cd, target_id, haegu_no
ORDER BY time_bucket
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
@ -103,11 +103,11 @@ public class SequentialAreaTrackingService {
sig_src_cd,
target_id,
area_id,
FIRpublic.ST_VALUE(time_bucket) OVER (
FIRST_VALUE(time_bucket) OVER (
PARTITION BY sig_src_cd, target_id, area_id
ORDER BY time_bucket
) as entry_time,
LApublic.ST_VALUE(time_bucket) OVER (
LAST_VALUE(time_bucket) OVER (
PARTITION BY sig_src_cd, target_id, area_id
ORDER BY time_bucket
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING

파일 보기

@ -0,0 +1,56 @@
package gc.mda.signal_batch.domain.ship.controller;
import gc.mda.signal_batch.domain.ship.dto.ShipImageDto;
import gc.mda.signal_batch.domain.ship.service.ShipImageService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST endpoints for vessel photograph lookups, keyed by IMO number.
 */
@Slf4j
@RestController
@RequestMapping("/api/v1/shipimg")
@RequiredArgsConstructor
@Tag(name = "Ship Image", description = "선박 이미지 API")
public class ShipImageController {

    private final ShipImageService shipImageService;

    /**
     * Returns the image path list for the given IMO number.
     *
     * <p>Per the Operation description, the frontend derives the thumbnail URL as
     * {@code path + "_1.jpg"} and the original as {@code path + "_2.jpg"}.
     *
     * @param imo IMO number
     * @return 200 OK with the image list; an empty array when the vessel has no images
     */
    @GetMapping("/{imo}")
    @Operation(
        summary = "선박 이미지 경로 조회",
        description = "IMO 번호로 선박 이미지 경로 목록을 조회합니다. 프론트엔드에서 썸네일은 path + '_1.jpg', 원본은 path + '_2.jpg'를 사용합니다."
    )
    @ApiResponses(value = {
        @ApiResponse(
            responseCode = "200",
            description = "조회 성공 (데이터 없으면 빈 배열 반환)",
            content = @Content(array = @ArraySchema(schema = @Schema(implementation = ShipImageDto.class)))
        )
    })
    public ResponseEntity<List<ShipImageDto>> getShipImages(
            @Parameter(description = "IMO 번호", example = "9141833")
            @PathVariable Integer imo) {
        log.debug("Requesting ship images for IMO: {}", imo);
        List<ShipImageDto> images = shipImageService.getImagesByImo(imo);
        log.debug("Found {} images for IMO: {}", images.size(), imo);
        return ResponseEntity.ok(images);
    }
}

파일 보기

@ -0,0 +1,18 @@
package gc.mda.signal_batch.domain.ship.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Builder;
import lombok.Data;
import java.time.LocalDate;
/**
 * Ship image descriptor returned by the ship-image API.
 */
@Data
@Builder
public class ShipImageDto {
    // Image record id (pic_id column of signal.t_snp_ship_img)
    private Integer picId;
    // Base path; the frontend appends "_1.jpg" (thumbnail) or "_2.jpg" (original)
    private String path;
    // Copyright holder of the photograph
    private String copyright;
    // Date the photo was taken; nullable in the source table
    @JsonFormat(pattern = "yyyy-MM-dd")
    private LocalDate date;
}

파일 보기

@ -0,0 +1,58 @@
package gc.mda.signal_batch.domain.ship.repository;
import gc.mda.signal_batch.domain.ship.dto.ShipImageDto;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
import java.sql.Date;
import java.util.List;
/**
 * JDBC repository for ship image records (signal.t_snp_ship_img).
 *
 * <p>Uses the "queryJdbcTemplate" bean explicitly so reads go through the
 * query datasource.
 */
@Slf4j
@Repository
public class ShipImageRepository {

    private final JdbcTemplate queryJdbcTemplate;

    public ShipImageRepository(@Qualifier("queryJdbcTemplate") JdbcTemplate queryJdbcTemplate) {
        this.queryJdbcTemplate = queryJdbcTemplate;
    }

    // Newest photo first; rows without a photo date sort last.
    private static final String FIND_BY_IMO_SQL = """
        SELECT pic_id, copyright, dateofphoto
        FROM signal.t_snp_ship_img
        WHERE lrno = ?
        ORDER BY dateofphoto DESC NULLS LAST, pic_id DESC
        """;

    private static final RowMapper<ShipImageRawData> ROW_MAPPER = (rs, rowNum) -> {
        ShipImageRawData data = new ShipImageRawData();
        data.picId = rs.getInt("pic_id");
        data.copyright = rs.getString("copyright");
        // dateofphoto is nullable; keep null rather than inventing a date
        Date dateOfPhoto = rs.getDate("dateofphoto");
        data.dateOfPhoto = dateOfPhoto != null ? dateOfPhoto.toLocalDate() : null;
        return data;
    };

    /**
     * Fetches image rows by IMO number (the table's lrno column).
     *
     * @param imo IMO number
     * @return raw image rows, newest first; empty when none exist
     */
    public List<ShipImageRawData> findByImo(Integer imo) {
        return queryJdbcTemplate.query(FIND_BY_IMO_SQL, ROW_MAPPER, imo);
    }

    /**
     * Raw row holder used only inside the repository/service layer.
     */
    public static class ShipImageRawData {
        public Integer picId;
        public String copyright;
        public java.time.LocalDate dateOfPhoto;
    }
}

파일 보기

@ -0,0 +1,58 @@
package gc.mda.signal_batch.domain.ship.service;
import gc.mda.signal_batch.domain.ship.dto.ShipImageDto;
import gc.mda.signal_batch.domain.ship.repository.ShipImageRepository;
import gc.mda.signal_batch.domain.ship.repository.ShipImageRepository.ShipImageRawData;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Service layer for ship image lookups: fetches raw rows by IMO number and turns
 * them into API DTOs with a derived image path.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class ShipImageService {

    private final ShipImageRepository shipImageRepository;

    /**
     * Fetches every image record for the given IMO number.
     *
     * @param imo IMO number
     * @return image DTO list (empty when the vessel has no images)
     */
    public List<ShipImageDto> getImagesByImo(Integer imo) {
        return shipImageRepository.findByImo(imo).stream()
                .map(this::toDto)
                .collect(Collectors.toList());
    }

    /**
     * Converts one raw repository row into the API DTO, deriving the image path
     * from pic_id.
     *
     * Path rule: folder = pic_id / 100, e.g. pic_id=816100 -> /shipimg/8161/816100
     */
    private ShipImageDto toDto(ShipImageRawData raw) {
        int id = raw.picId;
        String imagePath = String.format("/shipimg/%d/%d", id / 100, id);
        return ShipImageDto.builder()
                .picId(raw.picId)
                .path(imagePath)
                .copyright(raw.copyright)
                .date(raw.dateOfPhoto)
                .build();
    }
}

파일 보기

@ -1,10 +1,13 @@
package gc.mda.signal_batch.domain.track.controller;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import gc.mda.signal_batch.domain.track.dto.AbnormalTrackResponse;
import gc.mda.signal_batch.domain.track.dto.AbnormalTrackStatsResponse;
import gc.mda.signal_batch.domain.track.service.AbnormalTrackService;
import gc.mda.signal_batch.global.config.FlexibleLocalDateTimeDeserializer;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@ -17,7 +20,6 @@ import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.math.BigDecimal;
import com.fasterxml.jackson.annotation.JsonFormat;
/**
@ -135,31 +137,67 @@ public class AbnormalTrackController {
@lombok.Data
static class DetectRequest {
private String tableType; // "hourly" or "daily"
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
@Schema(description = "테이블 유형", example = "hourly", allowableValues = {"hourly", "daily"})
private String tableType;
@Schema(
description = """
조회 시작 시간.
지원 형식: ISO 8601, 표준 형식 (YYYY-MM-DD HH:MM:SS), 압축 형식 (YYYYMMDDHHMMSS)
""",
example = "2026-01-20T00:00:00"
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime startTime;
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
@Schema(
description = """
조회 종료 시간.
지원 형식: ISO 8601, 표준 형식 (YYYY-MM-DD HH:MM:SS), 압축 형식 (YYYYMMDDHHMMSS)
""",
example = "2026-01-20T23:59:59"
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime endTime;
private BigDecimal minDistance; // 최소 거리 (nm)
private BigDecimal minSpeed; // 최소 평균속도 (knots)
@Schema(description = "최소 거리 (nm)", example = "10.0")
private BigDecimal minDistance;
@Schema(description = "최소 평균속도 (knots)", example = "50.0")
private BigDecimal minSpeed;
}
@lombok.Data
static class MoveTracksRequest {
private String tableType; // "hourly" or "daily"
@Schema(description = "테이블 유형", example = "hourly", allowableValues = {"hourly", "daily"})
private String tableType;
@Schema(description = "이동할 항적 목록")
private List<TrackIdentifier> tracks;
@Schema(description = "비정상 유형", example = "SPEED_ANOMALY")
private String abnormalType;
@Schema(description = "이동 사유", example = "물리적으로 불가능한 속도")
private String reason;
}
@lombok.Data
public static class TrackIdentifier {
@Schema(description = "신호 소스 코드", example = "000001")
private String sigSrcCd;
@Schema(description = "타겟 ID", example = "440123456")
private String targetId;
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME)
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
@Schema(
description = """
시간 버킷.
지원 형식: ISO 8601, 표준 형식 (YYYY-MM-DD HH:MM:SS), 압축 형식 (YYYYMMDDHHMMSS)
""",
example = "2026-01-20T10:00:00"
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime timeBucket;
}

파일 보기

@ -2,12 +2,15 @@ package gc.mda.signal_batch.domain.vessel.dto;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* 압축된 선박 궤적 데이터 전송용 DTO
* LineStringM 대신 단순 배열로 전송하여 프론트엔드 파싱 부하 제거
@ -17,32 +20,80 @@ import java.util.List;
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
@Schema(description = "선박별 병합된 항적 데이터 (WebSocket/REST 공통)")
public class CompactVesselTrack {
private String vesselId; // sig_src_cd + "_" + target_id
private String sigSrcCd;
private String targetId;
private String nationalCode; // National code based on sigSrcCd and targetId
// 궤적 데이터 (배열 형태)
private List<double[]> geometry; // [[lon, lat], ...]
// MIGRATION_V2: 호환성을 위해 모두 String으로 처리
@JsonProperty("timestamps")
private List<String> timestamps; // String 리스트 (Unix timestamp도 String으로 변환)
private List<Double> speeds; // [12.5, 13.2, ...]
// 메타데이터
private Double totalDistance; // 전체 이동거리 (nm)
private Double avgSpeed; // 평균속도 (knots)
private Double maxSpeed; // 최대속도 (knots)
private Integer pointCount; // 포인트
// 선박 정보
private String shipName; // 선명
private String shipType; // 선종
private String shipKindCode; // 선박 종류 코드
// 통합선박 정보
private String integrationTargetId; // 통합선박 ID (형식: AIS_ENAV_VPASS_VTS-AIS_D-MF/HF)
@Schema(description = "선박 고유 ID (sigSrcCd_targetId)", example = "000001_440113620")
private String vesselId;
@Schema(description = "신호 소스 코드", example = "000001")
private String sigSrcCd;
@Schema(description = "타겟 ID (MMSI 등)", example = "440113620")
private String targetId;
@Schema(description = "국적 코드 (MID 기반, MMSI 앞 3자리로 판별)", example = "KR")
private String nationalCode;
@ArraySchema(
schema = @Schema(
description = "좌표 [경도, 위도]",
type = "array",
example = "[127.0638, 34.227527]"
),
arraySchema = @Schema(
description = "궤적 좌표 배열 [[lon, lat], ...] - 시간순 정렬",
example = "[[127.0638, 34.227527], [127.063303, 34.226685], [127.063008, 34.226203]]"
)
)
private List<double[]> geometry;
@ArraySchema(
schema = @Schema(description = "Unix timestamp (초)", example = "1768878312"),
arraySchema = @Schema(
description = "각 좌표에 대응하는 Unix timestamp 배열",
example = "[\"1768878312\", \"1768878332\", \"1768878342\"]"
)
)
@JsonProperty("timestamps")
private List<String> timestamps;
@ArraySchema(
schema = @Schema(description = "속도 (knots)", example = "10.4"),
arraySchema = @Schema(
description = "각 좌표에 대응하는 속도 배열 (knots)",
example = "[10.4, 10.4, 10.67]"
)
)
private List<Double> speeds;
@Schema(description = "전체 이동거리 (해리)", example = "1.64")
private Double totalDistance;
@Schema(description = "평균 속도 (knots)", example = "10.54")
private Double avgSpeed;
@Schema(description = "최대 속도 (knots)", example = "10.9")
private Double maxSpeed;
@Schema(description = "궤적 포인트 수", example = "38")
private Integer pointCount;
@Schema(description = "선박명", example = "SAM SUNG 2HO")
private String shipName;
@Schema(description = "선종 코드 (AIS ship type)", example = "74")
private String shipType;
@Schema(
description = "선박 종류 코드 (000020:어선, 000021:함정, 000022:여객선, 000023:화물선, 000024:유조선, 000025:관공선, 000027:기타)",
example = "000023"
)
private String shipKindCode;
@Schema(
description = "통합선박 ID (동일 선박의 다중 신호원 통합 식별자)",
example = "440113620___440113620_"
)
private String integrationTargetId;
}

파일 보기

@ -8,7 +8,7 @@ import lombok.NoArgsConstructor;
/**
* 통합선박 정보 DTO
* gis.t_ship_integration_sub 테이블 매핑
* signal.t_ship_integration_sub 테이블 매핑
*/
@Data
@Builder

파일 보기

@ -16,37 +16,44 @@ import java.time.LocalDateTime;
@AllArgsConstructor
@Schema(description = "최근 위치 업데이트된 선박 정보")
public class RecentVesselPositionDto {
@Schema(description = "신호원 코드", example = "000001")
@Schema(
description = "신호원 코드 (000001:AIS, 000002:LRIT, 000003:VPASS, 000004:VTS-AIS 등)",
example = "000001"
)
private String sigSrcCd;
@Schema(description = "대상 ID", example = "440331240")
@Schema(description = "대상 ID (MMSI: 9자리, 한국선박 440/441로 시작)", example = "440113620")
private String targetId;
@Schema(description = "경도", example = "126.9779")
@Schema(description = "경도 (WGS84)", example = "127.0638")
private Double lon;
@Schema(description = "위도", example = "37.5665")
@Schema(description = "위도 (WGS84)", example = "34.227527")
private Double lat;
@Schema(description = "대지속도 (knots)", example = "12.5")
@Schema(description = "대지속도 (knots)", example = "10.4")
private BigDecimal sog;
@Schema(description = "대지침로 (도)", example = "180.0")
@Schema(description = "대지침로 (도, 0~360)", example = "215.3")
private BigDecimal cog;
@Schema(description = "선박명", example = "SEOM SARANG 11")
@Schema(description = "선박명", example = "SAM SUNG 2HO")
private String shipNm;
@Schema(description = "선박 유형", example = "60")
@Schema(description = "선박 유형 (AIS ship type)", example = "74")
private String shipTy;
@Schema(description = "선박 종류 코드", example = "000022")
@Schema(
description = "선박 종류 코드 (000020:어선, 000022:여객선, 000023:화물선 등)",
example = "000023"
)
private String shipKindCode;
@Schema(description = "국가 코드", example = "440")
@Schema(description = "국가 코드 (MID 기반, KR=한국)", example = "KR")
private String nationalCode;
@Schema(description = "최종 업데이트 시간", example = "2025-08-28 15:30:00")
@Schema(description = "최종 업데이트 시간", example = "2026-01-20 12:05:00")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private LocalDateTime lastUpdate;
}

파일 보기

@ -1,6 +1,7 @@
package gc.mda.signal_batch.domain.vessel.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Data;
@ -9,28 +10,44 @@ import java.time.LocalDateTime;
@Data
@Builder
@Schema(description = "항적 세그먼트 (V1 API용, WKT 기반)")
public class TrackResponse {
@JsonProperty("sig_src_cd")
@Schema(
description = "신호 소스 코드 (000001:AIS, 000002:LRIT, 000003:VPASS, 000004:VTS-AIS 등)",
example = "000001"
)
private String sigSrcCd;
@JsonProperty("target_id")
@Schema(description = "타겟 ID (MMSI: 9자리, 한국선박 440/441로 시작)", example = "440113620")
private String targetId;
@JsonProperty("track_geom")
private String trackGeom; // WKT format
@Schema(
description = "항적 geometry (WKT LineStringM 형식: X=경도, Y=위도, M=Unix timestamp)",
example = "LINESTRING M(127.0638 34.227527 1768878312, 127.063303 34.226685 1768878332, 127.063008 34.226203 1768878342)"
)
private String trackGeom;
@JsonProperty("distance_nm")
@Schema(description = "이동거리 (해리)", example = "1.64")
private BigDecimal distanceNm;
@JsonProperty("avg_speed")
@Schema(description = "평균 속도 (knots)", example = "10.54")
private BigDecimal avgSpeed;
@JsonProperty("max_speed")
@Schema(description = "최대 속도 (knots)", example = "10.9")
private BigDecimal maxSpeed;
@JsonProperty("point_count")
@Schema(description = "포인트 수", example = "38")
private Integer pointCount;
@JsonProperty("time_bucket")
@Schema(description = "시간 버킷 (5분/1시간/1일 집계 단위)", example = "2026-01-20T12:00:00")
private LocalDateTime timeBucket;
}

파일 보기

@ -0,0 +1,56 @@
package gc.mda.signal_batch.domain.vessel.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
/**
 * End-of-bucket vessel position.
 *
 * Purpose: stores the previous bucket's end position for bucket-jump detection.
 *
 * Data flow:
 * 1. When 5-minute aggregation completes, the vessel's bucket end position is stored.
 * 2. When the next bucket is processed, it is compared against this position to
 *    detect jumps.
 * 3. On a cache miss, the t_vessel_latest_position table is used as a fallback source.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class VesselBucketPositionDto {

    /**
     * Signal-source code
     */
    private String sigSrcCd;

    /**
     * Target id
     */
    private String targetId;

    /**
     * Longitude at bucket end
     */
    private Double endLon;

    /**
     * Latitude at bucket end
     */
    private Double endLat;

    /**
     * Bucket end time
     */
    private LocalDateTime endTime;

    /**
     * Cache key for this vessel ("sigSrcCd:targetId").
     */
    public String getVesselKey() {
        return sigSrcCd + ":" + targetId;
    }
}

파일 보기

@ -1,6 +1,7 @@
package gc.mda.signal_batch.domain.vessel.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Data;
@ -8,16 +9,22 @@ import java.math.BigDecimal;
@Data
@Builder
@Schema(description = "해구/영역별 선박 통계")
public class VesselStatsResponse {
@JsonProperty("vessel_count")
@Schema(description = "고유 선박 수 (해당 영역 내 탐지된 선박)", example = "156")
private Integer vesselCount;
@JsonProperty("total_distance")
@Schema(description = "총 이동거리 (해리, 모든 선박 합산)", example = "1248.73")
private BigDecimal totalDistance;
@JsonProperty("avg_speed")
@Schema(description = "평균 속도 (knots)", example = "10.54")
private BigDecimal avgSpeed;
@JsonProperty("active_tracks")
@Schema(description = "활성 항적 수 (현재 이동 중인 선박)", example = "89")
private Integer activeTracks;
}

파일 보기

@ -1,5 +1,8 @@
package gc.mda.signal_batch.domain.vessel.dto;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import gc.mda.signal_batch.global.config.FlexibleLocalDateTimeDeserializer;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.Builder;
import lombok.AllArgsConstructor;
@ -12,25 +15,63 @@ import java.util.List;
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Schema(description = "선박별 항적 조회 요청")
public class VesselTracksRequest {
@Schema(
description = """
조회 시작 시간.
지원 형식: ISO 8601 (2026-01-20T12:00:00), UTC (2026-01-20T12:00:00Z),
표준 형식 (2026-01-20 12:00:00), 압축 형식 (20260120120000)
""",
example = "2026-01-20T12:00:00",
required = true
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime startTime;
@Schema(
description = """
조회 종료 시간.
지원 형식: ISO 8601 (2026-01-20T12:10:00), UTC (2026-01-20T12:10:00Z),
표준 형식 (2026-01-20 12:10:00), 압축 형식 (20260120121000)
""",
example = "2026-01-20T12:10:00",
required = true
)
@JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
private LocalDateTime endTime;
@Schema(description = "조회할 선박 목록", required = true)
private List<VesselIdentifier> vessels;
/**
* 통합선박신호 모드
* "0": 기본 모드 - 전체 항적 반환 (기본값)
* "1": 통합 모드 - 동일 선박의 여러 신호 최고 우선순위만 반환
*/
@Schema(
description = "통합선박 필터링 모드 (0: 전체 항적, 1: 통합선박 우선순위 적용)",
example = "0",
allowableValues = {"0", "1"}
)
@Builder.Default
private String isIntegration = "0";
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Schema(description = "선박 식별자")
public static class VesselIdentifier {
@Schema(
description = "신호 소스 코드 (000001:AIS, 000002:LRIT, 000003:VPASS, 000004:VTS-AIS, 000019:항공기 등)",
example = "000001",
required = true
)
private String sigSrcCd;
@Schema(
description = "타겟 ID (MMSI: 9자리, 한국선박 440/441로 시작)",
example = "440113620",
required = true
)
private String targetId;
}
}

파일 보기

@ -167,7 +167,7 @@ public class IntegrationVesselService {
SELECT intgr_seq, ais, enav, vpass, vts_ais, d_mf_hf,
ais_ship_nm, enav_ship_nm, vpass_ship_nm, vts_ais_ship_nm, d_mf_hf_ship_nm,
integration_ship_ty
FROM gis.t_ship_integration_sub
FROM signal.t_ship_integration_sub
""";
List<IntegrationVessel> vessels = jdbcTemplate.query(sql, (rs, rowNum) ->

파일 보기

@ -0,0 +1,303 @@
package gc.mda.signal_batch.domain.vessel.service;
import com.github.benmanes.caffeine.cache.Cache;
import gc.mda.signal_batch.domain.vessel.dto.VesselBucketPositionDto;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.cache.CacheManager;
import org.springframework.cache.caffeine.CaffeineCache;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Service;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
/**
* 선박 이전 버킷 종료 위치 캐시 관리 서비스
*
* 용도: 버킷 점프 검출을 위한 이전 버킷 종료 위치 캐싱
*
* 캐시 구조:
* - Key: "{sigSrcCd}:{targetId}" (: "000001:440123456")
* - Value: VesselBucketPositionDto (endLon, endLat, endTime)
* - TTL: 120분 (위성 AIS 고려)
*
* 데이터 흐름:
* 1. 5분 집계 시작 캐시에서 이전 버킷 위치 조회
* 2. 캐시 미스 t_vessel_latest_position 테이블에서 최근 2시간 데이터 조회 (fallback)
* 3. 현재 버킷과 이전 버킷 점프 검출
* 4. 처리 완료 현재 버킷 종료 위치를 캐시에 업데이트
*/
@Slf4j
@Service
@RequiredArgsConstructor
public class VesselPreviousBucketCache {
private final CacheManager cacheManager;

// NOTE(review): with @RequiredArgsConstructor, a field-level @Qualifier is NOT copied
// onto the generated constructor parameter unless lombok.config declares
// lombok.copyableAnnotations for it — verify the "queryJdbcTemplate" bean is actually
// the one injected here.
@Qualifier("queryJdbcTemplate")
private final JdbcTemplate queryJdbcTemplate;

private static final String CACHE_NAME = "vesselPreviousBuckets";

// One-shot guard: the full DB fallback load runs at most once per process
private volatile boolean fallbackLoaded = false;

// DB lookup statistics (mutated only inside the synchronized fallback block)
private volatile int totalDbQueriesCount = 0;
private volatile int totalVesselsLoadedFromDb = 0;

/**
 * Builds the cache key for a vessel ("sigSrcCd:targetId").
 */
private String createKey(String sigSrcCd, String targetId) {
    return sigSrcCd + ":" + targetId;
}
/**
 * Returns the cached end-of-bucket position for a single vessel.
 *
 * @param sigSrcCd signal-source code
 * @param targetId target id
 * @return previous bucket end position, or {@code null} on a cache miss
 */
public VesselBucketPositionDto get(String sigSrcCd, String targetId) {
    String key = createKey(sigSrcCd, targetId);
    org.springframework.cache.Cache cache = getCache();
    if (cache == null) {
        return null;
    }
    org.springframework.cache.Cache.ValueWrapper wrapper = cache.get(key);
    // Read the wrapped value exactly once (the original called wrapper.get() twice,
    // which is redundant and fragile against concurrent eviction); pattern-matching
    // instanceof keeps this consistent with getBatch().
    if (wrapper != null && wrapper.get() instanceof VesselBucketPositionDto position) {
        return position;
    }
    return null;
}
/**
 * Batch lookup of previous-bucket positions: cache first, then a one-time DB fallback.
 *
 * <p>The DB fallback (last 2 hours of t_vessel_latest_position) runs at most once per
 * process lifetime, guarded by double-checked locking on the volatile
 * {@code fallbackLoaded} flag. Misses after that first load are intentionally NOT
 * re-queried. All loaded rows are cached, not just the requested keys.
 *
 * @param vesselKeys vessel keys to look up ("sigSrcCd:targetId")
 * @return vessel key -> previous bucket position (keys with no data are absent)
 */
public Map<String, VesselBucketPositionDto> getBatch(List<String> vesselKeys) {
    Map<String, VesselBucketPositionDto> result = new HashMap<>();
    if (vesselKeys == null || vesselKeys.isEmpty()) {
        return result;
    }
    ConcurrentMap<Object, Object> nativeCache = getNativeCache();
    int cacheHits = 0;   // NOTE(review): collected but currently unused below
    int cacheMisses = 0;
    // 1. Serve whatever the native cache already holds.
    for (String vesselKey : vesselKeys) {
        if (nativeCache != null && nativeCache.containsKey(vesselKey)) {
            Object value = nativeCache.get(vesselKey);
            if (value instanceof VesselBucketPositionDto position) {
                result.put(vesselKey, position);
                cacheHits++;
            }
        } else {
            cacheMisses++;
        }
    }
    // Per-call cache logging removed (job-level statistics are used instead).
    // 2. On a miss, load the last 2 hours from DB — exactly once per process.
    if (cacheMisses > 0 && !fallbackLoaded) {
        synchronized (this) {
            if (!fallbackLoaded) {
                long startTime = System.currentTimeMillis();
                Map<String, VesselBucketPositionDto> dbResults = loadFromDatabaseBatch();
                long elapsed = System.currentTimeMillis() - startTime;
                // Cache every loaded row, requested or not, to warm the cache.
                dbResults.values().forEach(this::put);
                // Fill in the requested keys that the cache did not have.
                for (String vesselKey : vesselKeys) {
                    if (!result.containsKey(vesselKey) && dbResults.containsKey(vesselKey)) {
                        result.put(vesselKey, dbResults.get(vesselKey));
                    }
                }
                totalDbQueriesCount++;
                totalVesselsLoadedFromDb += dbResults.size();
                fallbackLoaded = true;
                log.info("Previous bucket cache initialized: loaded {} vessels from DB in {}ms",
                        dbResults.size(), elapsed);
            }
        }
    }
    return result;
}
/**
* DB에서 최근 2시간 데이터 일괄 조회 (Fallback)
*/
private Map<String, VesselBucketPositionDto> loadFromDatabaseBatch() {
Map<String, VesselBucketPositionDto> result = new HashMap<>();
// t_vessel_latest_position 테이블에서 최근 2시간 데이터 조회
String sql = """
SELECT
sig_src_cd,
target_id,
lon,
lat,
last_update
FROM signal.t_vessel_latest_position
WHERE last_update >= NOW() - INTERVAL '2 hours'
AND sig_src_cd NOT IN ('000004', '000005')
""";
try {
List<VesselBucketPositionDto> positions = queryJdbcTemplate.query(sql, new BucketPositionRowMapper());
for (VesselBucketPositionDto position : positions) {
result.put(position.getVesselKey(), position);
}
log.debug("Queried {} vessel positions from t_vessel_latest_position (last 2 hours)", positions.size());
} catch (Exception e) {
log.error("Failed to load previous bucket positions from DB", e);
}
return result;
}
/**
* 단일 선박 위치 캐시 저장
*/
public void put(VesselBucketPositionDto position) {
if (position == null || position.getSigSrcCd() == null || position.getTargetId() == null) {
log.warn("Invalid position data, skipping cache: {}", position);
return;
}
String key = createKey(position.getSigSrcCd(), position.getTargetId());
getCache().put(key, position);
}
/**
* 여러 선박 위치 일괄 캐시 저장
*/
public void putAll(List<VesselBucketPositionDto> positions) {
if (positions == null || positions.isEmpty()) {
log.debug("No positions to cache");
return;
}
int count = 0;
for (VesselBucketPositionDto position : positions) {
if (position.getSigSrcCd() != null && position.getTargetId() != null) {
put(position);
count++;
}
}
log.debug("Cached {} vessel bucket positions (Total cache size: {})",
count, getCacheSize());
}
/**
* 캐시 전체 삭제
*/
public void clear() {
org.springframework.cache.Cache cache = getCache();
if (cache != null) {
cache.clear();
fallbackLoaded = false;
log.info("Previous bucket cache cleared");
}
}
/**
* Job 시작 플래그 리셋
*/
public void resetFallbackFlag() {
fallbackLoaded = false;
totalDbQueriesCount = 0;
totalVesselsLoadedFromDb = 0;
log.debug("Fallback flag and statistics reset");
}
/**
* Job 완료 통계 출력
*/
public void logJobStatistics() {
if (totalDbQueriesCount > 0) {
log.info("Bucket jump detection stats - Cache size: {}, DB queries: {}, Vessels loaded: {}",
getCacheSize(), totalDbQueriesCount, totalVesselsLoadedFromDb);
}
}
/**
* Spring Cache 객체 가져오기
*/
private org.springframework.cache.Cache getCache() {
return cacheManager.getCache(CACHE_NAME);
}
/**
* Caffeine Cache 네이티브 객체 가져오기
*/
private Cache<Object, Object> getCaffeineCache() {
org.springframework.cache.Cache cache = getCache();
if (cache instanceof CaffeineCache caffeineCache) {
return caffeineCache.getNativeCache();
}
return null;
}
/**
* ConcurrentMap 가져오기 (배치 조회용)
*/
private ConcurrentMap<Object, Object> getNativeCache() {
Cache<Object, Object> caffeineCache = getCaffeineCache();
if (caffeineCache != null) {
return caffeineCache.asMap();
}
return null;
}
/**
* 현재 캐시 크기 조회
*/
private int getCacheSize() {
ConcurrentMap<Object, Object> nativeCache = getNativeCache();
return nativeCache != null ? nativeCache.size() : 0;
}
/**
* RowMapper 구현
*/
private static class BucketPositionRowMapper implements RowMapper<VesselBucketPositionDto> {
@Override
public VesselBucketPositionDto mapRow(ResultSet rs, int rowNum) throws SQLException {
return VesselBucketPositionDto.builder()
.sigSrcCd(rs.getString("sig_src_cd"))
.targetId(rs.getString("target_id"))
.endLon(rs.getDouble("lon"))
.endLat(rs.getDouble("lat"))
.endTime(rs.getTimestamp("last_update") != null ?
rs.getTimestamp("last_update").toLocalDateTime() : null)
.build();
}
}
}

파일 보기

@ -1,17 +1,18 @@
package gc.mda.signal_batch.global.config;
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.support.DefaultBatchConfiguration;
import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.explore.support.JobExplorerFactoryBean;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.TaskExecutorJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.context.annotation.Profile;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
@ -21,10 +22,37 @@ import javax.sql.DataSource;
import java.util.concurrent.ThreadPoolExecutor;
@Configuration
@EnableBatchProcessing
@Profile("!query") // query 프로파일에서는 배치 처리 비활성화
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class BatchConfig {
public class BatchConfig extends DefaultBatchConfiguration {
@Value("${spring.batch.jdbc.table-prefix:BATCH_}")
private String tablePrefix;
@Autowired
@Qualifier("batchDataSource")
@Lazy
private DataSource batchDataSource;
@Autowired
@Qualifier("batchTransactionManager")
@Lazy
private PlatformTransactionManager batchTransactionManager;
@Override
protected DataSource getDataSource() {
return batchDataSource;
}
@Override
protected PlatformTransactionManager getTransactionManager() {
return batchTransactionManager;
}
@Override
protected String getTablePrefix() {
return tablePrefix;
}
@Bean
public TaskExecutor batchTaskExecutor() {
@ -77,14 +105,4 @@ public class BatchConfig {
postProcessor.setJobRegistry(jobRegistry);
return postProcessor;
}
@Bean
public JobExplorer jobExplorer(@Qualifier("batchDataSource") DataSource dataSource,
@Qualifier("batchTransactionManager") PlatformTransactionManager transactionManager) throws Exception {
JobExplorerFactoryBean factory = new JobExplorerFactoryBean();
factory.setDataSource(dataSource);
factory.setTransactionManager(transactionManager);
factory.afterPropertiesSet();
return factory.getObject();
}
}
}

파일 보기

@ -32,21 +32,40 @@ public class CacheConfig {
@Value("${vessel.batch.cache.latest-position.max-size:50000}")
private int cacheMaxSize;
@Value("${vessel.batch.cache.previous-bucket.ttl-minutes:120}")
private int previousBucketTtlMinutes;
@Value("${vessel.batch.cache.previous-bucket.max-size:50000}")
private int previousBucketMaxSize;
@Bean
public CacheManager cacheManager() {
CaffeineCacheManager cacheManager = new CaffeineCacheManager("vesselLatestPositions");
cacheManager.setCaffeine(caffeineCacheBuilder());
CaffeineCacheManager cacheManager = new CaffeineCacheManager();
log.info("Initialized Caffeine CacheManager with TTL={}min, MaxSize={}",
cacheTtlMinutes, cacheMaxSize);
// 1. vesselLatestPositions 캐시 (최신 위치 API용)
cacheManager.registerCustomCache("vesselLatestPositions",
latestPositionCacheBuilder().build());
// 2. vesselPreviousBuckets 캐시 (버킷 점프 검출용)
cacheManager.registerCustomCache("vesselPreviousBuckets",
previousBucketCacheBuilder().build());
log.info("Initialized Caffeine CacheManager:");
log.info(" - vesselLatestPositions: TTL={}min, MaxSize={}", cacheTtlMinutes, cacheMaxSize);
log.info(" - vesselPreviousBuckets: TTL={}min, MaxSize={}", previousBucketTtlMinutes, previousBucketMaxSize);
return cacheManager;
}
private Caffeine<Object, Object> caffeineCacheBuilder() {
/**
* 최신 위치 캐시 빌더 (API 응답용)
*/
private Caffeine<Object, Object> latestPositionCacheBuilder() {
return Caffeine.newBuilder()
// TTL: 60분 자동 만료 (비활성 선박 제거)
.expireAfterWrite(cacheTtlMinutes, TimeUnit.MINUTES)
// TTL: 마지막 접근 60분 (읽기/쓰기 리셋)
// - 활성 선박: API 조회 또는 스케줄러 갱신 TTL 계속 리셋
// - 비활성 선박: 60분 미접근 자동 제거
.expireAfterAccess(cacheTtlMinutes, TimeUnit.MINUTES)
// 최대 선박 제한 (메모리 보호)
.maximumSize(cacheMaxSize)
@ -57,4 +76,24 @@ public class CacheConfig {
// 초기 용량 설정 (rehashing 최소화)
.initialCapacity(1000);
}
/**
* 이전 버킷 위치 캐시 빌더 (버킷 점프 검출용)
*/
private Caffeine<Object, Object> previousBucketCacheBuilder() {
return Caffeine.newBuilder()
// TTL: 마지막 접근 120분 (읽기/쓰기 리셋)
// - 활성 선박: 5분마다 접근 TTL 계속 갱신 영구 유지
// - 비활성 선박: 120분 미접근 자동 제거
.expireAfterAccess(previousBucketTtlMinutes, TimeUnit.MINUTES)
// 최대 선박 제한
.maximumSize(previousBucketMaxSize)
// 통계 수집 활성화
.recordStats()
// 초기 용량 설정
.initialCapacity(1000);
}
}

파일 보기

@ -6,7 +6,7 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
@Configuration
@Profile("prod")
@Profile({"prod", "prod-mpr", "dev", "local"})
@ConfigurationProperties(prefix = "spring.datasource")
@Data
public class DataSourceConfigProperties {

파일 보기

@ -0,0 +1,47 @@
package gc.mda.signal_batch.global.config;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import gc.mda.signal_batch.global.util.DateTimeParseUtil;
import java.io.IOException;
import java.time.LocalDateTime;
/**
 * Jackson deserializer that accepts a {@link LocalDateTime} in several formats.
 *
 * <p>Supported formats (delegated to {@link DateTimeParseUtil}):</p>
 * <ul>
 *   <li>ISO 8601: 2026-01-20T10:00:00</li>
 *   <li>ISO 8601 (UTC): 2026-01-20T10:00:00Z</li>
 *   <li>ISO 8601 (offset): 2026-01-20T10:00:00+09:00</li>
 *   <li>Space separated: 2026-01-20 10:00:00</li>
 *   <li>Seconds omitted: 2026-01-20T10:00, 2026-01-20 10:00</li>
 *   <li>Compact: 20260120100000</li>
 * </ul>
 *
 * <p>Usage:</p>
 * <pre>{@code
 * @JsonDeserialize(using = FlexibleLocalDateTimeDeserializer.class)
 * private LocalDateTime startTime;
 * }</pre>
 *
 * @see DateTimeParseUtil
 */
public class FlexibleLocalDateTimeDeserializer extends JsonDeserializer<LocalDateTime> {

    @Override
    public LocalDateTime deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
        // Null and blank inputs deserialize to null rather than failing.
        final String raw = p.getValueAsString();
        if (raw == null || raw.isBlank()) {
            return null;
        }
        try {
            return DateTimeParseUtil.parse(raw);
        } catch (IllegalArgumentException cause) {
            // Surface as IOException so Jackson reports it as a mapping failure.
            throw new IOException("날짜/시간 파싱 실패: " + raw + ". " + cause.getMessage(), cause);
        }
    }
}

파일 보기

@ -15,7 +15,7 @@ import org.springframework.transaction.PlatformTransactionManager;
import javax.sql.DataSource;
@Configuration
@Profile("prod")
@Profile({"prod", "prod-mpr"})
public class ProdDataSourceConfig {
private final DataSourceConfigProperties properties;

파일 보기

@ -57,15 +57,15 @@ import org.springframework.context.annotation.Configuration;
```
""",
contact = @Contact(
name = "Signal Batch Team",
email = "signal-batch@mda.gc"
name = "GC MDA Team",
email = "htlee@gcsc.co.kr"
),
license = @License(
name = "Internal Use Only"
)
),
servers = {
@Server(url = "http://10.26.252.48:8090", description = "Development Server (QueryDB)"),
@Server(url = "http://10.26.252.51:8090", description = "Development Server (QueryDB)"),
@Server(url = "http://10.26.252.39:8090", description = "Production Server"),
@Server(url = "http://localhost:8090", description = "Local Development Server")
}
@ -77,14 +77,27 @@ public class SwaggerConfig {
return GroupedOpenApi.builder()
.group("1-track-api")
.displayName("항적 조회 API")
.pathsToMatch("/api/v1/tracks/**", "/api/v1/haegu/**", "/api/v1/areas/**", "/api/v1/passages/**", "/api/v1/vessels/**")
.pathsToMatch(
"/api/v1/tracks/**", "/api/v1/haegu/**", "/api/v1/areas/**",
"/api/v1/passages/**", "/api/v1/vessels/**",
"/api/v2/**"
)
.build();
}
/**
 * Swagger group for the ship image endpoints (/api/v1/shipimg/**).
 *
 * @return grouped OpenAPI definition displayed as "선박 이미지 API"
 */
@Bean
public GroupedOpenApi shipImageApi() {
return GroupedOpenApi.builder()
.group("2-ship-image-api")
.displayName("선박 이미지 API")
.pathsToMatch("/api/v1/shipimg/**")
.build();
}
@Bean
public GroupedOpenApi abnormalTrackApi() {
return GroupedOpenApi.builder()
.group("2-abnormal-track-api")
.group("3-abnormal-track-api")
.displayName("비정상 항적 검출 API")
.pathsToMatch("/api/v1/abnormal-tracks/**")
.build();
@ -93,7 +106,7 @@ public class SwaggerConfig {
@Bean
public GroupedOpenApi tileApi() {
return GroupedOpenApi.builder()
.group("3-tile-api")
.group("4-tile-api")
.displayName("타일 집계 API")
.pathsToMatch("/api/v1/tiles/**", "/api/tiles/**")
.build();
@ -102,7 +115,7 @@ public class SwaggerConfig {
@Bean
public GroupedOpenApi performanceApi() {
return GroupedOpenApi.builder()
.group("4-performance-api")
.group("5-performance-api")
.displayName("성능 최적화 API")
.pathsToMatch("/api/v1/performance/**")
.build();
@ -111,7 +124,7 @@ public class SwaggerConfig {
@Bean
public GroupedOpenApi adminApi() {
return GroupedOpenApi.builder()
.group("5-admin-api")
.group("6-admin-api")
.displayName("관리자 API")
.pathsToMatch("/admin/**")
.build();
@ -120,9 +133,27 @@ public class SwaggerConfig {
@Bean
public GroupedOpenApi monitoringApi() {
return GroupedOpenApi.builder()
.group("6-monitoring-api")
.group("7-monitoring-api")
.displayName("모니터링 API")
.pathsToMatch("/monitor/**", "/actuator/**", "/api/websocket/**")
.pathsToMatch("/monitor/**", "/actuator/**", "/api/websocket/**", "/api/monitoring/**")
.build();
}
/**
 * Swagger group for the data migration endpoints (/api/migration/**).
 *
 * @return grouped OpenAPI definition displayed as "마이그레이션 API"
 */
@Bean
public GroupedOpenApi migrationApi() {
return GroupedOpenApi.builder()
.group("8-migration-api")
.displayName("마이그레이션 API")
.pathsToMatch("/api/migration/**")
.build();
}
/**
 * Swagger group for the debug endpoints (/api/debug/**).
 *
 * @return grouped OpenAPI definition displayed as "디버그 API"
 */
@Bean
public GroupedOpenApi debugApi() {
return GroupedOpenApi.builder()
.group("9-debug-api")
.displayName("디버그 API")
.pathsToMatch("/api/debug/**")
.build();
}
}

파일 보기

@ -0,0 +1,176 @@
package gc.mda.signal_batch.global.util;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Utility for parsing date/time strings in a variety of formats.
 *
 * <p>Supported formats:</p>
 * <ul>
 *   <li>ISO 8601 (with T): 2026-01-20T10:00:00</li>
 *   <li>ISO 8601 with timezone: 2026-01-20T10:00:00Z, 2026-01-20T10:00:00+09:00</li>
 *   <li>Space separated: 2026-01-20 10:00:00</li>
 *   <li>Seconds omitted: 2026-01-20T10:00, 2026-01-20 10:00</li>
 *   <li>Compact: 20260120100000 (YYYYMMDDHHMMSS)</li>
 *   <li>Compact, seconds omitted: 202601201000 (YYYYMMDDHHMM)</li>
 * </ul>
 *
 * <p>Examples:</p>
 * <pre>{@code
 * LocalDateTime dt1 = DateTimeParseUtil.parse("2026-01-20T10:00:00");
 * LocalDateTime dt2 = DateTimeParseUtil.parse("2026-01-20 10:00:00");
 * LocalDateTime dt3 = DateTimeParseUtil.parse("20260120100000");
 * }</pre>
 */
public class DateTimeParseUtil {

    /** Standard "yyyy-MM-dd HH:mm:ss" formatter, built once (DateTimeFormatter is thread-safe). */
    private static final DateTimeFormatter STANDARD_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Zone-less candidate formats, in priority order.
     * Zoned formats are handled separately in {@link #parse(String)}, so they are
     * no longer carried in this list just to be skipped inside the loop.
     */
    private static final List<DateTimeFormatter> LOCAL_FORMATTERS = List.of(
            // ISO 8601 standard
            DateTimeFormatter.ISO_LOCAL_DATE_TIME,
            // Standard format with space
            STANDARD_FORMAT,
            // Without seconds
            DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm"),
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm"),
            // Compact format (YYYYMMDDHHMMSS)
            DateTimeFormatter.ofPattern("yyyyMMddHHmmss"),
            // Compact format without seconds (YYYYMMDDHHMM)
            DateTimeFormatter.ofPattern("yyyyMMddHHmm")
    );

    /** Formats that carry an explicit zone or UTC offset. */
    private static final List<DateTimeFormatter> ZONED_FORMATTERS = List.of(
            DateTimeFormatter.ISO_ZONED_DATE_TIME,
            DateTimeFormatter.ISO_OFFSET_DATE_TIME
    );

    /** Trailing "+HH:MM" / "-HH:MM" offset detector, compiled once instead of per call. */
    private static final Pattern OFFSET_SUFFIX = Pattern.compile("[+-]\\d{2}:\\d{2}$");

    private DateTimeParseUtil() {
        // Utility class - prevent instantiation
    }

    /**
     * Parses a date/time string in any of the supported formats.
     *
     * <p>Strings containing a 'Z' or ending in an offset are first tried as
     * zoned values; the zone is then dropped, keeping the local wall-clock
     * part (no timezone conversion is performed).</p>
     *
     * @param dateTimeString the string to parse
     * @return the parsed LocalDateTime
     * @throws IllegalArgumentException if the string is blank or in an unsupported format
     */
    public static LocalDateTime parse(String dateTimeString) {
        if (dateTimeString == null || dateTimeString.isBlank()) {
            throw new IllegalArgumentException("날짜/시간 문자열이 null이거나 비어있습니다.");
        }
        String trimmed = dateTimeString.trim();

        // Zoned input: keep the local date-time part as-is.
        if (trimmed.contains("Z") || OFFSET_SUFFIX.matcher(trimmed).find()) {
            for (DateTimeFormatter formatter : ZONED_FORMATTERS) {
                try {
                    return ZonedDateTime.parse(trimmed, formatter).toLocalDateTime();
                } catch (DateTimeParseException ignored) {
                    // Try next formatter
                }
            }
            // Fall through: a stray 'Z' may simply be part of a malformed string,
            // in which case the local formatters below will also reject it.
        }

        for (DateTimeFormatter formatter : LOCAL_FORMATTERS) {
            try {
                return LocalDateTime.parse(trimmed, formatter);
            } catch (DateTimeParseException ignored) {
                // Try next formatter
            }
        }

        throw new IllegalArgumentException(
                "지원하지 않는 날짜/시간 형식입니다: " + dateTimeString + "\n" +
                "지원 형식: ISO 8601 (2026-01-20T10:00:00, 2026-01-20T10:00:00Z), " +
                "표준 형식 (2026-01-20 10:00:00), 압축 형식 (20260120100000)"
        );
    }

    /**
     * Parses a date/time string, returning null on failure instead of throwing.
     *
     * @param dateTimeString the string to parse
     * @return the parsed LocalDateTime, or null when unparseable
     */
    public static LocalDateTime parseOrNull(String dateTimeString) {
        try {
            return parse(dateTimeString);
        } catch (IllegalArgumentException e) {
            return null;
        }
    }

    /**
     * Checks whether the given string can be parsed.
     *
     * @param dateTimeString the string to check
     * @return true when parseable
     */
    public static boolean isValid(String dateTimeString) {
        return parseOrNull(dateTimeString) != null;
    }

    /**
     * Formats a LocalDateTime as ISO 8601 (e.g. 2026-01-20T10:00:00).
     *
     * @param dateTime value to format (null allowed)
     * @return ISO 8601 string, or null when the input is null
     */
    public static String formatIso(LocalDateTime dateTime) {
        return dateTime == null ? null : dateTime.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME);
    }

    /**
     * Formats a LocalDateTime in the standard form (e.g. 2026-01-20 10:00:00).
     *
     * @param dateTime value to format (null allowed)
     * @return formatted string, or null when the input is null
     */
    public static String formatStandard(LocalDateTime dateTime) {
        return dateTime == null ? null : dateTime.format(STANDARD_FORMAT);
    }

    /**
     * Returns a human-readable description of the supported formats (for API docs).
     *
     * @return supported formats description
     */
    public static String getSupportedFormatsDescription() {
        return """
            지원 형식:
            ISO 8601: 2026-01-20T10:00:00
            ISO 8601 (UTC): 2026-01-20T10:00:00Z
            ISO 8601 (Offset): 2026-01-20T10:00:00+09:00
            표준 형식: 2026-01-20 10:00:00
            생략: 2026-01-20T10:00, 2026-01-20 10:00
            압축 형식: 20260120100000 (YYYYMMDDHHMMSS)
            압축 형식 ( 생략): 202601201000 (YYYYMMDDHHMM)
            """;
    }
}

파일 보기

@ -26,20 +26,28 @@ import java.util.stream.IntStream;
public class PartitionManager {
private final JdbcTemplate queryJdbcTemplate;
private final JdbcTemplate collectJdbcTemplate;
private final gc.mda.signal_batch.global.config.PartitionRetentionConfig retentionConfig;
public PartitionManager(
@Qualifier("queryJdbcTemplate") JdbcTemplate queryJdbcTemplate,
@Qualifier("collectJdbcTemplate") JdbcTemplate collectJdbcTemplate,
gc.mda.signal_batch.global.config.PartitionRetentionConfig retentionConfig) {
this.queryJdbcTemplate = queryJdbcTemplate;
this.collectJdbcTemplate = collectJdbcTemplate;
this.retentionConfig = retentionConfig;
}
private static final DateTimeFormatter PARTITION_DATE_FORMAT = DateTimeFormatter.ofPattern("yyMMdd");
private static final DateTimeFormatter PARTITION_MONTH_FORMAT = DateTimeFormatter.ofPattern("yyyy_MM");
// 일별 파티션 테이블 목록 (queryDB만 관리)
private static final List<String> DAILY_PARTITION_TABLES = List.of(
// CollectDB 일별 파티션 테이블 목록
private static final List<String> COLLECT_DAILY_PARTITION_TABLES = List.of(
"sig_test"
);
// QueryDB 일별 파티션 테이블 목록
private static final List<String> QUERY_DAILY_PARTITION_TABLES = List.of(
"t_vessel_tracks_5min",
"t_grid_vessel_tracks",
"t_grid_tracks_summary",
@ -52,12 +60,11 @@ public class PartitionManager {
// 월별 파티션 테이블 목록
private static final List<String> MONTHLY_PARTITION_TABLES = List.of(
"t_vessel_tracks_hourly",
"t_grid_vessel_tracks_hourly",
"t_vessel_tracks_daily",
"t_grid_tracks_summary_hourly",
"t_grid_tracks_summary_daily",
"t_area_tracks_summary_hourly",
"t_area_tracks_summary_daily",
"t_vessel_tracks_daily",
"t_abnormal_tracks"
);
@ -68,7 +75,10 @@ public class PartitionManager {
public void initialize() {
log.info("========== PartitionManager Initialization ==========");
// DataSource 정보 로깅 (queryDB만)
// DataSource 정보 로깅
log.info("=== Collect DataSource Info ===");
DataSourceLogger.logJdbcTemplateInfo("PartitionManager-Collect", collectJdbcTemplate);
log.info("=== Query DataSource Info ===");
DataSourceLogger.logJdbcTemplateInfo("PartitionManager-Query", queryJdbcTemplate);
@ -113,7 +123,7 @@ public class PartitionManager {
try {
LocalDate today = LocalDate.now();
// 1. 일별 파티션 생성
// 1. 일별 파티션 생성 (CollectDB + QueryDB)
createDailyPartitions(today, retentionConfig.getFutureDays());
// 2. 월별 파티션 생성 (매월 1일에 다음달 파티션 생성)
@ -138,13 +148,20 @@ public class PartitionManager {
}
/**
* 현재 존재하는 테이블 확인 (queryDB만)
* 현재 존재하는 테이블 확인
*/
private void checkExistingTables() {
log.info("Checking existing tables in queryDB...");
log.info("Checking existing tables...");
// 일별 파티션 테이블 확인
for (String table : DAILY_PARTITION_TABLES) {
// CollectDB 일별 파티션 테이블 확인
log.info("--- CollectDB Tables ---");
for (String table : COLLECT_DAILY_PARTITION_TABLES) {
checkTableExists("signal", table, collectJdbcTemplate, "CollectDB");
}
// QueryDB 일별 파티션 테이블 확인
log.info("--- QueryDB Tables ---");
for (String table : QUERY_DAILY_PARTITION_TABLES) {
checkTableExists("signal", table, queryJdbcTemplate, "QueryDB");
}
@ -166,7 +183,7 @@ public class PartitionManager {
}
/**
* 일별 파티션 생성 (queryDB만)
* 일별 파티션 생성 (CollectDB + QueryDB)
*/
private void createDailyPartitions(LocalDate startDate, int days) {
log.info("Creating daily partitions for {} days starting from {}", days, startDate);
@ -176,8 +193,14 @@ public class PartitionManager {
IntStream.range(0, days).forEach(offset -> {
LocalDate targetDate = startDate.plusDays(offset);
for (String table : DAILY_PARTITION_TABLES) {
tasks.add(new PartitionTask("signal", table, targetDate, queryJdbcTemplate, "daily"));
// CollectDB 파티션
for (String table : COLLECT_DAILY_PARTITION_TABLES) {
tasks.add(new PartitionTask("signal", table, targetDate, collectJdbcTemplate, "daily", "CollectDB"));
}
// QueryDB 파티션
for (String table : QUERY_DAILY_PARTITION_TABLES) {
tasks.add(new PartitionTask("signal", table, targetDate, queryJdbcTemplate, "daily", "QueryDB"));
}
});
@ -186,7 +209,7 @@ public class PartitionManager {
}
/**
* 월별 파티션 생성
* 월별 파티션 생성 (QueryDB만)
*/
private void createMonthlyPartitions(LocalDate targetMonth) {
log.info("Creating monthly partitions for {}", targetMonth.format(DateTimeFormatter.ofPattern("yyyy-MM")));
@ -194,7 +217,7 @@ public class PartitionManager {
List<PartitionTask> tasks = new ArrayList<>();
for (String table : MONTHLY_PARTITION_TABLES) {
tasks.add(new PartitionTask("signal", table, targetMonth, queryJdbcTemplate, "monthly"));
tasks.add(new PartitionTask("signal", table, targetMonth, queryJdbcTemplate, "monthly", "QueryDB"));
}
// 병렬 처리
@ -235,19 +258,19 @@ public class PartitionManager {
try {
// 파티션 존재 확인
if (partitionExists(task.schema, partitionName, task.jdbcTemplate)) {
log.debug("[QueryDB] Partition already exists: {}.{}", task.schema, partitionName);
log.debug("[{}] Partition already exists: {}.{}", task.dbType, task.schema, partitionName);
return;
}
// 파티션 생성
task.jdbcTemplate.execute(createSql);
log.info("[QueryDB] Created partition: {}.{}", task.schema, partitionName);
log.info("[{}] Created partition: {}.{}", task.dbType, task.schema, partitionName);
// 파티션별 인덱스 생성
createPartitionIndexes(task.schema, partitionName, task.baseTable, task.jdbcTemplate);
} catch (Exception e) {
log.error("[QueryDB] Failed to create partition: {}.{}", task.schema, partitionName, e);
log.error("[{}] Failed to create partition: {}.{}", task.dbType, task.schema, partitionName, e);
}
}
@ -384,13 +407,6 @@ public class PartitionManager {
));
}
}
// 해구별 hourly 테이블 진입 이력 인덱스
if (baseTable.equals("t_grid_vessel_tracks_hourly")) {
indexSqls.add(String.format(
"CREATE INDEX CONCURRENTLY IF NOT EXISTS %s_vessel_time_idx ON %s.%s (sig_src_cd, target_id, time_bucket DESC)",
partitionName, schema, partitionName
));
}
}
// 일별 궤적 테이블
else if (baseTable.contains("daily")) {
@ -459,38 +475,54 @@ public class PartitionManager {
log.info("Cleanup Date: {}", today);
try {
// 일별 파티션 테이블 정리
log.info("--- Daily Partition Tables ---");
for (String tableName : DAILY_PARTITION_TABLES) {
// CollectDB 일별 파티션 테이블 정리
log.info("--- CollectDB Daily Partition Tables ---");
for (String tableName : COLLECT_DAILY_PARTITION_TABLES) {
int retentionDays = retentionConfig.getRetentionDays(tableName);
if (retentionDays <= 0) {
log.info("[{}] Unlimited retention (days={}). Skipping cleanup.", tableName, retentionDays);
log.info("[CollectDB:{}] Unlimited retention (days={}). Skipping cleanup.", tableName, retentionDays);
continue;
}
LocalDate cutoffDate = today.minusDays(retentionDays);
log.info("[{}] Retention: {} days, Cutoff: {}", tableName, retentionDays, cutoffDate);
log.info("[CollectDB:{}] Retention: {} days, Cutoff: {}", tableName, retentionDays, cutoffDate);
dropPartitionsForTable(tableName, cutoffDate);
dropPartitionsForTable(tableName, cutoffDate, collectJdbcTemplate, "CollectDB");
}
// 월별 파티션 테이블 정리
// QueryDB 일별 파티션 테이블 정리
log.info("--- QueryDB Daily Partition Tables ---");
for (String tableName : QUERY_DAILY_PARTITION_TABLES) {
int retentionDays = retentionConfig.getRetentionDays(tableName);
if (retentionDays <= 0) {
log.info("[QueryDB:{}] Unlimited retention (days={}). Skipping cleanup.", tableName, retentionDays);
continue;
}
LocalDate cutoffDate = today.minusDays(retentionDays);
log.info("[QueryDB:{}] Retention: {} days, Cutoff: {}", tableName, retentionDays, cutoffDate);
dropPartitionsForTable(tableName, cutoffDate, queryJdbcTemplate, "QueryDB");
}
// QueryDB 월별 파티션 테이블 정리
log.info("");
log.info("--- Monthly Partition Tables ---");
log.info("--- QueryDB Monthly Partition Tables ---");
for (String tableName : MONTHLY_PARTITION_TABLES) {
int retentionMonths = retentionConfig.getRetentionMonths(tableName);
if (retentionMonths <= 0) {
log.info("[{}] Unlimited retention (months={}). Skipping cleanup.", tableName, retentionMonths);
log.info("[QueryDB:{}] Unlimited retention (months={}). Skipping cleanup.", tableName, retentionMonths);
continue;
}
// 단위 계산: N개월 전의 1일
LocalDate cutoffDate = today.minusMonths(retentionMonths).withDayOfMonth(1);
log.info("[{}] Retention: {} months, Cutoff: {}", tableName, retentionMonths, cutoffDate);
log.info("[QueryDB:{}] Retention: {} months, Cutoff: {}", tableName, retentionMonths, cutoffDate);
dropPartitionsForTable(tableName, cutoffDate);
dropPartitionsForTable(tableName, cutoffDate, queryJdbcTemplate, "QueryDB");
}
log.info("========== Partition Cleanup Completed ==========");
@ -503,34 +535,60 @@ public class PartitionManager {
/**
* 개별 테이블의 파티션 삭제
*/
private void dropPartitionsForTable(String tableName, LocalDate cutoffDate) {
private void dropPartitionsForTable(String tableName, LocalDate cutoffDate, JdbcTemplate jdbcTemplate, String dbType) {
int droppedCount = 0;
int keptCount = 0;
int skippedCount = 0;
// 월별 파티션 테이블인지 확인
boolean isMonthlyTable = MONTHLY_PARTITION_TABLES.contains(tableName);
try {
List<PartitionInfo> partitions = findPartitions("signal", tableName + "_%", queryJdbcTemplate);
List<PartitionInfo> partitions = findPartitions("signal", tableName + "_%", jdbcTemplate);
for (PartitionInfo partition : partitions) {
if (partition.partitionDate == null) {
log.warn(" [{}] Cannot determine partition date, skipping: {}",
tableName, partition.tableName);
log.warn(" [{}:{}] Cannot determine partition date, skipping: {}",
dbType, tableName, partition.tableName);
skippedCount++;
continue;
}
// 파티션 패턴 검증: 일별 vs 월별
boolean isDailyPattern = partition.tableName.matches(".*_\\d{6}$");
boolean isMonthlyPattern = partition.tableName.matches(".*_\\d{4}_\\d{2}$");
// 일별 파티션 정리 : 월별 파티션은 건너뛰기
if (!isMonthlyTable && !isDailyPattern) {
log.debug(" [{}:{}] Skipping non-daily partition during daily cleanup: {}",
dbType, tableName, partition.tableName);
skippedCount++;
continue;
}
// 월별 파티션 정리 : 일별 파티션은 건너뛰기
if (isMonthlyTable && !isMonthlyPattern) {
log.debug(" [{}:{}] Skipping non-monthly partition during monthly cleanup: {}",
dbType, tableName, partition.tableName);
skippedCount++;
continue;
}
if (partition.partitionDate.isBefore(cutoffDate)) {
dropPartition(partition, queryJdbcTemplate);
dropPartition(partition, jdbcTemplate);
droppedCount++;
log.info(" [{}] Dropped: {} (date: {})",
tableName, partition.tableName, partition.partitionDate);
log.info(" [{}:{}] Dropped: {} (date: {})",
dbType, tableName, partition.tableName, partition.partitionDate);
} else {
keptCount++;
}
}
log.info(" [{}] Result: dropped={}, kept={}", tableName, droppedCount, keptCount);
log.info(" [{}:{}] Result: dropped={}, kept={}, skipped={}",
dbType, tableName, droppedCount, keptCount, skippedCount);
} catch (Exception e) {
log.error(" [{}] Failed to clean partitions: {}", tableName, e.getMessage(), e);
log.error(" [{}:{}] Failed to clean partitions: {}", dbType, tableName, e.getMessage(), e);
}
}
@ -716,13 +774,15 @@ public class PartitionManager {
LocalDate date;
JdbcTemplate jdbcTemplate;
String partitionType; // "daily" or "monthly"
String dbType; // "CollectDB" or "QueryDB"
PartitionTask(String schema, String baseTable, LocalDate date, JdbcTemplate jdbcTemplate, String partitionType) {
PartitionTask(String schema, String baseTable, LocalDate date, JdbcTemplate jdbcTemplate, String partitionType, String dbType) {
this.schema = schema;
this.baseTable = baseTable;
this.date = date;
this.jdbcTemplate = jdbcTemplate;
this.partitionType = partitionType;
this.dbType = dbType;
}
}

파일 보기

@ -124,9 +124,85 @@ public class ShipKindCodeConverter {
return SHIP_KIND_MAP.getOrDefault(key, "000027"); // 기본값: 기타
}
/**
 * Resolves the ship-kind code from sig_src_cd, shipType, shipName and targetId,
 * giving priority to the fishing-net/buoy classification (000028) based on
 * ship-name pattern matching.
 *
 * @param sigSrcCd signal source code (e.g. 000001, 000004, ...)
 * @param shipType ship type (e.g. 30, B005, ...)
 * @param shipName ship name (e.g. "부이-123", "어망.설치선", ...)
 * @param targetId target id (MMSI etc., e.g. "440123456", "123456789")
 * @return ship kind code (e.g. 000020, 000028, ...); 000027 (other) when nothing matches
 */
public static String getShipKindCodeWithNamePattern(String sigSrcCd, String shipType, String shipName, String targetId) {
    // Buoy/fishing-net check applies only when all of the following hold:
    // - source is AIS (000001) or VTS-AIS (000004)
    // - not a Korean-flagged vessel (targetId does not start with 440/441)
    // - the ship name matches the buoy naming pattern
    boolean fishingGearCandidate = isAisOrVtsAis(sigSrcCd)
            && !isKoreanVessel(targetId)
            && containsBuoyPattern(shipName);
    if (fishingGearCandidate) {
        return "000028"; // fishing net / buoy
    }
    // Otherwise fall back to the regular signal/type mapping.
    return getShipKindCode(sigSrcCd, shipType);
}
/**
 * Backward-compatible overload that omits {@code targetId}.
 *
 * <p>Delegates with {@code targetId = null}, so the Korean-vessel exclusion
 * never applies and name-pattern matching alone decides the buoy case.</p>
 *
 * @deprecated prefer the 4-argument overload that takes {@code targetId}
 */
@Deprecated
public static String getShipKindCodeWithNamePattern(String sigSrcCd, String shipType, String shipName) {
return getShipKindCodeWithNamePattern(sigSrcCd, shipType, shipName, null);
}
/**
 * Returns true when the signal source is AIS ("000001") or VTS-AIS ("000004").
 * Null-safe: a null source code yields false.
 */
private static boolean isAisOrVtsAis(String sigSrcCd) {
    if (sigSrcCd == null) {
        return false;
    }
    return sigSrcCd.equals("000001") || sigSrcCd.equals("000004");
}
/**
 * Returns true when the target is a Korean-flagged vessel, i.e. its MMSI
 * starts with MID 440 or 441. Korean vessels are never classified as
 * fishing-net/buoy even when their names contain special characters.
 * Null or too-short ids are treated as non-Korean.
 */
private static boolean isKoreanVessel(String targetId) {
    // startsWith() is false for null-safe short inputs as well, matching the
    // original substring(0, 3) comparison for ids of length >= 3.
    return targetId != null
            && (targetId.startsWith("440") || targetId.startsWith("441"));
}
/**
 * Returns true when the ship name matches a fishing-net/buoy pattern.
 *
 * Pattern rules:
 *  - contains '%'               -> net/buoy
 *  - contains BOTH '-' and '.'  -> net/buoy (e.g. "ABC-5.5", "ABC.5-5")
 *  - contains only '-' or '.'   -> ordinary vessel (e.g. "ABC NO.5", "S-92")
 */
private static boolean containsBuoyPattern(String shipName) {
    if (shipName == null || shipName.isEmpty()) {
        return false;
    }
    return shipName.contains("%")
            || (shipName.contains("-") && shipName.contains("."));
}
/**
* 선박 종류 명칭 반환
*
*
* @param shipKindCode 선박 종류 코드
* @return 선박 종류 명칭
*/
@ -139,6 +215,7 @@ public class ShipKindCodeConverter {
case "000024": return "유조선";
case "000025": return "관공선";
case "000027": return "기타";
case "000028": return "어망/부이";
default: return "기타";
}
}

파일 보기

@ -0,0 +1,231 @@
package gc.mda.signal_batch.global.util;
import gc.mda.signal_batch.domain.vessel.dto.CompactVesselTrack;
import gc.mda.signal_batch.domain.vessel.dto.TrackResponse;
import lombok.extern.slf4j.Slf4j;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Converts WKT-based {@link TrackResponse} rows into array-based
 * {@link CompactVesselTrack} objects so the REST API can return the same
 * response structure as the WebSocket API.
 *
 * <p>Stateless utility class: all entry points are static.
 */
@Slf4j
public class TrackConverter {

    /** Mean Earth radius in nautical miles, used by the Haversine distance. */
    private static final double EARTH_RADIUS_NM = 3440.065;

    private TrackConverter() {
        // static utility - not instantiable
    }

    /**
     * Converts segment-level {@link TrackResponse} rows into one
     * {@link CompactVesselTrack} per vessel.
     * <ul>
     *   <li>Segments of the same vessel (sigSrcCd + targetId) are merged.</li>
     *   <li>Each LineStringM WKT is expanded into geometry/timestamps/speeds arrays.</li>
     * </ul>
     *
     * @param trackResponses     segment rows, possibly several per vessel; may be null/empty
     * @param vesselInfoProvider lookup for vessel name/type by (sigSrcCd, targetId);
     *                           may be null, in which case "-" placeholders are used
     * @return merged tracks, one per vessel; never null, empty-point tracks are dropped
     */
    public static List<CompactVesselTrack> convert(
            List<TrackResponse> trackResponses,
            VesselInfoProvider vesselInfoProvider) {

        if (trackResponses == null || trackResponses.isEmpty()) {
            return Collections.emptyList();
        }

        // 1. Group segments by vessel, preserving first-seen order.
        Map<String, List<TrackResponse>> byVessel = trackResponses.stream()
                .collect(Collectors.groupingBy(
                        t -> t.getSigSrcCd() + "_" + t.getTargetId(),
                        LinkedHashMap::new,
                        Collectors.toList()
                ));

        // 2. Merge and convert each vessel's segments.
        List<CompactVesselTrack> result = new ArrayList<>(byVessel.size());
        for (Map.Entry<String, List<TrackResponse>> entry : byVessel.entrySet()) {
            CompactVesselTrack compactTrack =
                    mergeSegments(entry.getKey(), entry.getValue(), vesselInfoProvider);
            if (compactTrack != null && compactTrack.getPointCount() > 0) {
                result.add(compactTrack);
            }
        }
        return result;
    }

    /**
     * Merges one vessel's segments (sorted by time bucket) into a single
     * {@link CompactVesselTrack}.
     *
     * @return the merged track, or null when no parsable geometry exists
     */
    private static CompactVesselTrack mergeSegments(
            String vesselId,
            List<TrackResponse> segments,
            VesselInfoProvider vesselInfoProvider) {

        if (segments == null || segments.isEmpty()) {
            return null;
        }

        // Sort a defensive copy by time bucket so the caller's list is untouched.
        List<TrackResponse> ordered = new ArrayList<>(segments);
        ordered.sort(Comparator.comparing(TrackResponse::getTimeBucket));

        String sigSrcCd = ordered.get(0).getSigSrcCd();
        String targetId = ordered.get(0).getTargetId();

        // WKTReader is not documented as thread-safe; use a fresh instance per
        // call instead of a shared static one so concurrent requests are safe.
        WKTReader wktReader = new WKTReader();

        // Accumulated output across all segments
        List<double[]> allGeometry = new ArrayList<>();
        List<String> allTimestamps = new ArrayList<>();
        List<Double> allSpeeds = new ArrayList<>();
        double totalDistance = 0;
        double maxSpeed = 0;

        // The previous point carries across segment boundaries so the speed of
        // the first point of a new segment is computed against the last point
        // of the previous segment.
        Coordinate prevCoord = null;
        long prevTimeMillis = 0;

        for (TrackResponse segment : ordered) {
            String trackGeom = segment.getTrackGeom();
            if (trackGeom == null || trackGeom.isEmpty() || "LINESTRING EMPTY".equals(trackGeom)) {
                continue;
            }
            try {
                var parsed = wktReader.read(trackGeom);
                if (!(parsed instanceof LineString)) {
                    // Guard against non-LineString WKT; the previous unchecked
                    // cast would have thrown an uncaught ClassCastException.
                    log.warn("Skipping non-LineString geometry for vessel {}: {}",
                            vesselId, parsed.getGeometryType());
                    continue;
                }
                LineString lineString = (LineString) parsed;
                if (lineString.getNumPoints() == 0) {
                    continue;
                }

                for (int i = 0; i < lineString.getNumPoints(); i++) {
                    Coordinate coord = lineString.getCoordinateN(i);
                    allGeometry.add(new double[]{coord.x, coord.y});

                    // M value is a Unix timestamp in seconds.
                    long unixTimestamp = (long) coord.getM();
                    allTimestamps.add(String.valueOf(unixTimestamp));

                    // Speed (knots) from distance / elapsed time to the previous point.
                    double speed = 0.0;
                    long currentTimeMillis = unixTimestamp * 1000;
                    if (prevCoord != null && prevTimeMillis > 0) {
                        double distance = calculateDistanceNm(prevCoord.y, prevCoord.x, coord.y, coord.x);
                        long timeDiff = currentTimeMillis - prevTimeMillis;
                        if (timeDiff > 0) {
                            speed = distance / (timeDiff / 3600000.0); // nm per hour = knots
                        }
                    }
                    allSpeeds.add(speed);

                    prevCoord = coord;
                    prevTimeMillis = currentTimeMillis;
                }

                // Accumulate per-segment statistics from the source rows.
                if (segment.getDistanceNm() != null) {
                    totalDistance += segment.getDistanceNm().doubleValue();
                }
                if (segment.getMaxSpeed() != null) {
                    maxSpeed = Math.max(maxSpeed, segment.getMaxSpeed().doubleValue());
                }
            } catch (ParseException e) {
                log.warn("Failed to parse LineStringM for vessel {}: {}", vesselId, e.getMessage());
            }
        }

        if (allGeometry.isEmpty()) {
            return null;
        }

        // Vessel name/type lookup ("-" placeholders when no provider is supplied).
        VesselInfo vesselInfo = vesselInfoProvider != null
                ? vesselInfoProvider.getVesselInfo(sigSrcCd, targetId)
                : new VesselInfo("-", "-");

        // National code derived from signal source and target id.
        String nationalCode = NationalCodeUtil.calculateNationalCode(sigSrcCd, targetId);

        // Ship kind code, including vessel-name pattern matching (net/buoy detection).
        String shipKindCode = ShipKindCodeConverter.getShipKindCodeWithNamePattern(
                sigSrcCd, vesselInfo.getShipType(), vesselInfo.getShipName(), targetId);

        // Average over strictly positive speeds only (zero-speed samples excluded).
        double avgSpeed = allSpeeds.stream()
                .filter(s -> s > 0)
                .mapToDouble(Double::doubleValue)
                .average()
                .orElse(0.0);

        return CompactVesselTrack.builder()
                .vesselId(vesselId)
                .sigSrcCd(sigSrcCd)
                .targetId(targetId)
                .nationalCode(nationalCode)
                .geometry(allGeometry)
                .timestamps(allTimestamps)
                .speeds(allSpeeds)
                .totalDistance(totalDistance)
                .avgSpeed(Math.round(avgSpeed * 100.0) / 100.0)
                .maxSpeed(Math.round(maxSpeed * 100.0) / 100.0)
                .pointCount(allGeometry.size())
                .shipName(vesselInfo.getShipName())
                .shipType(vesselInfo.getShipType())
                .shipKindCode(shipKindCode)
                .build();
    }

    /**
     * Great-circle distance between two WGS84 points via the Haversine
     * formula, in nautical miles.
     */
    private static double calculateDistanceNm(double lat1, double lon1, double lat2, double lon2) {
        double lat1Rad = Math.toRadians(lat1);
        double lat2Rad = Math.toRadians(lat2);
        double deltaLat = lat2Rad - lat1Rad;
        double deltaLon = Math.toRadians(lon2 - lon1);

        double a = Math.sin(deltaLat / 2) * Math.sin(deltaLat / 2)
                + Math.cos(lat1Rad) * Math.cos(lat2Rad)
                * Math.sin(deltaLon / 2) * Math.sin(deltaLon / 2);
        double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
        return EARTH_RADIUS_NM * c;
    }

    /** Lookup for vessel name/type by signal source and target id. */
    @FunctionalInterface
    public interface VesselInfoProvider {
        VesselInfo getVesselInfo(String sigSrcCd, String targetId);
    }

    /** Immutable vessel name/type pair; null inputs are normalized to "-". */
    public static class VesselInfo {
        private final String shipName;
        private final String shipType;

        public VesselInfo(String shipName, String shipType) {
            this.shipName = shipName != null ? shipName : "-";
            this.shipType = shipType != null ? shipType : "-";
        }

        public String getShipName() {
            return shipName;
        }

        public String getShipType() {
            return shipType;
        }
    }
}

파일 보기

@ -2,6 +2,7 @@ package gc.mda.signal_batch.global.util;
import gc.mda.signal_batch.domain.vessel.model.VesselData;
import gc.mda.signal_batch.domain.gis.cache.AreaBoundaryCache;
import gc.mda.signal_batch.domain.vessel.service.VesselPreviousBucketCache;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@ -28,16 +29,21 @@ public class VesselTrackDataJobListener implements JobExecutionListener {
private final JdbcTemplate collectJdbcTemplate;
private final VesselTrackDataHolder vesselTrackDataHolder;
private final AreaBoundaryCache areaBoundaryCache; // 추가
private final AreaBoundaryCache areaBoundaryCache;
private final VesselPreviousBucketCache previousBucketCache;
@Value("${vessel.batch.fetch-size:50000}")
private int fetchSize;
@BeforeJob
public void beforeJob(JobExecution jobExecution) {
// Area/Haegu 경계 캐시 갱신
areaBoundaryCache.refresh();
log.info("Refreshed area boundary cache");
// 이전 버킷 캐시 Fallback 플래그 리셋 ( Job 실행 1회만 DB 조회)
previousBucketCache.resetFallbackFlag();
log.info("Reset previous bucket cache fallback flag");
LocalDateTime startTime = LocalDateTime.parse(
jobExecution.getJobParameters().getString("startTime"));
@ -111,6 +117,10 @@ public class VesselTrackDataJobListener implements JobExecutionListener {
@Override
public void afterJob(JobExecution jobExecution) {
// DB 조회 통계 출력
previousBucketCache.logJobStatistics();
// 데이터 정리
vesselTrackDataHolder.clear();
log.debug("Cleared vessel track data after job completion");
}

파일 보기

@ -535,8 +535,9 @@ public class ChunkedTrackStreamingService {
accumulator.shipName = vesselInfo.shipName;
accumulator.shipType = vesselInfo.shipType;
// shipKindCode 계산
accumulator.shipKindCode = ShipKindCodeConverter.getShipKindCode(sigSrcCd, vesselInfo.shipType);
// shipKindCode 계산 (선박명 패턴 매칭 포함 - 어망/부이 판별)
accumulator.shipKindCode = ShipKindCodeConverter.getShipKindCodeWithNamePattern(
sigSrcCd, vesselInfo.shipType, vesselInfo.shipName, targetId);
// 테스트용 로그 - 처음 10개 선박만
// if (vesselCount <= 10) {
@ -568,8 +569,9 @@ public class ChunkedTrackStreamingService {
// log.info("[{}] Range [{} - {}] = {} minutes, {} buckets expected",
// tableName, range.getStart(), range.getEnd(), rangeMinutes, expectedBuckets);
// log.info("[{}] Processed {} tracks for {} unique vessels (avg {:.1f} tracks/vessel)",
// tableName, trackCount, vesselCount, vesselCount > 0 ? (double)trackCount/vesselCount : 0);
// log.info("[{}] Processed {} tracks for {} unique vessels (avg {} tracks/vessel)",
// tableName, trackCount, vesselCount,
// vesselCount > 0 ? String.format("%.1f", (double)trackCount/vesselCount) : "0.0");
// log.debug("[{}] Vessel count in this chunk: {} (cumulative in vesselMap: {})",
// tableName, vesselCount, vesselMap.size());
}
@ -849,8 +851,9 @@ public class ChunkedTrackStreamingService {
accumulator.shipName = vesselInfo.shipName;
accumulator.shipType = vesselInfo.shipType;
// shipKindCode 계산
accumulator.shipKindCode = ShipKindCodeConverter.getShipKindCode(track.getSigSrcCd(), vesselInfo.shipType);
// shipKindCode 계산 (선박명 패턴 매칭 포함 - 어망/부이 판별)
accumulator.shipKindCode = ShipKindCodeConverter.getShipKindCodeWithNamePattern(
track.getSigSrcCd(), vesselInfo.shipType, vesselInfo.shipName, track.getTargetId());
mergedMap.put(vesselId, accumulator);
}
@ -1761,8 +1764,9 @@ public class ChunkedTrackStreamingService {
acc.targetId = finalTargetId;
acc.shipName = info.shipName;
acc.shipType = info.shipType;
// shipKindCode 계산 추가
acc.shipKindCode = ShipKindCodeConverter.getShipKindCode(finalSigSrcCd, info.shipType);
// shipKindCode 계산 (선박명 패턴 매칭 포함 - 어망/부이 판별)
acc.shipKindCode = ShipKindCodeConverter.getShipKindCodeWithNamePattern(
finalSigSrcCd, info.shipType, info.shipName, finalTargetId);
return acc;
});
@ -2406,7 +2410,9 @@ public class ChunkedTrackStreamingService {
acc.targetId = finalTargetId;
acc.shipName = info.shipName;
acc.shipType = info.shipType;
acc.shipKindCode = ShipKindCodeConverter.getShipKindCode(finalSigSrcCd, info.shipType);
// shipKindCode 계산 (선박명 패턴 매칭 포함 - 어망/부이 판별)
acc.shipKindCode = ShipKindCodeConverter.getShipKindCodeWithNamePattern(
finalSigSrcCd, info.shipType, info.shipName, finalTargetId);
return acc;
});

파일 보기

@ -1,6 +1,9 @@
package gc.mda.signal_batch.monitoring.controller;
import gc.mda.signal_batch.monitoring.service.BatchMetadataCleanupService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.*;
@ -28,6 +31,7 @@ import java.util.stream.Collectors;
@RequestMapping("/admin/batch")
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@RequiredArgsConstructor
@Tag(name = "배치 관리 API", description = "Spring Batch Job 실행, 중지, 재시작 및 통계 조회 API")
public class BatchAdminController {
@Autowired
@ -57,10 +61,11 @@ public class BatchAdminController {
* Job 실행
*/
@PostMapping("/job/run")
@Operation(summary = "배치 Job 실행", description = "지정된 배치 Job을 실행합니다. vesselAggregationJob, vesselTrackAggregationJob, dailyAggregationJob 지원")
public ResponseEntity<Map<String, Object>> runJob(
@RequestParam(required = false) String jobName,
@RequestParam(required = false) String startTime,
@RequestParam(required = false) String endTime) {
@Parameter(description = "Job 이름 (기본: vesselAggregationJob)") @RequestParam(required = false) String jobName,
@Parameter(description = "시작 시간 (형식: yyyy-MM-ddTHH:mm:ss)") @RequestParam(required = false) String startTime,
@Parameter(description = "종료 시간 (형식: yyyy-MM-ddTHH:mm:ss)") @RequestParam(required = false) String endTime) {
try {
Job job;
@ -113,7 +118,9 @@ public class BatchAdminController {
* Job 중지
*/
@PostMapping("/job/stop/{executionId}")
public ResponseEntity<Map<String, Object>> stopJob(@PathVariable Long executionId) {
@Operation(summary = "실행 중인 Job 중지", description = "실행 중인 배치 Job을 중지합니다")
public ResponseEntity<Map<String, Object>> stopJob(
@Parameter(description = "Job 실행 ID") @PathVariable Long executionId) {
try {
boolean stopped = jobOperator.stop(executionId);
@ -133,7 +140,9 @@ public class BatchAdminController {
* Job 재시작
*/
@PostMapping("/job/restart/{executionId}")
public ResponseEntity<Map<String, Object>> restartJob(@PathVariable Long executionId) {
@Operation(summary = "실패한 Job 재시작", description = "실패한 배치 Job을 재시작합니다")
public ResponseEntity<Map<String, Object>> restartJob(
@Parameter(description = "Job 실행 ID") @PathVariable Long executionId) {
try {
Long newExecutionId = jobOperator.restart(executionId);
@ -153,7 +162,9 @@ public class BatchAdminController {
* 실패한 Job을 ABANDONED 상태로 변경 (재시도 방지)
*/
@PostMapping("/job/abandon/{executionId}")
public ResponseEntity<Map<String, Object>> abandonJob(@PathVariable Long executionId) {
@Operation(summary = "Job ABANDONED 처리", description = "실패한 Job을 ABANDONED 상태로 변경하여 재시도를 방지합니다")
public ResponseEntity<Map<String, Object>> abandonJob(
@Parameter(description = "Job 실행 ID") @PathVariable Long executionId) {
try {
JobExecution execution = jobExplorer.getJobExecution(executionId);
@ -214,6 +225,7 @@ public class BatchAdminController {
* 모든 실패한 Job을 ABANDONED로 변경
*/
@PostMapping("/job/abandon-all-failed")
@Operation(summary = "모든 실패한 Job ABANDONED 처리", description = "모든 실패 상태의 Job을 일괄로 ABANDONED 상태로 변경합니다")
public ResponseEntity<Map<String, Object>> abandonAllFailedJobs() {
try {
int abandonedCount = 0;
@ -272,6 +284,7 @@ public class BatchAdminController {
* 실행 중인 Job 목록
*/
@GetMapping("/job/running")
@Operation(summary = "실행 중인 Job 목록", description = "현재 실행 중인 모든 배치 Job 목록을 조회합니다")
public ResponseEntity<List<Map<String, Object>>> getRunningJobs() {
try {
List<Map<String, Object>> runningJobs = new ArrayList<>();
@ -315,9 +328,10 @@ public class BatchAdminController {
* Job 실행 이력
*/
@GetMapping("/job/history")
@Operation(summary = "Job 실행 이력 조회", description = "배치 Job의 실행 이력을 조회합니다")
public ResponseEntity<List<Map<String, Object>>> getJobHistory(
@RequestParam(required = false) String jobName,
@RequestParam(defaultValue = "50") int limit) {
@Parameter(description = "Job 이름 (미지정시 전체)") @RequestParam(required = false) String jobName,
@Parameter(description = "조회 개수") @RequestParam(defaultValue = "50") int limit) {
try {
List<Map<String, Object>> history = new ArrayList<>();
@ -452,8 +466,9 @@ public class BatchAdminController {
* 실패한 Job 목록
*/
@GetMapping("/job/failed")
@Operation(summary = "실패한 Job 목록", description = "지정된 시간 내의 실패한 Job 목록을 조회합니다")
public ResponseEntity<List<Map<String, Object>>> getFailedJobs(
@RequestParam(defaultValue = "24") int hoursBack) {
@Parameter(description = "조회 기간 (시간)") @RequestParam(defaultValue = "24") int hoursBack) {
try {
LocalDateTime since = LocalDateTime.now().minusHours(hoursBack);
@ -523,8 +538,9 @@ public class BatchAdminController {
* 배치 통계
*/
@GetMapping("/statistics")
@Operation(summary = "배치 통계 조회", description = "지정된 기간의 배치 실행 통계를 조회합니다")
public ResponseEntity<Map<String, Object>> getBatchStatistics(
@RequestParam(defaultValue = "7") int days) {
@Parameter(description = "조회 기간 (일)") @RequestParam(defaultValue = "7") int days) {
try {
LocalDateTime since = LocalDateTime.now().minusDays(days);
@ -639,6 +655,7 @@ public class BatchAdminController {
* 일별 처리 통계 (Dashboard 차트용)
*/
@GetMapping("/daily-stats")
@Operation(summary = "일별 처리 통계", description = "최근 7일간 일별 배치 처리 통계를 조회합니다 (대시보드 차트용)")
public ResponseEntity<Map<String, Object>> getDailyStatistics() {
try {
Map<String, Object> result = new HashMap<>();
@ -753,6 +770,7 @@ public class BatchAdminController {
* 배치 메타데이터 정리 실행
*/
@PostMapping("/cleanup")
@Operation(summary = "배치 메타데이터 정리", description = "오래된 배치 Job 메타데이터를 정리합니다")
public ResponseEntity<Map<String, Object>> cleanupBatchMetadata() {
try {
log.info("Manual batch metadata cleanup requested");
@ -806,6 +824,7 @@ public class BatchAdminController {
* 손상된 ExecutionContext 데이터 정리 (패키지 변경으로 인한 역직렬화 오류)
*/
@PostMapping("/cleanup-corrupted")
@Operation(summary = "손상된 ExecutionContext 정리", description = "패키지 변경으로 인한 역직렬화 오류가 발생한 ExecutionContext 데이터를 정리합니다")
public ResponseEntity<Map<String, Object>> cleanupCorruptedExecutionContext() {
try {
log.info("Manual corrupted ExecutionContext cleanup requested");
@ -833,6 +852,7 @@ public class BatchAdminController {
* 배치 메타데이터 현재 상태 조회
*/
@GetMapping("/metadata-status")
@Operation(summary = "배치 메타데이터 상태", description = "배치 메타데이터 테이블의 현재 상태를 조회합니다")
public ResponseEntity<Map<String, Object>> getBatchMetadataStatus() {
try {
// 여기서는 간단하게 현재 테이블 상태만 조회

파일 보기

@ -1,6 +1,8 @@
package gc.mda.signal_batch.monitoring.controller;
import gc.mda.signal_batch.global.util.DataSourceLogger;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
@ -23,6 +25,7 @@ import java.util.Map;
@RequestMapping("/admin/debug")
@RequiredArgsConstructor
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@Tag(name = "DataSource 디버그 API", description = "DataSource 연결 상태 및 테이블 정보 확인 API")
public class DataSourceDebugController {
@Qualifier("collectDataSource")
@ -41,6 +44,7 @@ public class DataSourceDebugController {
private final JdbcTemplate queryJdbcTemplate;
@GetMapping("/datasources")
@Operation(summary = "DataSource 정보 조회", description = "Collect, Query, Batch DataSource의 연결 정보, 스키마, 연결 테스트 결과를 조회합니다")
public Map<String, Object> getDataSourceInfo() {
Map<String, Object> result = new HashMap<>();
@ -53,6 +57,7 @@ public class DataSourceDebugController {
}
@GetMapping("/check-tables")
@Operation(summary = "테이블 존재 여부 확인", description = "Collect DB와 Query DB의 주요 테이블 존재 여부 및 크기를 확인합니다")
public Map<String, Object> checkTables() {
Map<String, Object> result = new HashMap<>();

파일 보기

@ -1,5 +1,8 @@
package gc.mda.signal_batch.monitoring.controller;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.JobExecution;
@ -19,6 +22,7 @@ import java.util.*;
@RequestMapping("/admin")
@RequiredArgsConstructor
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@Tag(name = "메트릭스 API", description = "시스템 메트릭, Job 통계, 대해구별 집계 및 데이터 정합성 검증 API")
public class MetricsController {
private final JobExplorer jobExplorer;
@ -26,6 +30,7 @@ public class MetricsController {
@GetMapping("/metrics/summary")
@Operation(summary = "시스템 메트릭 요약", description = "Job 통계, 메모리 사용량, 스레드 수, DB 연결 수 등 시스템 전반의 메트릭을 조회합니다")
public Map<String, Object> getMetricsSummary() {
Map<String, Object> summary = new HashMap<>();
@ -80,7 +85,9 @@ public class MetricsController {
}
@GetMapping("/jobs/recent")
public List<Map<String, Object>> getRecentJobs(@RequestParam(defaultValue = "10") int count) {
@Operation(summary = "최근 Job 실행 이력", description = "vesselAggregationJob의 최근 실행 이력을 조회합니다. 상태, 시작/종료 시간, 처리 건수를 포함합니다")
public List<Map<String, Object>> getRecentJobs(
@Parameter(description = "조회할 Job 수 (기본: 10)") @RequestParam(defaultValue = "10") int count) {
List<Map<String, Object>> jobs = new ArrayList<>();
try {
@ -124,6 +131,7 @@ public class MetricsController {
}
@GetMapping("/haegu/stats")
@Operation(summary = "대해구별 통계", description = "최근 15분 데이터 기준 대해구별 선박 수, 밀도 등 통계를 조회합니다. 대해구/소해구 레벨 데이터를 집계합니다")
public List<Map<String, Object>> getHaeguStats() {
List<Map<String, Object>> results = new ArrayList<>();
@ -216,6 +224,7 @@ public class MetricsController {
* 대해구 데이터 정합성 검증
*/
@GetMapping("/haegu/validate")
@Operation(summary = "대해구 데이터 정합성 검증", description = "타일 레벨별 분포, 중복 데이터, 대해구-소해구 집계 정합성을 검증합니다")
public Map<String, Object> validateHaeguData() {
Map<String, Object> validation = new HashMap<>();

파일 보기

@ -1,6 +1,9 @@
package gc.mda.signal_batch.monitoring.controller;
import gc.mda.signal_batch.migration.unix_timestamp.MissingDataFiller;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
@ -20,12 +23,15 @@ import java.util.Map;
@RequestMapping("/api/migration")
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@RequiredArgsConstructor
@Tag(name = "마이그레이션 API", description = "Unix timestamp 기반 항적 데이터 마이그레이션 및 누락 데이터 복구 API")
public class MigrationController {
private final MissingDataFiller missingDataFiller;
@PostMapping("/fill-hourly")
@Operation(summary = "Hourly 누락 데이터 채우기", description = "지정된 시간의 누락된 Hourly 집계 데이터를 5분 데이터로부터 생성합니다")
public ResponseEntity<Map<String, Object>> fillHourlyData(
@Parameter(description = "대상 시간 (형식: yyyy-MM-dd HH:mm:ss)")
@RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime timeBucket) {
try {
@ -46,7 +52,9 @@ public class MigrationController {
}
@PostMapping("/fill-daily")
@Operation(summary = "Daily 누락 데이터 채우기", description = "지정된 날짜의 누락된 Daily 집계 데이터를 Hourly 데이터로부터 생성합니다")
public ResponseEntity<Map<String, Object>> fillDailyData(
@Parameter(description = "대상 날짜 (형식: yyyy-MM-dd)")
@RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd") LocalDate date) {
try {
@ -67,7 +75,9 @@ public class MigrationController {
}
@GetMapping("/status")
@Operation(summary = "마이그레이션 상태 조회", description = "지정된 날짜의 마이그레이션 상태를 조회합니다")
public ResponseEntity<Map<String, Object>> getStatus(
@Parameter(description = "대상 날짜 (형식: yyyy-MM-dd)")
@RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd") LocalDate date) {
String sql = """
@ -93,11 +103,12 @@ public class MigrationController {
* - M값이 잘못되었으면(9시간 차이) 수정
*/
@PostMapping("/fix-unix-timestamps")
@Operation(summary = "Unix timestamp 검증 및 수정", description = "track_geom_v2가 비어있거나 M값이 잘못된 레코드를 자동으로 수정합니다")
public ResponseEntity<Map<String, Object>> fixUnixTimestamps(
@RequestParam String tableName, // "5min", "hourly", "daily"
@RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime startTime,
@RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime endTime,
@RequestParam(defaultValue = "1000") int batchSize) {
@Parameter(description = "테이블 타입 (5min, hourly, daily)") @RequestParam String tableName,
@Parameter(description = "시작 시간") @RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime startTime,
@Parameter(description = "종료 시간") @RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime endTime,
@Parameter(description = "배치 크기") @RequestParam(defaultValue = "1000") int batchSize) {
try {
FixResult result = missingDataFiller.fixUnixTimestamps(tableName, startTime, endTime, batchSize);
@ -125,9 +136,10 @@ public class MigrationController {
* Unix timestamp 검증 (수정 없이 확인만)
*/
@GetMapping("/verify-unix-timestamps")
@Operation(summary = "Unix timestamp 검증", description = "지정된 테이블과 시간의 Unix timestamp 정합성을 검증합니다 (수정 없음)")
public ResponseEntity<Map<String, Object>> verifyUnixTimestamps(
@RequestParam String tableName,
@RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime timeBucket) {
@Parameter(description = "테이블 타입 (5min, hourly, daily)") @RequestParam String tableName,
@Parameter(description = "대상 시간") @RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss") LocalDateTime timeBucket) {
try {
VerifyResult result = missingDataFiller.verifyUnixTimestamps(tableName, timeBucket);

파일 보기

@ -1,5 +1,7 @@
package gc.mda.signal_batch.monitoring.controller;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
@ -14,6 +16,7 @@ import java.util.*;
@RequestMapping("/monitor")
@RequiredArgsConstructor
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
@Tag(name = "시스템 모니터링 API", description = "데이터 처리 지연, 해구별 현황, 처리량 및 데이터 품질 모니터링 API")
public class MonitoringController {
private final JdbcTemplate collectJdbcTemplate;
@ -23,6 +26,7 @@ public class MonitoringController {
* 데이터 처리 지연 상태 확인
*/
@GetMapping("/delay")
@Operation(summary = "데이터 처리 지연 상태", description = "수집DB와 조회DB 간의 데이터 처리 지연 시간을 확인합니다")
public Map<String, Object> getProcessingDelay() {
Map<String, Object> result = new HashMap<>();
@ -76,6 +80,7 @@ public class MonitoringController {
* 대해구별 실시간 처리 현황
*/
@GetMapping("/haegu/realtime")
@Operation(summary = "대해구별 실시간 현황", description = "최신 타일 데이터 기준 대해구별 선박 통계를 조회합니다")
public List<Map<String, Object>> getRealtimeHaeguStatus() {
String sql = """
WITH recent_data AS (
@ -123,6 +128,7 @@ public class MonitoringController {
* 시스템 처리량 메트릭
*/
@GetMapping("/throughput")
@Operation(summary = "시스템 처리량 메트릭", description = "최근 1시간 처리량 및 파티션 크기 정보를 조회합니다")
public Map<String, Object> getThroughputMetrics() {
Map<String, Object> metrics = new HashMap<>();
@ -185,6 +191,7 @@ public class MonitoringController {
*/
@SuppressWarnings("null")
@GetMapping("/quality")
@Operation(summary = "데이터 품질 검증", description = "중복 데이터 및 누락 타일을 확인하여 데이터 품질을 검증합니다")
public Map<String, Object> checkDataQuality() {
Map<String, Object> quality = new HashMap<>();

파일 보기

@ -2,6 +2,9 @@ package gc.mda.signal_batch.monitoring.controller;
import gc.mda.signal_batch.monitoring.service.TrackStreamingMetrics;
import gc.mda.signal_batch.global.websocket.service.StompTrackStreamingService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.ResponseEntity;
@ -16,6 +19,7 @@ import java.util.Map;
@RestController
@RequestMapping("/api/websocket")
@RequiredArgsConstructor
@Tag(name = "WebSocket 모니터링 API", description = "WebSocket 스트리밍 상태 및 쿼리 진행 상황 모니터링 API")
public class WebSocketMonitoringController {
private final TrackStreamingMetrics trackStreamingMetrics;
@ -25,6 +29,7 @@ public class WebSocketMonitoringController {
* WebSocket 스트리밍 현황 조회
*/
@GetMapping("/status")
@Operation(summary = "WebSocket 스트리밍 현황", description = "활성 쿼리 수, 메모리 사용량, 서버 시간 등 WebSocket 스트리밍 전반의 상태를 조회합니다")
public ResponseEntity<Map<String, Object>> getStreamingStatus() {
Map<String, Object> status = new HashMap<>();
@ -50,7 +55,9 @@ public class WebSocketMonitoringController {
* 특정 쿼리의 상태 조회
*/
@GetMapping("/query/{queryId}/status")
public ResponseEntity<Map<String, Object>> getQueryStatus(@PathVariable String queryId) {
@Operation(summary = "쿼리 상태 조회", description = "특정 WebSocket 쿼리의 진행 상태, 메시지, 진행률, 스트리밍된 항적 수를 조회합니다")
public ResponseEntity<Map<String, Object>> getQueryStatus(
@Parameter(description = "쿼리 ID") @PathVariable String queryId) {
Map<String, Object> result = new HashMap<>();
var queryStatus = trackStreamingService.getQueryStatus(queryId);
@ -66,6 +73,7 @@ public class WebSocketMonitoringController {
* WebSocket 테스트 페이지로 리다이렉트
*/
@GetMapping("/test")
@Operation(summary = "WebSocket 테스트 페이지", description = "WebSocket 스트리밍 테스트 페이지로 리다이렉트합니다")
public RedirectView redirectToTestPage() {
return new RedirectView("/websocket/track-streaming-test.html");
}

파일 보기

@ -5,6 +5,7 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
@ -21,6 +22,7 @@ import java.time.format.DateTimeFormatter;
*/
@Slf4j
@Service
@Profile("!query") // query 프로파일에서는 배치 메타데이터 정리 비활성화
@ConditionalOnProperty(name = "vessel.batch.scheduler.enabled", havingValue = "true", matchIfMissing = true)
public class BatchMetadataCleanupService {

파일 보기

@ -102,6 +102,9 @@ logging:
# 개발 환경 배치 설정 (성능 최적화)
vessel: # spring 하위가 아닌 최상위 레벨
# 통합선박 설정
integration:
enabled: true # 통합선박 기능 활성화 여부
batch:
# Area Statistics 처리를 위한 별도 설정
area-statistics:
@ -194,6 +197,12 @@ vessel: # spring 하위가 아닌 최상위 레벨
cache-enabled: true
cache-size: 2000
cache:
latest-position:
enabled: true # 운영 환경: 활성화
ttl-minutes: 60 # 60분 TTL
max-size: 80000 # 최대 80,000척
refresh-interval-minutes: 2 # 2분치 데이터 조회 (수집 지연 고려)
# 비정상 궤적 검출 설정 (개선됨)
abnormal-detection:

파일 보기

@ -1,20 +1,42 @@
# 로컬 개발 환경 설정
# 단일 PostgreSQL 인스턴스를 모든 DataSource가 공유
server:
port: 8090
spring:
# 로컬 환경에서는 하나의 DB만 사용
datasource:
url: jdbc:postgresql://localhost:5432/mdadb2?stringtype=unspecified&currentSchema=signal&TimeZone=Asia/Seoul
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: LocalHikariPool
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
maximum-pool-size: 20
minimum-idle: 5
# 로컬 수집 DB (동일 DB 사용)
collect:
jdbc-url: jdbc:postgresql://localhost:5432/mdadb2?stringtype=unspecified&currentSchema=signal&TimeZone=Asia/Seoul
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: LocalCollectPool
maximum-pool-size: 10
minimum-idle: 2
# 로컬 조회 DB (동일 DB 사용)
query:
jdbc-url: jdbc:postgresql://localhost:5432/mdadb2?stringtype=unspecified&currentSchema=signal&TimeZone=Asia/Seoul
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: LocalQueryPool
maximum-pool-size: 10
minimum-idle: 2
# 로컬 배치 메타 DB (동일 DB 사용)
batch:
jdbc-url: jdbc:postgresql://localhost:5432/mdadb2?stringtype=unspecified&currentSchema=signal&TimeZone=Asia/Seoul
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: LocalBatchPool
maximum-pool-size: 5
minimum-idle: 1
batch:
job:
@ -27,13 +49,35 @@ logging:
level:
root: INFO
gc.mda.signal_batch: DEBUG
gc.mda.signal_batch.batch: DEBUG
org.springframework.batch: DEBUG
org.springframework.jdbc: DEBUG
org.springframework.transaction: DEBUG
# 로컬 환경 배치 설정
vessel:
# 통합선박 기능 비활성화 (로컬에서는 테이블 없을 수 있음)
integration:
enabled: false
batch:
# 스케줄러 설정 - 로컬에서는 비활성화
scheduler:
enabled: false
incremental:
delay-minutes: 3
# 비정상 궤적 검출 비활성화
abnormal-detection:
enabled: false
# 로컬 최적화 설정
chunk-size: 1000
page-size: 1000
partition-size: 4 # 로컬에서는 적은 수의 파티션 사용
partition-size: 4
fetch-size: 10000
# 캐시 비활성화
cache:
latest-position:
enabled: false

파일 보기

@ -0,0 +1,278 @@
server:
port: 8090
tomcat:
threads:
max: 200
min-spare: 10
connection-timeout: 60000 # 60초로 증가
max-http-form-post-size: 50MB # 50MB로 증가
max-swallow-size: 50MB # 50MB로 증가
max-connections: 10000 # 최대 연결 수
accept-count: 100 # 대기열 크기
spring:
datasource:
# 원격 수집 DB
collect:
jdbc-url: jdbc:postgresql://10.26.252.39:5432/mdadb?currentSchema=signal&options=-csearch_path=signal,public&assumeMinServerVersion=12&reWriteBatchedInserts=true
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: CollectHikariPool
connection-timeout: 30000 # 원격 연결이므로 타임아웃 증가
idle-timeout: 600000
max-lifetime: 1800000
maximum-pool-size: 20 # 10 -> 20 증가
minimum-idle: 5 # 2 -> 5 증가
# 원격 연결 안정성을 위한 추가 설정
connection-test-query: SELECT 1
validation-timeout: 5000
leak-detection-threshold: 60000 # 커넥션 누수 감지 (60초)
connection-init-sql: "SET TIME ZONE 'Asia/Seoul'; SET search_path TO signal, public;"
# 원격 조회 DB
query:
jdbc-url: jdbc:postgresql://10.29.17.90:5432/mpcdb2?currentSchema=signal&assumeMinServerVersion=12&reWriteBatchedInserts=true
username: mpc
password: mpc#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: QueryHikariPool
connection-timeout: 5000
idle-timeout: 600000
max-lifetime: 1800000
maximum-pool-size: 60 # 20 -> 60 증가
minimum-idle: 10 # 5 -> 10 증가
connection-test-query: SELECT 1
validation-timeout: 5000
leak-detection-threshold: 60000 # 커넥션 누수 감지 (60초)
# PostGIS 함수를 위해 public 스키마를 search_path에 명시적으로 추가
connection-init-sql: "SET TIME ZONE 'Asia/Seoul'; SET search_path TO signal, public, pg_catalog;"
statement-cache-size: 250
data-source-properties:
prepareThreshold: 3
preparedStatementCacheQueries: 250
# 로컬 배치 메타 DB
batch:
jdbc-url: jdbc:postgresql://10.26.252.51:5432/mdadb?currentSchema=public&assumeMinServerVersion=12&reWriteBatchedInserts=true
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: BatchHikariPool
maximum-pool-size: 20 # 10 → 20 증가
minimum-idle: 10 # 2 → 10 증가
connection-timeout: 30000 # 30초 타임아웃
idle-timeout: 600000
max-lifetime: 1800000
leak-detection-threshold: 60000 # 커넥션 누수 감지 (60초)
connection-init-sql: "SET TIME ZONE 'Asia/Seoul'; SET search_path TO public, signal;"
# Request 크기 설정
servlet:
multipart:
max-file-size: 50MB
max-request-size: 50MB
# Spring Batch 설정
batch:
job:
enabled: false
jdbc:
initialize-schema: never # always에서 never로 변경 (이미 수동으로 생성했으므로)
table-prefix: BATCH_
logging:
level:
root: INFO
gc.mda.signal_batch: DEBUG
gc.mda.signal_batch.global.util: INFO
gc.mda.signal_batch.global.websocket.service: INFO
gc.mda.signal_batch.batch.writer: INFO
gc.mda.signal_batch.batch.reader: INFO
gc.mda.signal_batch.batch.processor: INFO
gc.mda.signal_batch.domain: INFO
gc.mda.signal_batch.monitoring: DEBUG
gc.mda.signal_batch.monitoring.controller: INFO
org.springframework.batch: INFO
org.springframework.jdbc: WARN
org.postgresql: WARN
com.zaxxer.hikari: INFO
# 개발 환경 배치 설정 (성능 최적화)
vessel: # spring 하위가 아닌 최상위 레벨
# 통합선박 설정
integration:
enabled: true # 통합선박 기능 활성화 여부
batch:
# Area Statistics 처리를 위한 별도 설정
area-statistics:
chunk-size: 1000 # 5000 → 1000
batch-size: 500 # 새로 추가
chunk-size: 10000
page-size: 5000
partition-size: 12
# 성능 최적화 설정
optimization:
enabled: true
dynamic-chunk-sizing: true
memory-optimization: true
cache-optimization: true
thread-pool-optimization: true
# 동적 청크 크기 조정
chunk:
min-size: 1000
max-size: 20000
adjustment-factor: 0.2
# 메모리 임계값
memory:
warning-threshold: 70
critical-threshold: 85
optimization-threshold: 80
# 캐시 설정
cache:
min-hit-rate: 70
area-boundary-size: 5000
# Reader 최적화
fetch-size: 200000
use-cursor-reader: true
# Bulk Insert 최적화
bulk-insert:
batch-size: 10000
parallel-threads: 8
use-binary-copy: false
# Writer 설정
writer:
use-advisory-lock: false
parallel-threads: 4
# 재시도 설정
retry:
max-attempts: 3
initial-interval: 1000
max-interval: 10000
multiplier: 2
# 스케줄러 설정
scheduler:
enabled: true
incremental:
delay-minutes: 3 # 데이터 수집 지연 고려
# Batch 메타데이터 정리 설정
metadata:
cleanup:
enabled: true # 자동 정리 활성화
retention-days: 30 # 보존 기간 (30일)
dry-run: false # false: 실제 삭제, true: 테스트만
# 궤적 비정상 검출 설정
track:
abnormal-detection:
large-gap-threshold-hours: 6 # 이 시간 이상 gap은 연결 안함
extreme-speed-threshold: 1000 # 이 속도 이상은 무조건 비정상 (knots)
enable-merger-filtering: false # VesselTrackMerger 필터링 활성화 (기본: false)
# 타임아웃 설정
query-timeout: 1800 # 30분
lock:
timeout: 30
max-retry: 5
# Health Check 설정
health:
job-timeout-hours: 2
min-partition-count: 1
# 그리드 설정
grid:
mode: haegu
haegu:
cache-enabled: true
cache-size: 2000
# 선박 최신 위치 캐시 설정 (운영 환경 활성화)
cache:
latest-position:
enabled: true # 운영 환경: 활성화
ttl-minutes: 60 # 60분 TTL
max-size: 50000 # 최대 50,000척
refresh-interval-minutes: 2 # 2분치 데이터 조회 (수집 지연 고려)
# 비정상 궤적 검출 설정 (개선됨)
abnormal-detection:
enabled: true
5min-speed-threshold: 500 # 5분 집계 비정상 속도 임계값 (500 knots로 완화)
# 비정상 판정 기준 (명백한 비정상만 검출하도록 완화)
thresholds:
# 정박/저속 판단 기준
min-movement-nm: 0.05 # 최소 이동거리 (정박 판단)
stationary-speed-knots: 0.5 # 정박 속도 기준
# 선박 관련 임계값
vessel-physical-limit-knots: 100.0 # 선박 물리적 한계
vessel-abnormal-speed-knots: 200.0 # 선박 명백한 비정상 속도
# 항공기 관련 임계값
aircraft-physical-limit-knots: 600.0 # 항공기 물리적 한계
aircraft-abnormal-speed-knots: 800.0 # 항공기 명백한 비정상 속도
# 거리 관련 임계값
base-distance-5min-nm: 20.0 # 5분 기준 거리 (20nm로 완화)
extreme-distance-5min-nm: 100.0 # 5분간 극단적 이동거리
# Hourly/Daily 전용 임계값
hourly-daily-speed-limit: 500.0 # 시간/일 집계시 극단적 속도만 검출
# 기타 설정
distance-tolerance: 3.0 # 거리 허용 배수 (3.0으로 완화)
time-scaling-method: "sqrt" # 시간 스케일링 방법 (sqrt)
# 캐시 설정
cache:
previous-track-size: 10000 # 이전 궤적 캐시 크기
ttl-hours: 24 # 캐시 TTL
# 처리 옵션
processing:
remove-abnormal-segments: false # 비정상 구간 제거 여부
save-corrected-tracks: true # 보정된 궤적 저장 여부
exclude-stationary-vessels: true # 정박 선박 제외 여부
lenient-mode: true # 관대한 모드 활성화
# 파티션 관리 설정 (운영 환경 - application.yml 설정 오버라이드)
partition:
# 운영 환경에서는 더 긴 보관 기간 설정 가능
default-retention:
daily-partitions-retention-days: 7 # 일별 파티션 7일 보관
monthly-partitions-retention-months: 3 # 월별 파티션 3개월 보관
tables:
# 중요 데이터는 더 오래 보관
t_area_vessel_tracks:
retention-days: 60 # 구역별 선박 항적: 60일
t_grid_vessel_tracks:
retention-days: 30 # 해구별 선박 항적: 30일
t_abnormal_tracks:
retention-months: 0 # 비정상 항적: 무한 보관
# 액추에이터 설정
management:
endpoints:
web:
exposure:
include: health,info,metrics,prometheus,env,loggers,threaddump,heapdump,scheduledtasks
endpoint:
health:
show-details: always
show-components: always
metrics:
tags:
application: vessel-batch-aggregation
environment: prod

파일 보기

@ -1,5 +1,5 @@
server:
port: 8090
port: 18090
tomcat:
threads:
max: 200
@ -14,7 +14,7 @@ spring:
datasource:
# 원격 수집 DB
collect:
jdbc-url: jdbc:postgresql://10.26.252.39:5432/mdadb?currentSchema=signal&options=-csearch_path=signal,public&assumeMinServerVersion=12&reWriteBatchedInserts=true
jdbc-url: jdbc:postgresql://10.188.171.182:5432/mdadb?currentSchema=signal&options=-csearch_path=signal,public&assumeMinServerVersion=12&reWriteBatchedInserts=true
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
@ -33,9 +33,9 @@ spring:
# 원격 조회 DB
query:
jdbc-url: jdbc:postgresql://10.29.17.90:5432/mpcdb2?currentSchema=signal&assumeMinServerVersion=12&reWriteBatchedInserts=true
username: mpc
password: mpc#8932
jdbc-url: jdbc:postgresql://10.188.171.182:5432/mdadb?currentSchema=signal&assumeMinServerVersion=12&reWriteBatchedInserts=true
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
hikari:
pool-name: QueryHikariPool
@ -54,9 +54,9 @@ spring:
prepareThreshold: 3
preparedStatementCacheQueries: 250
# 로컬 배치 메타 DB
# 로컬 배치 메타 DB (signal 스키마 사용)
batch:
jdbc-url: jdbc:postgresql://10.26.252.51:5432/mdadb?currentSchema=public&assumeMinServerVersion=12&reWriteBatchedInserts=true
jdbc-url: jdbc:postgresql://10.188.171.182:5432/mdadb?currentSchema=signal&assumeMinServerVersion=12&reWriteBatchedInserts=true
username: mda
password: mda#8932
driver-class-name: org.postgresql.Driver
@ -68,7 +68,7 @@ spring:
idle-timeout: 600000
max-lifetime: 1800000
leak-detection-threshold: 60000 # 커넥션 누수 감지 (60초)
connection-init-sql: "SET TIME ZONE 'Asia/Seoul'; SET search_path TO public, signal;"
connection-init-sql: "SET TIME ZONE 'Asia/Seoul'; SET search_path TO signal, public;"
# Request 크기 설정
servlet:
@ -82,7 +82,7 @@ spring:
enabled: false
jdbc:
initialize-schema: never # always에서 never로 변경 (이미 수동으로 생성했으므로)
table-prefix: BATCH_
table-prefix: signal.BATCH_
logging:
level:
@ -103,6 +103,9 @@ logging:
# 개발 환경 배치 설정 (성능 최적화)
vessel: # spring 하위가 아닌 최상위 레벨
# 통합선박 설정
integration:
enabled: true # 통합선박 기능 활성화 여부
batch:
# Area Statistics 처리를 위한 별도 설정
area-statistics:
@ -167,13 +170,13 @@ vessel: # spring 하위가 아닌 최상위 레벨
metadata:
cleanup:
enabled: true # 자동 정리 활성화
retention-days: 30 # 보존 기간 (30일)
retention-days: 21 # 보존 기간 (21일)
dry-run: false # false: 실제 삭제, true: 테스트만
# 궤적 비정상 검출 설정
track:
abnormal-detection:
large-gap-threshold-hours: 6 # 이 시간 이상 gap은 연결 안함
large-gap-threshold-hours: 4 # 이 시간 이상 gap은 연결 안함
extreme-speed-threshold: 1000 # 이 속도 이상은 무조건 비정상 (knots)
enable-merger-filtering: false # VesselTrackMerger 필터링 활성화 (기본: false)
@ -200,7 +203,7 @@ vessel: # spring 하위가 아닌 최상위 레벨
latest-position:
enabled: true # 운영 환경: 활성화
ttl-minutes: 60 # 60분 TTL
max-size: 50000 # 최대 50,000척
max-size: 60000 # 최대 60,000척
refresh-interval-minutes: 2 # 2분치 데이터 조회 (수집 지연 고려)
@ -238,9 +241,9 @@ vessel: # spring 하위가 아닌 최상위 레벨
ttl-hours: 24 # 캐시 TTL
# 처리 옵션
processing:
remove-abnormal-segments: false # 비정상 구간 제거 여부
remove-abnormal-segments: true # 비정상 구간 제거 여부
save-corrected-tracks: true # 보정된 궤적 저장 여부
exclude-stationary-vessels: true # 정박 선박 제외 여부
exclude-stationary-vessels: false # 정박 선박 제외 여부
lenient-mode: true # 관대한 모드 활성화
# 파티션 관리 설정 (운영 환경 - application.yml 설정 오버라이드)

파일 보기

@ -126,11 +126,17 @@ management:
# 커스텀 설정
vessel:
# 데이터 필터 설정
filter:
zero-coordinates:
enabled: ${FILTER_ZERO_COORDS:false} # 0 근처 좌표 필터링 (기본: 비활성화)
# true일 경우 lat/lon이 -1 ~ 1 범위인 데이터를 제외
# 통합선박 설정
integration:
enabled: ${INTEGRATION_ENABLED:true} # 통합선박 기능 활성화 여부
enabled: ${INTEGRATION_ENABLED:false} # 통합선박 기능 활성화 여부
cache:
refresh-cron: "0 0 3 * * ?" # 매일 03:00 갱신
refresh-cron: "0 0 6 * * ?" # 매일 06:00 갱신
batch:
chunk-size: ${BATCH_CHUNK_SIZE:10000}
@ -160,7 +166,11 @@ vessel:
# 테이블별 보관 기간 (기본값과 다를 경우만 설정)
tables:
# 일별 파티션 테이블 (단위: 일)
# CollectDB 일별 파티션 테이블 (단위: 일)
sig_test:
retention-days: 14 # 14일 보관
# QueryDB 일별 파티션 테이블 (단위: 일)
t_vessel_tracks_5min:
retention-days: 7 # 7일 보관
t_area_vessel_tracks:
@ -180,9 +190,7 @@ vessel:
t_vessel_tracks_hourly:
retention-months: 2 # 2개월 보관
t_vessel_tracks_daily:
retention-months: 6 # 6개월 보관
t_grid_vessel_tracks_hourly:
retention-months: 2 # 2개월 보관
retention-months: 60 # 60개월 보관
t_grid_tracks_summary_hourly:
retention-months: 2 # 2개월 보관
t_grid_tracks_summary_daily:
@ -246,6 +254,11 @@ vessel:
max-size: 50000 # 최대 선박 수: 50,000척
refresh-interval-minutes: 2 # 갱신 주기 데이터 범위: 2분치 조회 (수집 지연 고려)
# 이전 버킷 위치 캐시 설정 (버킷 간 점프 검출용)
previous-bucket:
ttl-minutes: 120 # 캐시 TTL: 120분 (위성 AIS 30~60분 간격 고려)
max-size: 100000 # 최대 선박 수: 100,000척 (2시간 누적 고려)
# Swagger/OpenAPI 설정
springdoc:
api-docs:

파일 보기

@ -3,7 +3,7 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Vessel Batch Admin</title>
<title>선박 배치 관리</title>
<!-- Local CSS -->
<link href="/libs/css/bootstrap.min.css" rel="stylesheet">
<link href="/libs/css/bootstrap-icons.css" rel="stylesheet">
@ -129,7 +129,7 @@
<nav class="navbar navbar-expand-lg navbar-dark bg-dark">
<div class="container-fluid">
<a class="navbar-brand" href="#">
<i class="bi bi-speedometer2"></i> Vessel Batch Admin
<i class="bi bi-speedometer2"></i> 선박 배치 관리
</a>
<div class="navbar-nav ms-auto">
<span class="navbar-text" id="currentTime"></span>
@ -143,36 +143,36 @@
<button class="sidebar-toggle" id="sidebarToggle" onclick="toggleSidebar()" title="사이드바 토글">
<i class="bi bi-chevron-left" id="toggleIcon"></i>
</button>
<div class="list-group">
<a href="#" class="list-group-item list-group-item-action active" data-page="dashboard" title="Dashboard">
<i class="bi bi-speedometer2"></i> <span class="sidebar-text">Dashboard</span>
<div class="list-group">
<a href="#" class="list-group-item list-group-item-action active" data-page="dashboard" title="대시보드">
<i class="bi bi-speedometer2"></i> <span class="sidebar-text">대시보드</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="jobs" title="Job Management">
<i class="bi bi-play-circle"></i> <span class="sidebar-text">Job Management</span>
<a href="#" class="list-group-item list-group-item-action" data-page="jobs" title="Job 관리">
<i class="bi bi-play-circle"></i> <span class="sidebar-text">Job 관리</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="history" title="Execution History">
<i class="bi bi-clock-history"></i> <span class="sidebar-text">Execution History</span>
<a href="#" class="list-group-item list-group-item-action" data-page="history" title="실행 이력">
<i class="bi bi-clock-history"></i> <span class="sidebar-text">실행 이력</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="monitoring" title="Monitoring">
<i class="bi bi-graph-up"></i> <span class="sidebar-text">Monitoring</span>
<a href="#" class="list-group-item list-group-item-action" data-page="monitoring" title="모니터링">
<i class="bi bi-graph-up"></i> <span class="sidebar-text">모니터링</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="gis" title="GIS Monitoring">
<i class="bi bi-geo-alt-fill"></i> <span class="sidebar-text">GIS Monitoring</span>
<a href="#" class="list-group-item list-group-item-action" data-page="gis" title="GIS 모니터링">
<i class="bi bi-geo-alt-fill"></i> <span class="sidebar-text">GIS 모니터링</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="websocket" title="WebSocket Test">
<i class="bi bi-broadcast"></i> <span class="sidebar-text">WebSocket Test</span>
<a href="#" class="list-group-item list-group-item-action" data-page="websocket" title="WebSocket 테스트">
<i class="bi bi-broadcast"></i> <span class="sidebar-text">WebSocket 테스트</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="websocket-gis" title="WebSocket GIS">
<i class="bi bi-globe2"></i> <span class="sidebar-text">WebSocket GIS</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="loadtest" title="Load Test">
<i class="bi bi-speedometer"></i> <span class="sidebar-text">Load Test</span>
<a href="#" class="list-group-item list-group-item-action" data-page="loadtest" title="부하 테스트">
<i class="bi bi-speedometer"></i> <span class="sidebar-text">부하 테스트</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="abnormal" title="Abnormal Tracks">
<i class="bi bi-exclamation-triangle"></i> <span class="sidebar-text">Abnormal Tracks</span>
<a href="#" class="list-group-item list-group-item-action" data-page="abnormal" title="비정상 궤적">
<i class="bi bi-exclamation-triangle"></i> <span class="sidebar-text">비정상 궤적</span>
</a>
<a href="#" class="list-group-item list-group-item-action" data-page="settings" title="Settings">
<i class="bi bi-gear"></i> <span class="sidebar-text">Settings</span>
<a href="#" class="list-group-item list-group-item-action" data-page="settings" title="설정">
<i class="bi bi-gear"></i> <span class="sidebar-text">설정</span>
</a>
</div>
</div>
@ -181,14 +181,14 @@
<div id="mainContent">
<!-- Dashboard Page -->
<div id="dashboard-page" class="page-content">
<h2>Dashboard</h2>
<h2>대시보드</h2>
<!-- 요약 카드 -->
<div class="row mb-4">
<div class="col-md-3">
<div class="card metric-card">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Running Jobs</h6>
<h6 class="card-subtitle mb-2 text-muted">실행 중인 Job</h6>
<h2 class="card-title" id="runningJobsCount">0</h2>
</div>
</div>
@ -196,7 +196,7 @@
<div class="col-md-3">
<div class="card metric-card" style="border-color: #28a745;">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Success Rate (24h)</h6>
<h6 class="card-subtitle mb-2 text-muted">성공률 (24시간)</h6>
<h2 class="card-title" id="successRate">0%</h2>
</div>
</div>
@ -204,7 +204,7 @@
<div class="col-md-3">
<div class="card metric-card" style="border-color: #ffc107;">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Records/Hour</h6>
<h6 class="card-subtitle mb-2 text-muted">시간당 처리량</h6>
<h2 class="card-title" id="throughput">0</h2>
</div>
</div>
@ -212,19 +212,19 @@
<div class="col-md-3">
<div class="card metric-card" style="border-color: #dc3545;">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Failed Jobs (24h)</h6>
<h6 class="card-subtitle mb-2 text-muted">실패한 Job (24시간)</h6>
<h2 class="card-title" id="failedCount">0</h2>
</div>
</div>
</div>
</div>
<!-- 궤적 집계 메트릭 -->
<div class="row mb-4">
<div class="col-md-3">
<div class="card metric-card" style="border-color: #17a2b8;">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Active Vessels (1h)</h6>
<h6 class="card-subtitle mb-2 text-muted">활성 선박 (1시간)</h6>
<h2 class="card-title" id="activeVessels">0</h2>
</div>
</div>
@ -232,7 +232,7 @@
<div class="col-md-3">
<div class="card metric-card" style="border-color: #6610f2;">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Total Distance (1h)</h6>
<h6 class="card-subtitle mb-2 text-muted">총 이동거리 (1시간)</h6>
<h2 class="card-title" id="totalDistance">0 nm</h2>
</div>
</div>
@ -240,7 +240,7 @@
<div class="col-md-3">
<div class="card metric-card" style="border-color: #e83e8c;">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Active Haegus</h6>
<h6 class="card-subtitle mb-2 text-muted">활성 대해구</h6>
<h2 class="card-title" id="activeHaegus">0</h2>
</div>
</div>
@ -248,7 +248,7 @@
<div class="col-md-3">
<div class="card metric-card" style="border-color: #fd7e14;">
<div class="card-body">
<h6 class="card-subtitle mb-2 text-muted">Abnormal Tracks (24h)</h6>
<h6 class="card-subtitle mb-2 text-muted">비정상 궤적 (24시간)</h6>
<h2 class="card-title" id="abnormalTracks">0</h2>
</div>
</div>
@ -258,18 +258,18 @@
<!-- 실행 중인 Job -->
<div class="card mb-4">
<div class="card-header">
<h5><i class="bi bi-activity"></i> Running Jobs</h5>
<h5><i class="bi bi-activity"></i> 실행 중인 Job</h5>
</div>
<div class="card-body">
<div class="table-responsive">
<table class="table table-hover" id="runningJobsTable">
<thead>
<tr>
<th>Job Name</th>
<th>Execution ID</th>
<th>Start Time</th>
<th>Progress</th>
<th>Actions</th>
<th>Job 이름</th>
<th>실행 ID</th>
<th>시작 시간</th>
<th>진행률</th>
<th>작업</th>
</tr>
</thead>
<tbody></tbody>
@ -283,7 +283,7 @@
<div class="col-md-6">
<div class="card">
<div class="card-header">
<h5>Processing Trend (Last 7 Days)</h5>
<h5>처리량 추이 (최근 7일)</h5>
</div>
<div class="card-body">
<div class="chart-container">
@ -295,7 +295,7 @@
<div class="col-md-6">
<div class="card">
<div class="card-header">
<h5>Job Status Distribution</h5>
<h5>Job 상태 분포</h5>
</div>
<div class="card-body">
<div class="chart-container">
@ -309,34 +309,34 @@
<!-- Job Management Page -->
<div id="jobs-page" class="page-content" style="display: none;">
<h2>Job Management</h2>
<h2>Job 관리</h2>
<div class="card">
<div class="card-body">
<h5>Run New Job</h5>
<h5>새 Job 실행</h5>
<form id="runJobForm">
<div class="row">
<div class="col-md-3">
<label>Job Type</label>
<label>Job 유형</label>
<select class="form-select" id="jobType">
<option value="vesselAggregationJob">Vessel Aggregation (Position)</option>
<option value="vesselTrackAggregationJob">Vessel Track Aggregation</option>
<option value="hourlyAggregationJob">Hourly Aggregation</option>
<option value="dailyAggregationJob">Daily Aggregation</option>
<option value="vesselAggregationJob">선박 위치 집계</option>
<option value="vesselTrackAggregationJob">선박 항적 집계</option>
<option value="hourlyAggregationJob">시간 집계</option>
<option value="dailyAggregationJob">일간 집계</option>
</select>
</div>
<div class="col-md-3">
<label>Start Time</label>
<label>시작 시간</label>
<input type="datetime-local" class="form-control" id="startTime">
</div>
<div class="col-md-3">
<label>End Time</label>
<label>종료 시간</label>
<input type="datetime-local" class="form-control" id="endTime">
</div>
<div class="col-md-3">
<label>&nbsp;</label>
<button type="submit" class="btn btn-primary d-block">
<i class="bi bi-play-fill"></i> Run Job
<i class="bi bi-play-fill"></i> Job 실행
</button>
</div>
</div>
@ -345,52 +345,52 @@
</div>
<div class="mt-4">
<h5>Scheduled Jobs</h5>
<h5>예약된 Job</h5>
<table class="table">
<thead>
<tr>
<th>Schedule</th>
<th>Job Name</th>
<th>Next Run</th>
<th>Status</th>
<th>Actions</th>
<th>스케줄</th>
<th>Job 이름</th>
<th>다음 실행</th>
<th>상태</th>
<th>작업</th>
</tr>
</thead>
<tbody>
<tr>
<td>0 3,8,13,18,23,28,33,38,43,48,53,58 * * * *</td>
<td>Vessel Position Aggregation</td>
<td>선박 위치 집계</td>
<td id="nextPosition">-</td>
<td><span class="badge bg-success">Active</span></td>
<td><span class="badge bg-success">활성</span></td>
<td>
<button class="btn btn-sm btn-warning">Pause</button>
<button class="btn btn-sm btn-warning">일시정지</button>
</td>
</tr>
<tr>
<td>0 4,9,14,19,24,29,34,39,44,49,54,59 * * * *</td>
<td>Vessel Track Aggregation</td>
<td>선박 항적 집계</td>
<td id="nextTrack">-</td>
<td><span class="badge bg-success">Active</span></td>
<td><span class="badge bg-success">활성</span></td>
<td>
<button class="btn btn-sm btn-warning">Pause</button>
<button class="btn btn-sm btn-warning">일시정지</button>
</td>
</tr>
<tr>
<td>0 10 * * * *</td>
<td>Hourly Aggregation</td>
<td>시간 집계</td>
<td id="nextHourly">-</td>
<td><span class="badge bg-success">Active</span></td>
<td><span class="badge bg-success">활성</span></td>
<td>
<button class="btn btn-sm btn-warning">Pause</button>
<button class="btn btn-sm btn-warning">일시정지</button>
</td>
</tr>
<tr>
<td>0 0 1 * * *</td>
<td>Daily Aggregation</td>
<td>일간 집계</td>
<td id="nextDaily">-</td>
<td><span class="badge bg-success">Active</span></td>
<td><span class="badge bg-success">활성</span></td>
<td>
<button class="btn btn-sm btn-warning">Pause</button>
<button class="btn btn-sm btn-warning">일시정지</button>
</td>
</tr>
</tbody>
@ -400,26 +400,26 @@
<!-- History Page -->
<div id="history-page" class="page-content" style="display: none;">
<h2>Execution History</h2>
<h2>실행 이력</h2>
<div class="card">
<div class="card-body">
<div class="row mb-3">
<div class="col-md-4">
<input type="text" class="form-control" id="historySearch"
placeholder="Search by job name...">
placeholder="Job 이름으로 검색...">
</div>
<div class="col-md-2">
<select class="form-select" id="historyFilter">
<option value="">All Status</option>
<option value="COMPLETED">Completed</option>
<option value="FAILED">Failed</option>
<option value="STOPPED">Stopped</option>
<option value="">전체 상태</option>
<option value="COMPLETED">완료</option>
<option value="FAILED">실패</option>
<option value="STOPPED">중지</option>
</select>
</div>
<div class="col-md-2">
<button class="btn btn-primary" onclick="loadHistory()">
<i class="bi bi-arrow-clockwise"></i> Refresh
<i class="bi bi-arrow-clockwise"></i> 새로고침
</button>
</div>
</div>
@ -428,14 +428,14 @@
<table class="table table-hover" id="historyTable">
<thead>
<tr>
<th>Execution ID</th>
<th>Job Name</th>
<th>Start Time</th>
<th>End Time</th>
<th>Duration</th>
<th>Status</th>
<th>Records</th>
<th>Actions</th>
<th>실행 ID</th>
<th>Job 이름</th>
<th>시작 시간</th>
<th>종료 시간</th>
<th>소요 시간</th>
<th>상태</th>
<th>레코드</th>
<th>작업</th>
</tr>
</thead>
<tbody></tbody>
@ -447,13 +447,13 @@
<!-- Monitoring Page -->
<div id="monitoring-page" class="page-content" style="display: none;">
<h2>Real-time Monitoring</h2>
<h2>실시간 모니터링</h2>
<div class="row">
<div class="col-md-8">
<div class="card">
<div class="card-header">
<h5>Processing Rate</h5>
<h5>처리율</h5>
</div>
<div class="card-body">
<div class="chart-container">
@ -465,27 +465,27 @@
<div class="col-md-4">
<div class="card">
<div class="card-header">
<h5>System Metrics</h5>
<h5>시스템 메트릭</h5>
</div>
<div class="card-body">
<div class="mb-3">
<label>Memory Usage</label>
<label>메모리 사용량</label>
<div class="progress">
<div class="progress-bar" id="memoryBar" style="width: 0%">0%</div>
</div>
</div>
<div class="mb-3">
<label>CPU Usage</label>
<label>CPU 사용량</label>
<div class="progress">
<div class="progress-bar bg-warning" id="cpuBar" style="width: 0%">0%</div>
</div>
</div>
<div class="mb-3">
<label>Active Threads</label>
<label>활성 스레드</label>
<h4 id="activeThreads">0</h4>
</div>
<div class="mb-3">
<label>DB Connections</label>
<label>DB 연결</label>
<h4 id="dbConnections">0</h4>
</div>
</div>
@ -516,43 +516,43 @@
<!-- Settings Page -->
<div id="settings-page" class="page-content" style="display: none;">
<h2>Settings</h2>
<h2>설정</h2>
<div class="card">
<div class="card-body">
<h5>Batch Configuration</h5>
<h5>배치 설정</h5>
<form id="settingsForm">
<div class="row">
<div class="col-md-6">
<div class="mb-3">
<label>Chunk Size</label>
<label>청크 크기</label>
<input type="number" class="form-control" id="chunkSize" value="10000">
</div>
<div class="mb-3">
<label>Thread Pool Size</label>
<label>스레드 풀 크기</label>
<input type="number" class="form-control" id="threadPoolSize" value="8">
</div>
<div class="mb-3">
<label>Skip Limit</label>
<label>Skip 제한</label>
<input type="number" class="form-control" id="skipLimit" value="1000">
</div>
</div>
<div class="col-md-6">
<div class="mb-3">
<label>Query Timeout (seconds)</label>
<label>쿼리 타임아웃 (초)</label>
<input type="number" class="form-control" id="queryTimeout" value="600">
</div>
<div class="mb-3">
<label>Lock Timeout (seconds)</label>
<label>Lock 타임아웃 (초)</label>
<input type="number" class="form-control" id="lockTimeout" value="10">
</div>
<div class="mb-3">
<label>Partition Retention Days</label>
<label>파티션 보관 일수</label>
<input type="number" class="form-control" id="retentionDays" value="30">
</div>
</div>
</div>
<button type="submit" class="btn btn-primary">Save Settings</button>
<button type="submit" class="btn btn-primary">설정 저장</button>
</form>
</div>
</div>
@ -566,7 +566,7 @@
<div class="modal-dialog modal-lg">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">Job Execution Details</h5>
<h5 class="modal-title">Job 실행 상세</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal"></button>
</div>
<div class="modal-body" id="modalBody">

파일 크기가 너무 크기때문에 변경 상태를 표시하지 않습니다. Load Diff

파일 크기가 너무 크기때문에 변경 상태를 표시하지 않습니다. Load Diff

파일 보기

@ -3,7 +3,7 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Chunked GIS Streaming v2.0 - Vessel Animation</title>
<title>청크 GIS 스트리밍 v2.0 - 선박 애니메이션</title>
<!-- 외부 라이브러리 CSS (폐쇄망 대응) -->
<link href="/libs/css/maplibre-gl.css" rel="stylesheet">
@ -20,7 +20,7 @@
<!-- 제어 패널 -->
<div class="control-panel">
<h5 class="mb-3">
<i class="bi bi-globe2"></i> Chunked GIS v2.0 + Animation
<i class="bi bi-globe2"></i> 청크 GIS v2.0 + 애니메이션
</h5>
<!-- 연결 제어 -->
@ -232,7 +232,7 @@
<div class="log-panel">
<div class="d-flex justify-content-between align-items-center mb-2">
<h6 class="mb-0"><i class="bi bi-terminal"></i> 로그</h6>
<button class="btn btn-sm btn-outline-secondary" onclick="clearLog()">Clear</button>
<button class="btn btn-sm btn-outline-secondary" onclick="clearLog()">초기화</button>
</div>
<div id="logContainer"></div>
</div>

파일 보기

@ -3,7 +3,7 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>GIS Monitoring v2.0 - Vessel Batch</title>
<title>GIS 모니터링 v2.0 - 선박 항적</title>
<!-- 외부 라이브러리 CSS (폐쇄망 대응) -->
<link href="/libs/css/bootstrap.min.css" rel="stylesheet">
@ -19,7 +19,7 @@
<!-- 메인 컨트롤 패널 -->
<div class="gis-control-panel" id="controlPanel">
<div class="panel-header">
<h5 class="mb-0">GIS Monitoring v2.0</h5>
<h5 class="mb-0">GIS 모니터링 v2.0</h5>
<span class="panel-toggle" id="panelToggle" title="패널 접기/펼치기">
<i class="bi bi-chevron-left"></i>
</span>
@ -62,7 +62,7 @@
</button>
<button class="btn btn-success btn-sm w-100" id="sequentialBtn">
<i class="bi bi-diagram-3"></i> Sequential Passage
<i class="bi bi-diagram-3"></i> 순차 통과 분석
</button>
<!-- 선박 목록 섹션 -->
@ -180,7 +180,7 @@
<!-- Sequential Passage 패널 -->
<div class="sequential-panel" id="sequentialPanel">
<div class="d-flex justify-content-between align-items-center mb-3">
<h5 class="mb-0">Sequential Passage 분석</h5>
<h5 class="mb-0">순차 통과 분석</h5>
<button class="btn btn-sm btn-link" id="closeSequentialBtn">
<i class="bi bi-x-lg"></i>
</button>
@ -279,7 +279,7 @@
<!-- 컨텍스트 메뉴 -->
<div class="context-menu" id="contextMenu">
<div class="context-menu-item" id="addToSequential">
<i class="bi bi-plus-circle"></i> Sequential에 추가
<i class="bi bi-plus-circle"></i> 순차 통과에 추가
</div>
<div class="context-menu-item" id="viewAreaDetails">
<i class="bi bi-info-circle"></i> 상세 정보 보기

파일 보기

@ -3,7 +3,7 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Chunked GIS Streaming with Vessel Animation</title>
<title>청크 GIS 스트리밍 - 선박 애니메이션</title>
<!-- CSS -->
<link href="/libs/css/maplibre-gl.css" rel="stylesheet">
@ -216,7 +216,7 @@
<!-- 제어 패널 -->
<div class="control-panel">
<h5 class="mb-3">
<i class="bi bi-globe2"></i> Chunked GIS + Animation
<i class="bi bi-globe2"></i> 청크 GIS + 애니메이션
</h5>
<!-- 연결 제어 -->
@ -428,7 +428,7 @@
<div class="log-panel">
<div class="d-flex justify-content-between align-items-center mb-2">
<h6 class="mb-0"><i class="bi bi-terminal"></i> 로그</h6>
<button class="btn btn-sm btn-outline-secondary" onclick="clearLog()">Clear</button>
<button class="btn btn-sm btn-outline-secondary" onclick="clearLog()">초기화</button>
</div>
<div id="logContainer"></div>
</div>

파일 보기

@ -230,17 +230,19 @@ public class WebSocketLoadTest {
.mapToLong(QueryMetrics::getTotalTracks)
.summaryStatistics();
log.info("실행 시간: 평균 {:.2f}초, 최소 {:.2f}초, 최대 {:.2f}초",
durationStats.getAverage(), durationStats.getMin(), durationStats.getMax());
log.info("실행 시간: 평균 {}초, 최소 {}초, 최대 {}초",
String.format("%.2f", durationStats.getAverage()),
String.format("%.2f", durationStats.getMin()),
String.format("%.2f", durationStats.getMax()));
log.info("청크 수: 평균 {}, 총 {}",
chunkStats.getAverage(), chunkStats.getSum());
log.info("트랙 수: 평균 {}, 총 {}",
trackStats.getAverage(), trackStats.getSum());
double totalThroughput = trackStats.getSum() / durationStats.getSum();
log.info("전체 처리량: {:.2f} tracks/second", totalThroughput);
log.info("전체 처리량: {} tracks/second", String.format("%.2f", totalThroughput));
}
// Inner classes