diff --git a/backend/src/main/java/gc/mda/kcg/domain/analysis/VesselAnalysisService.java b/backend/src/main/java/gc/mda/kcg/domain/analysis/VesselAnalysisService.java index 57fc091..63c40c0 100644 --- a/backend/src/main/java/gc/mda/kcg/domain/analysis/VesselAnalysisService.java +++ b/backend/src/main/java/gc/mda/kcg/domain/analysis/VesselAnalysisService.java @@ -1,49 +1,74 @@ package gc.mda.kcg.domain.analysis; -import gc.mda.kcg.config.CacheConfig; import gc.mda.kcg.domain.fleet.GroupPolygonService; import lombok.RequiredArgsConstructor; -import org.springframework.cache.Cache; -import org.springframework.cache.CacheManager; +import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Service; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +@Slf4j @Service @RequiredArgsConstructor public class VesselAnalysisService { private final VesselAnalysisResultRepository repository; - private final CacheManager cacheManager; private final GroupPolygonService groupPolygonService; + private static final long CACHE_TTL_MS = 2 * 60 * 60_000L; // 2시간 + + /** mmsi → 최신 분석 결과 (인메모리 캐시) */ + private final Map<Long, VesselAnalysisResult> cache = new ConcurrentHashMap<>(); + private volatile Instant lastFetchTime = null; + private volatile long lastUpdatedAt = 0; + /** - * 최근 2시간 내 분석 결과 + 집계 통계를 반환한다. - * mmsi별 최신 1건만. Caffeine 캐시(TTL 5분) 적용. + * 최근 2시간 분석 결과 + 집계 통계. 
+ * - 첫 호출(warmup): 2시간 전체 조회 → 캐시 구축 + * - 이후: lastFetchTime 이후 증분만 조회 → 캐시 병합 + * - 2시간 초과 항목은 evict + * - 값 갱신 시 TTL 타이머 초기화 */ - @SuppressWarnings("unchecked") public Map<String, Object> getLatestResultsWithStats() { - Cache cache = cacheManager.getCache(CacheConfig.VESSEL_ANALYSIS); - if (cache != null) { - Cache.ValueWrapper wrapper = cache.get("data_with_stats"); - if (wrapper != null) { - return (Map<String, Object>) wrapper.get(); + Instant now = Instant.now(); + + if (lastFetchTime == null || (System.currentTimeMillis() - lastUpdatedAt) > CACHE_TTL_MS) { + // warmup: 2시간 전체 조회 + Instant since = now.minus(2, ChronoUnit.HOURS); + List<VesselAnalysisResult> rows = repository.findByAnalyzedAtAfter(since); + cache.clear(); + for (VesselAnalysisResult r : rows) { + cache.merge(r.getMmsi(), r, (old, cur) -> + cur.getAnalyzedAt().isAfter(old.getAnalyzedAt()) ? cur : old); } + lastFetchTime = now; + lastUpdatedAt = System.currentTimeMillis(); + log.info("vessel analysis cache warmup: {} vessels from DB", cache.size()); + } else { + // 증분: lastFetchTime 이후만 조회 + List<VesselAnalysisResult> rows = repository.findByAnalyzedAtAfter(lastFetchTime); + if (!rows.isEmpty()) { + for (VesselAnalysisResult r : rows) { + cache.merge(r.getMmsi(), r, (old, cur) -> + cur.getAnalyzedAt().isAfter(old.getAnalyzedAt()) ? cur : old); + } + lastUpdatedAt = System.currentTimeMillis(); + log.debug("vessel analysis incremental merge: {} new rows", rows.size()); + } + lastFetchTime = now; } - Instant since = Instant.now().minus(2, ChronoUnit.HOURS); - Map<Long, VesselAnalysisResult> latest = new LinkedHashMap<>(); - for (VesselAnalysisResult r : repository.findByAnalyzedAtAfter(since)) { - latest.merge(r.getMmsi(), r, (old, cur) -> - cur.getAnalyzedAt().isAfter(old.getAnalyzedAt()) ? 
cur : old); - } + // 2시간 초과 항목 evict + Instant cutoff = now.minus(2, ChronoUnit.HOURS); + cache.entrySet().removeIf(e -> e.getValue().getAnalyzedAt().isBefore(cutoff)); - // 집계 통계 — 같은 루프에서 계산 + // 집계 통계 int dark = 0, spoofing = 0, critical = 0, high = 0, medium = 0, low = 0; Set clusterIds = new HashSet<>(); - for (VesselAnalysisResult r : latest.values()) { + for (VesselAnalysisResult r : cache.values()) { if (Boolean.TRUE.equals(r.getIsDark())) dark++; if (r.getSpoofingScore() != null && r.getSpoofingScore() > 0.5) spoofing++; String level = r.getRiskLevel(); @@ -62,11 +87,10 @@ public class VesselAnalysisService { } } - // 어구 통계 — group_polygon_snapshots 기반 Map<String, Object> gearStats = groupPolygonService.getGearStats(); Map<String, Object> stats = new LinkedHashMap<>(); - stats.put("total", latest.size()); + stats.put("total", cache.size()); stats.put("dark", dark); stats.put("spoofing", spoofing); stats.put("critical", critical); @@ -77,21 +101,15 @@ public class VesselAnalysisService { stats.put("gearGroups", gearStats.get("gearGroups")); stats.put("gearCount", gearStats.get("gearCount")); - List<VesselAnalysisDto> results = latest.values().stream() + List<VesselAnalysisDto> results = cache.values().stream() .sorted(Comparator.comparingInt(VesselAnalysisResult::getRiskScore).reversed()) .map(VesselAnalysisDto::from) .toList(); - Map<String, Object> response = Map.of( + return Map.of( "count", results.size(), "items", results, "stats", stats ); - - if (cache != null) { - cache.put("data_with_stats", response); - } - - return response; } } diff --git a/deploy/nginx-kcg.conf b/deploy/nginx-kcg.conf index a4c9821..05f0b60 100644 --- a/deploy/nginx-kcg.conf +++ b/deploy/nginx-kcg.conf @@ -94,6 +94,16 @@ server { proxy_ssl_server_name on; } + # ── Google TTS 프록시 (중국어 경고문 음성) ── + location /api/gtts { + rewrite ^/api/gtts(.*)$ /translate_tts$1 break; + proxy_pass https://translate.google.com; + proxy_set_header Host translate.google.com; + proxy_set_header Referer "https://translate.google.com/"; + proxy_set_header User-Agent "Mozilla/5.0 
(Windows NT 10.0; Win64; x64) AppleWebKit/537.36"; + proxy_ssl_server_name on; + } + # gzip gzip on; gzip_types text/plain text/css application/json application/javascript text/xml application/xml; diff --git a/prediction/algorithms/fishing_pattern.py b/prediction/algorithms/fishing_pattern.py index c2815ec..64201b6 100644 --- a/prediction/algorithms/fishing_pattern.py +++ b/prediction/algorithms/fishing_pattern.py @@ -92,6 +92,26 @@ def detect_fishing_segments(df_vessel: pd.DataFrame, }) in_fishing = False + # 트랙 끝까지 조업 중이면 마지막 세그먼트 추가 + if in_fishing and len(records) > seg_start_idx: + start_ts = records[seg_start_idx].get('timestamp') + end_ts = records[-1].get('timestamp') + if start_ts and end_ts: + dur_sec = (pd.Timestamp(end_ts) - pd.Timestamp(start_ts)).total_seconds() + dur_min = dur_sec / 60 + if dur_min >= window_min: + zone_info = classify_zone( + records[seg_start_idx].get('lat', 0), + records[seg_start_idx].get('lon', 0), + ) + segments.append({ + 'start_idx': seg_start_idx, + 'end_idx': len(records) - 1, + 'duration_min': round(dur_min, 1), + 'zone': zone_info.get('zone', 'UNKNOWN'), + 'in_territorial_sea': zone_info.get('zone') == 'TERRITORIAL_SEA', + }) + return segments diff --git a/prediction/algorithms/risk.py b/prediction/algorithms/risk.py index d0c58b2..a5938f5 100644 --- a/prediction/algorithms/risk.py +++ b/prediction/algorithms/risk.py @@ -54,6 +54,13 @@ def compute_vessel_risk_score( if teleports: score += 20 + from algorithms.spoofing import count_speed_jumps + jumps = count_speed_jumps(df_vessel) + if jumps >= 3: + score += 10 + elif jumps >= 1: + score += 5 + gaps = detect_ais_gaps(df_vessel) critical_gaps = [g for g in gaps if g['gap_min'] >= 60] if critical_gaps: diff --git a/prediction/algorithms/spoofing.py b/prediction/algorithms/spoofing.py index e2ec081..a75db08 100644 --- a/prediction/algorithms/spoofing.py +++ b/prediction/algorithms/spoofing.py @@ -68,13 +68,15 @@ def compute_spoofing_score(df_vessel: pd.DataFrame) -> float: 
if jumps > 0: score += min(0.3, jumps / n * 5) - # BD09 오프셋 (중국 좌표 사용 의심) - mid_idx = len(df_vessel) // 2 - row = df_vessel.iloc[mid_idx] - offset = compute_bd09_offset(row['lat'], row['lon']) - if offset > 300: # 300m 이상 - score += 0.3 - elif offset > 100: - score += 0.1 + # BD09 오프셋 — 중국 선박(412*)은 좌표계 차이로 항상 ~300m이므로 제외 + mmsi_str = str(df_vessel.iloc[0].get('mmsi', '')) if 'mmsi' in df_vessel.columns else '' + if not mmsi_str.startswith('412'): + mid_idx = len(df_vessel) // 2 + row = df_vessel.iloc[mid_idx] + offset = compute_bd09_offset(row['lat'], row['lon']) + if offset > 300: + score += 0.3 + elif offset > 100: + score += 0.1 return round(min(score, 1.0), 4) diff --git a/prediction/pipeline/constants.py b/prediction/pipeline/constants.py index 4f07866..83a22e4 100644 --- a/prediction/pipeline/constants.py +++ b/prediction/pipeline/constants.py @@ -18,7 +18,7 @@ MIN_CLUSTER_SIZE = 5 MMSI_DIGITS = 9 MAX_VESSEL_LENGTH = 300 MAX_SOG_KNOTS = 30.0 -MIN_TRAJ_POINTS = 100 +MIN_TRAJ_POINTS = 20 KR_BOUNDS = { 'lat_min': 32.0, 'lat_max': 39.0,