Codex Lab 환경(iran-airstrike-replay-codex)에서 검증 완료된 어구 모선 자동 추론 + 검토 워크플로우 전체를 이식. ## Python (prediction/) - gear_parent_inference(1,428줄): 다층 점수 모델 (correlation + name + track + prior bonus) - gear_parent_episode(631줄): Episode 연속성 (Jaccard + 공간거리) - gear_name_rules: 모선 이름 정규화 + 4자 미만 필터 - scheduler: 추론 호출 단계 추가 (4.8) - fleet_tracker/kcgdb: SQL qualified_table() 동적화 - gear_correlation: timestamp 필드 추가 ## DB (database/migration/ 012~015) - 후보 스냅샷, resolution, episode, 라벨 세션, 제외 관리 테이블 9개 + VIEW 2개 ## Backend (Java) - 12개 DTO/Controller (ParentInferenceWorkflowController 등) - GroupPolygonService: parent_resolution LEFT JOIN + 15개 API 메서드 ## Frontend - ParentReviewPanel: 모선 검토 대시보드 - vesselAnalysis: 10개 신규 API 함수 + 6개 타입 Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
91 lines
3.1 KiB
Python
import unittest
|
|
import sys
|
|
import types
|
|
from datetime import datetime, timezone
|
|
from zoneinfo import ZoneInfo
|
|
|
|
import pandas as pd
|
|
|
|
# Minimal stand-in for the optional `pydantic_settings` dependency so the
# project modules under test can be imported without it being installed.
stub = types.ModuleType('pydantic_settings')


class BaseSettings:
    """Tiny stub of ``pydantic_settings.BaseSettings``.

    Copies every UPPER_CASE class attribute onto the instance, allowing each
    one to be overridden via a keyword argument — the only behavior the code
    under test relies on.
    """

    def __init__(self, **kwargs):
        # Walk the MRO so UPPER_CASE settings declared on a base class are
        # also picked up (the original scanned only the immediate class's
        # __dict__ and silently dropped inherited fields). The most-derived
        # definition of each name wins, matching normal attribute lookup.
        seen = set()
        for klass in type(self).__mro__:
            for name, value in klass.__dict__.items():
                if name.isupper() and name not in seen:
                    seen.add(name)
                    setattr(self, name, kwargs.get(name, value))


stub.BaseSettings = BaseSettings
sys.modules.setdefault('pydantic_settings', stub)
|
|
|
|
from cache.vessel_store import VesselStore
|
|
from time_bucket import compute_incremental_window_start, compute_initial_window_start, compute_safe_bucket
|
|
|
|
|
|
class TimeBucketRuleTest(unittest.TestCase):
    """Tests for the 5-minute time-bucket rules and incremental track merge."""

    def test_safe_bucket_uses_delay_then_floors_to_5m(self):
        """A zoned wall-clock time is delayed, then floored to a 5-minute bucket."""
        current = datetime(2026, 4, 2, 15, 14, 0, tzinfo=ZoneInfo('Asia/Seoul'))
        expected = datetime(2026, 4, 2, 15, 0, 0)
        self.assertEqual(compute_safe_bucket(current), expected)

    def test_incremental_window_includes_overlap_buckets(self):
        """The incremental window reaches back before the last stored bucket."""
        previous_bucket = datetime(2026, 4, 2, 15, 0, 0)
        self.assertEqual(
            compute_incremental_window_start(previous_bucket),
            datetime(2026, 4, 2, 14, 45, 0),
        )

    def test_initial_window_start_anchors_to_safe_bucket(self):
        """A 24-hour initial lookback is counted back from the safe bucket."""
        anchor = datetime(2026, 4, 2, 15, 0, 0)
        self.assertEqual(
            compute_initial_window_start(24, anchor),
            datetime(2026, 4, 1, 15, 0, 0),
        )

    def test_merge_incremental_prefers_newer_overlap_rows(self):
        """A stored row sharing a timestamp with incoming data is replaced, not duplicated."""
        mmsi = '412000001'
        bucket_a = datetime(2026, 4, 2, 9, 0, 0)
        bucket_b = datetime(2026, 4, 2, 9, 5, 0)
        overlap_ts = pd.Timestamp('2026-04-02T00:02:00Z')

        store = VesselStore()
        store._tracks = {
            mmsi: pd.DataFrame({
                'mmsi': [mmsi, mmsi],
                'timestamp': [pd.Timestamp('2026-04-02T00:01:00Z'), overlap_ts],
                'time_bucket': [bucket_a, bucket_a],
                'lat': [30.0, 30.1],
                'lon': [120.0, 120.1],
                'raw_sog': [1.0, 1.0],
            }),
        }

        incoming = pd.DataFrame({
            'mmsi': [mmsi, mmsi],
            'timestamp': [overlap_ts, pd.Timestamp('2026-04-02T00:03:00Z')],
            'time_bucket': [bucket_a, bucket_b],
            'lat': [30.2, 30.3],
            'lon': [120.2, 120.3],
            'raw_sog': [2.0, 2.0],
        })

        store.merge_incremental(incoming)

        merged = store._tracks[mmsi]
        # 2 original + 2 incoming, minus 1 overlapping row that was replaced.
        self.assertEqual(len(merged), 3)
        overlap_row = merged[merged['timestamp'] == overlap_ts].iloc[0]
        self.assertEqual(float(overlap_row['lat']), 30.2)
        self.assertEqual(float(overlap_row['lon']), 120.2)
|
|
|
|
|
|
# Allow the suite to be run directly: `python <this file>`.
if __name__ == '__main__':
    unittest.main()
|