# Commit note (translated from Korean; leftover header from the page this file
# was extracted from — not part of the module itself):
#   Cloned 47 Python files into prediction/: algorithms/ (14 analysis
#   algorithms: gear inference, dark vessel, spoofing, transshipment, risk,
#   etc.), pipeline/ (7-stage classification pipeline), cache/vessel_store
#   (24h sliding window), db/ adapters (snpdb source reads, kcgdb result
#   writes), chat/ AI chat (Ollama, lower priority), data/ static data
#   (baselines, designated-fishing-zone GeoJSON). config.py reconfigured for
#   kcgaidb (DB name, user, password); DB connectivity verified (37 kcgaidb
#   tables reachable). Added dev-prediction / dev-all Makefile targets and a
#   prediction section to CLAUDE.md.
#   Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
# (Page metadata: 91 lines, 3.1 KiB, Python)
import unittest
import sys
import types
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

import pandas as pd

# Register a minimal stand-in for the third-party `pydantic_settings` package
# so the modules under test can be imported without that dependency installed.
stub = types.ModuleType('pydantic_settings')


class BaseSettings:
    """Tiny drop-in for ``pydantic_settings.BaseSettings``.

    Copies every UPPER_CASE attribute declared directly on the subclass onto
    the instance, letting keyword arguments override the class-level default.
    Any other keyword argument is silently ignored, and attributes inherited
    from base classes are not copied (only ``self.__class__.__dict__`` is
    scanned, matching how the real settings classes are used in these tests).
    """

    def __init__(self, **overrides):
        for attr, default in self.__class__.__dict__.items():
            if attr.isupper():
                setattr(self, attr, overrides.get(attr, default))


stub.BaseSettings = BaseSettings
# setdefault: keep the real package if it has already been imported/registered.
sys.modules.setdefault('pydantic_settings', stub)
# Modules under test (importable thanks to the pydantic_settings stub above).
from cache.vessel_store import VesselStore
from time_bucket import (
    compute_incremental_window_start,
    compute_initial_window_start,
    compute_safe_bucket,
)
class TimeBucketRuleTest(unittest.TestCase):
    """Time-bucket window rules and VesselStore incremental-merge semantics."""

    @staticmethod
    def _row(ts, bucket, lat, lon, sog):
        # Build one AIS track row; every fixture row shares the same MMSI.
        return {
            'mmsi': '412000001',
            'timestamp': pd.Timestamp(ts),
            'time_bucket': bucket,
            'lat': lat,
            'lon': lon,
            'raw_sog': sog,
        }

    def test_safe_bucket_uses_delay_then_floors_to_5m(self):
        # 15:14 KST minus the ingest delay floors to the 15:00 bucket (naive).
        current = datetime(2026, 4, 2, 15, 14, 0, tzinfo=ZoneInfo('Asia/Seoul'))
        expected = datetime(2026, 4, 2, 15, 0, 0)
        self.assertEqual(compute_safe_bucket(current), expected)

    def test_incremental_window_includes_overlap_buckets(self):
        # The incremental window starts 15 minutes (3 buckets) before the
        # last processed bucket so overlapping rows can be re-fetched.
        prev_bucket = datetime(2026, 4, 2, 15, 0, 0)
        self.assertEqual(
            compute_incremental_window_start(prev_bucket),
            datetime(2026, 4, 2, 14, 45, 0),
        )

    def test_initial_window_start_anchors_to_safe_bucket(self):
        # A 24h initial window counts back from the safe bucket, not "now".
        anchor = datetime(2026, 4, 2, 15, 0, 0)
        self.assertEqual(
            compute_initial_window_start(24, anchor),
            datetime(2026, 4, 1, 15, 0, 0),
        )

    def test_merge_incremental_prefers_newer_overlap_rows(self):
        bucket_a = datetime(2026, 4, 2, 9, 0, 0)
        bucket_b = datetime(2026, 4, 2, 9, 5, 0)

        store = VesselStore()
        # Seed the store with two rows in the 09:00 bucket.
        store._tracks = {
            '412000001': pd.DataFrame([
                self._row('2026-04-02T00:01:00Z', bucket_a, 30.0, 120.0, 1.0),
                self._row('2026-04-02T00:02:00Z', bucket_a, 30.1, 120.1, 1.0),
            ])
        }
        # Incremental batch: one overlapping timestamp (newer values) plus
        # one genuinely new row in the next bucket.
        df_new = pd.DataFrame([
            self._row('2026-04-02T00:02:00Z', bucket_a, 30.2, 120.2, 2.0),
            self._row('2026-04-02T00:03:00Z', bucket_b, 30.3, 120.3, 2.0),
        ])

        store.merge_incremental(df_new)

        merged = store._tracks['412000001']
        # 2 seeded + 1 new; the overlap replaced in place, not duplicated.
        self.assertEqual(len(merged), 3)
        overlap = merged.loc[
            merged['timestamp'] == pd.Timestamp('2026-04-02T00:02:00Z')
        ].iloc[0]
        # The newer batch's coordinates win for the overlapping timestamp.
        self.assertEqual(float(overlap['lat']), 30.2)
        self.assertEqual(float(overlap['lon']), 120.2)
if __name__ == '__main__':
    # Allow running this test module directly: `python <this file>`.
    unittest.main()