spring:
  application:
    name: snp-batch-validation

  # PostgreSQL Database Configuration
  # NOTE(review): plaintext credentials in committed config — prefer env vars
  # or a secret store (e.g. ${DB_PASSWORD}).
  datasource:
    url: jdbc:postgresql://211.208.115.83:5432/snpdb
    username: snp
    password: 'snp#8932'  # quoted: value contains '#'
    driver-class-name: org.postgresql.Driver
    hikari:
      maximum-pool-size: 10
      minimum-idle: 5
      connection-timeout: 30000

  # JPA Configuration
  jpa:
    hibernate:
      ddl-auto: update
    show-sql: false
    properties:
      hibernate:
        dialect: org.hibernate.dialect.PostgreSQLDialect
        format_sql: true
        default_schema: std_snp_data

  # Batch Configuration
  batch:
    jdbc:
      table-prefix: "std_snp_data.batch_"
      initialize-schema: never  # Changed to 'never' as tables already exist
    job:
      enabled: false  # Prevent auto-run on startup

  # Thymeleaf Configuration
  thymeleaf:
    cache: false
    prefix: classpath:/templates/
    suffix: .html

  # Quartz Scheduler Configuration - Using JDBC Store for persistence
  quartz:
    job-store-type: jdbc  # JDBC store for schedule persistence
    jdbc:
      initialize-schema: never  # Quartz tables manually created in std_snp_data schema
    properties:
      org.quartz.scheduler.instanceName: SNPBatchScheduler
      org.quartz.scheduler.instanceId: AUTO
      org.quartz.threadPool.threadCount: 10
      org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX
      org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
      org.quartz.jobStore.tablePrefix: std_snp_data.QRTZ_
      org.quartz.jobStore.isClustered: false
      org.quartz.jobStore.misfireThreshold: 60000

  # Kafka Configuration
  kafka:
    bootstrap-servers: localhost:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      acks: all
      retries: 3
      properties:
        enable.idempotence: true
        compression.type: snappy
        linger.ms: 20
        batch.size: 65536
        max.block.ms: 3000
        request.timeout.ms: 5000
        delivery.timeout.ms: 10000

# Server Configuration
server:
  port: 8041
  servlet:
    context-path: /snp-api

# Actuator Configuration
management:
  endpoints:
    web:
      exposure:
        include: health,info,metrics,prometheus,batch
  endpoint:
    health:
      show-details: always

# Logging Configuration (detailed settings in logback-spring.xml)
logging:
  config: classpath:logback-spring.xml

# Custom Application Properties
app:
  batch:
    chunk-size: 1000
    target-schema:
      name: std_snp_data
      tables:
        ship-001: tb_ship_default_info
        ship-002: tb_ship_info_mst
        ship-003: tb_ship_add_info
        ship-004: tb_ship_bbctr_hstry
        ship-005: tb_ship_idntf_info_hstry
        ship-006: tb_ship_clfic_hstry
        ship-007: tb_ship_company_rel
        ship-008: tb_ship_crew_list
        ship-009: tb_ship_dark_actv_idnty
        ship-010: tb_ship_country_hstry
        ship-011: tb_ship_group_revn_ownr_hstry
        ship-012: tb_ship_ice_grd
        ship-013: tb_ship_nm_chg_hstry
        ship-014: tb_ship_operator_hstry
        ship-015: tb_ship_ownr_hstry
        ship-016: tb_ship_prtc_rpn_hstry
        ship-017: tb_ship_sfty_mng_evdc_hstry
        ship-018: tb_ship_mng_company_hstry
        ship-019: tb_ship_sstrvsl_rel
        ship-020: tb_ship_spc_fetr
        ship-021: tb_ship_status_hstry
        ship-022: tb_ship_cargo_capacity
        ship-023: tb_ship_inspection_ymd
        ship-024: tb_ship_inspection_ymd_hstry
        ship-025: tb_ship_tech_mng_company_hstry
        ship-026: tb_ship_thrstr_info
        company-001: tb_company_dtl_info
        event-001: tb_event_mst
        event-002: tb_event_cargo
        event-003: tb_event_humn_acdnt
        event-004: tb_event_rel
        facility-001: tb_port_facility_info
        psc-001: tb_psc_mst
        psc-002: tb_psc_defect
        psc-003: tb_psc_oa_certf
        movements-001: tb_ship_anchrgcall_hstry
        movements-002: tb_ship_berthcall_hstry
        movements-003: tb_ship_now_status_hstry
        movements-004: tb_ship_dest_hstry
        movements-005: tb_ship_prtcll_hstry
        movements-006: tb_ship_sts_opert_hstry
        movements-007: tb_ship_teminalcall_hstry
        movements-008: tb_ship_trnst_hstry
        code-001: tb_ship_type_cd
        code-002: tb_ship_country_cd
        risk-compliance-001: tb_ship_risk_info
        risk-compliance-002: tb_ship_compliance_info
        risk-compliance-003: tb_company_compliance_info
        ship-028: ship_detail_hash_json
    service-schema:
      name: std_snp_svc
      tables:
        service-001: tb_ship_main_info

  api:
    url: https://api.example.com/data
    timeout: 30000

  # NOTE(review): plaintext external-API credentials in committed config —
  # prefer env vars or a secret store.
  ship-api:
    url: https://shipsapi.maritime.spglobal.com
    username: '7cc0517d-5ed6-452e-a06f-5bbfd6ab6ade'
    password: '2LLzSJNqtxWVD8zC'
  ais-api:
    url: https://aisapi.maritime.spglobal.com
  webservice-api:
    url: https://webservices.maritime.spglobal.com

  schedule:
    enabled: true
    cron: "0 0 * * * ?"  # Every hour
    # LAST_EXECUTION buffer time (hours) — compensates for external DB sync delay
    last-execution-buffer-hours: 24

  # ShipDetailUpdate batch settings
  ship-detail-update:
    batch-size: 10            # Number of IMOs per API request
    delay-on-success-ms: 300  # Delay after success (ms)
    delay-on-failure-ms: 2000 # Delay after failure (ms)
    max-retry-count: 3        # Maximum retry count

  # AIS Target Import batch settings (cache update only)
  ais-target:
    since-seconds: 60   # API lookup window (seconds)
    chunk-size: 50000   # Batch chunk size
    schedule:
      cron: "15 * * * * ?"  # Runs at second 15 of every minute
    kafka:
      enabled: false  # Setting to true requires a reachable Kafka broker
      topic: tp_Global_AIS_Signal
      send-chunk-size: 5000
      fail-on-send-error: false

  # AIS Target DB Sync batch settings (cache -> DB persistence)
  ais-target-db-sync:
    time-range-minutes: 15  # Time range to read from the cache (minutes)
    schedule:
      cron: "0 0/15 * * * ?"  # Every 15 minutes on the quarter (:00, :15, :30, :45)

  # AIS Target cache settings
  ais-target-cache:
    ttl-minutes: 120  # Cache TTL (minutes) — 2 hours
    max-size: 300000  # Maximum cache size — 300k entries

  # Dedicated cache settings for China-permitted ships
  chnprmship:
    mmsi-resource-path: classpath:chnprmship-mmsi.txt
    ttl-days: 2
    max-size: 2000
    warmup-enabled: true
    warmup-days: 2

  # ClassType classification settings
  class-type:
    refresh-hour: 4  # Core20 cache refresh hour (default: 04:00)
    # Core20 cache table settings (table/column names may differ per environment)
    core20:
      schema: std_snp_svc       # Schema name
      table: tb_ship_main_info  # Table name
      imo-column: imo_no        # IMO/LRNO column name (PK, NOT NULL)
      mmsi-column: mmsi_no      # MMSI column name (NULLABLE)

  # Partition management settings
  partition:
    # Daily partition tables (naming: {table}_YYMMDD)
    daily-tables:
      - schema: std_snp_data
        table-name: ais_target
        partition-column: message_timestamp
        periods-ahead: 3  # Number of days to pre-create
    # Monthly partition tables (naming: {table}_YYYY_MM)
    monthly-tables: []  # None at present
    # Default retention periods
    retention:
      daily-default-days: 14     # Default retention for daily partitions (14 days)
      monthly-default-months: 1  # Default retention for monthly partitions (1 month)
      # Per-table retention overrides (optional).
      # NOTE: bare key binds to null (no overrides) until entries are uncommented.
      custom:
        # - table-name: ais_target
        #   retention-days: 30  # keep ais_target for 30 days only