commit 744cc02f366fc1662890d8c7698a363e9159adec Author: HYOJIN Date: Mon Mar 23 13:33:31 2026 +0900 feat: snp-sync-batch 프로젝트 초기 설정 mda-snp-batch 기반으로 snp-sync-batch 프로젝트 생성 - 프론트엔드: Thymeleaf → React + TypeScript + Vite + Tailwind CSS 전환 - 컨텍스트: /snp-sync, 포트 8051 - 재수집(Recollection) 관련 코드 제거 - displayName → job_schedule.description 기반으로 전환 - 누락 API 추가 (statistics, jobs/detail, executions/recent) - 실행 이력 조회 속도 개선 (JDBC 경량 쿼리) - 스케줄 CRUD API 메서드 매핑 수정 (PUT/DELETE) Co-Authored-By: Claude Opus 4.6 (1M context) diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..2125666 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +* text=auto \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..86da2d9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,109 @@ +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs +hs_err_pid* +replay_pid* + +# Maven +target/ +pom.xml.tag +pom.xml.releaseBackup +pom.xml.versionsBackup +pom.xml.next +release.properties +dependency-reduced-pom.xml +buildNumber.properties +.mvn/timing.properties +.mvn/wrapper/maven-wrapper.jar + +# Gradle +.gradle/ +build/ +!gradle/wrapper/gradle-wrapper.jar +!**/src/main/**/build/ +!**/src/test/**/build/ + +# IntelliJ IDEA +.idea/ +*.iws +*.iml +*.ipr +out/ +!**/src/main/**/out/ +!**/src/test/**/out/ + +# Eclipse +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache +bin/ +!**/src/main/**/bin/ +!**/src/test/**/bin/ + +# VS Code +.vscode/ + +# NetBeans +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ + +# Mac +.DS_Store + +# Windows +Thumbs.db +ehthumbs.db +Desktop.ini + +# Application specific +application-local.yml +*.env +.env.* + +# Database +*.db +*.sqlite +*.sqlite3 + +# Logs +logs/ +docs/ +*.log.* + +# Session continuity 
files (for AI assistants) +.claude/ +CLAUDE.md +README.md + +# Frontend +frontend/node/ +frontend/node_modules/ +src/main/resources/static/ + +nul \ No newline at end of file diff --git a/DEVELOPMENT_GUIDE.md b/DEVELOPMENT_GUIDE.md new file mode 100644 index 0000000..8050350 --- /dev/null +++ b/DEVELOPMENT_GUIDE.md @@ -0,0 +1,1602 @@ +# Spring Batch 개발 가이드 + +## 목차 +1. [프로젝트 개요](#1-프로젝트-개요) +2. [프로젝트 구조](#2-프로젝트-구조) +3. [추상 클래스 구조](#3-추상-클래스-구조) +4. [새로운 배치 Job 생성 가이드](#4-새로운-배치-job-생성-가이드) +5. [예제: 전체 구현 과정](#5-예제-전체-구현-과정) +6. [베스트 프랙티스](#6-베스트-프랙티스) +7. [트러블슈팅](#7-트러블슈팅) + +--- + +## 1. 프로젝트 개요 + +### 1.1 기술 스택 +- **Spring Boot**: 3.2.1 +- **Spring Batch**: 5.1.0 +- **Quartz Scheduler**: 2.5.0 +- **PostgreSQL**: 42.7.4 +- **WebClient**: REST API 호출 +- **Thymeleaf**: 웹 UI +- **Bootstrap 5**: 프론트엔드 + +### 1.2 주요 기능 +- REST API 기반 데이터 수집 배치 +- Quartz 기반 스케줄링 관리 (DB 영속화) +- 웹 UI를 통한 배치 모니터링 +- 타임라인 차트 (일/주/월 단위) +- 대시보드 및 실행 이력 조회 +- 통일된 추상화 구조 (Reader/Processor/Writer) +- REST API 표준 응답 형식 + +### 1.3 아키텍처 패턴 + +#### 배치 처리 패턴 +``` +External API → Reader → Processor → Writer → Database + ↓ ↓ ↓ + (API Call) (Transform) (Batch Insert) +``` + +#### 계층 구조 +``` +Controller (REST API) + ↓ +Service (비즈니스 로직) + ↓ +Repository (데이터 액세스) + ↓ +Database (PostgreSQL) +``` + +#### 추상화 구조 +``` +com.snp.batch/ +├── SnpBatchApplication.java # Spring Boot 메인 클래스 +│ +├── common/ # 공통 추상화 모듈 +│ ├── batch/ # 배치 작업 추상화 +│ │ ├── config/ +│ │ │ └── BaseJobConfig # Job/Step 설정 템플릿 +│ │ ├── entity/ +│ │ │ └── BaseEntity # Entity 공통 감사 필드 +│ │ ├── processor/ +│ │ │ └── BaseProcessor # Processor 템플릿 +│ │ ├── reader/ +│ │ │ └── BaseApiReader # API Reader 템플릿 +│ │ ├── repository/ +│ │ │ └── BaseJdbcRepository # JDBC Repository 템플릿 +│ │ └── writer/ +│ │ └── BaseWriter # Writer 템플릿 +│ │ +│ └── web/ # 웹 API 추상화 +│ ├── controller/ +│ │ └── BaseController # 컨트롤러 템플릿 +│ ├── dto/ +│ │ └── BaseDto # 공통 DTO 필드 +│ ├── service/ +│ │ ├── BaseService # 서비스 인터페이스 +│ │ ├── BaseServiceImpl # 서비스 구현 템플릿 (JDBC) +│ 
│ ├── BaseProxyService # 프록시 서비스 템플릿 +│ │ └── BaseHybridService # 하이브리드 서비스 템플릿 +│ └── ApiResponse # 공통 API 응답 래퍼 +│ +├── global/ # 전역 클래스 (애플리케이션 레벨) +│ ├── config/ # 전역 설정 (Quartz, Swagger 등) +│ ├── controller/ # 전역 컨트롤러 (Batch, Web UI) +│ ├── dto/ # 전역 DTO (Job 실행, 스케줄 등) +│ ├── model/ # 전역 Entity (스케줄 정보 등) - JPA 허용 +│ └── repository/ # 전역 Repository - JPA 허용 +│ +├── jobs/ # 배치 Job 구현 (도메인별, JDBC 전용) +│ ├── sample/ # 샘플 제품 데이터 Job +│ │ ├── batch/ # 배치 작업 +│ │ └── web/ # 웹 API (선택) +│ └── shipimport/ # 선박 데이터 Import Job +│ └── batch/ # 배치 작업만 +│ +├── service/ # 전역 서비스 +└── scheduler/ # 스케줄러 (Quartz Job, Initializer) +``` + +--- + +## 2. 프로젝트 구조 + +``` +src/main/java/com/snp/batch/ +├── SnpBatchApplication.java # Spring Boot 메인 클래스 +│ +├── common/ # 공통 추상화 모듈 +│ ├── batch/ # 배치 작업 공통 Base 클래스 +│ │ ├── config/ +│ │ │ └── BaseJobConfig.java # Job/Step 설정 템플릿 +│ │ ├── entity/ +│ │ │ └── BaseEntity.java # 공통 감사 필드 (JPA 제거) +│ │ ├── processor/ +│ │ │ └── BaseProcessor.java # Processor 템플릿 +│ │ ├── reader/ +│ │ │ └── BaseApiReader.java # API Reader 템플릿 +│ │ ├── repository/ +│ │ │ └── BaseJdbcRepository.java # JDBC Repository 템플릿 +│ │ └── writer/ +│ │ └── BaseWriter.java # Writer 템플릿 +│ │ +│ └── web/ # 웹 API 공통 Base 클래스 +│ ├── controller/ +│ │ └── BaseController.java # 컨트롤러 템플릿 +│ ├── dto/ +│ │ └── BaseDto.java # 공통 DTO 필드 +│ ├── service/ +│ │ ├── BaseService.java # 서비스 인터페이스 +│ │ ├── BaseServiceImpl.java # 서비스 구현 템플릿 (JDBC) +│ │ ├── BaseProxyService.java # 프록시 서비스 템플릿 +│ │ └── BaseHybridService.java # 하이브리드 서비스 템플릿 +│ └── ApiResponse.java # 공통 API 응답 래퍼 +│ +├── global/ # 전역 클래스 (JPA 허용) +│ ├── config/ # 전역 설정 +│ │ ├── QuartzConfig.java # Quartz 스케줄러 설정 +│ │ └── SwaggerConfig.java # Swagger 설정 +│ │ +│ ├── controller/ # 전역 컨트롤러 +│ │ ├── BatchController.java # 배치 관리 REST API +│ │ └── WebViewController.java # Thymeleaf 뷰 컨트롤러 +│ │ +│ ├── dto/ # 전역 DTO +│ │ ├── DashboardResponse.java # 대시보드 응답 +│ │ ├── JobExecutionDetailDto.java # 실행 상세 정보 +│ │ ├── JobExecutionDto.java # 실행 
이력 DTO +│ │ ├── ScheduleRequest.java # 스케줄 등록/수정 요청 +│ │ ├── ScheduleResponse.java # 스케줄 조회 응답 +│ │ └── TimelineResponse.java # 타임라인 응답 +│ │ +│ ├── model/ # 전역 Entity (JPA) +│ │ └── JobScheduleEntity.java # 스케줄 Entity (JPA) +│ │ +│ └── repository/ # 전역 Repository (JPA) +│ ├── JobScheduleRepository.java # JpaRepository (JPA) +│ └── TimelineRepository.java # 타임라인 Repository +│ +├── jobs/ # 도메인별 배치 Job (JDBC 전용) +│ │ +│ ├── sample/ # 샘플 제품 데이터 Job +│ │ ├── batch/ # 배치 작업 +│ │ │ ├── config/ +│ │ │ │ └── ProductDataImportJobConfig.java +│ │ │ ├── dto/ +│ │ │ │ ├── ProductApiResponse.java +│ │ │ │ └── ProductDto.java +│ │ │ ├── entity/ +│ │ │ │ └── ProductEntity.java # extends BaseEntity (JPA 제거) +│ │ │ ├── processor/ +│ │ │ │ └── ProductDataProcessor.java +│ │ │ ├── reader/ +│ │ │ │ └── ProductDataReader.java +│ │ │ ├── repository/ +│ │ │ │ ├── ProductRepository.java +│ │ │ │ └── ProductRepositoryImpl.java # extends BaseJdbcRepository +│ │ │ └── writer/ +│ │ │ └── ProductDataWriter.java +│ │ │ +│ │ └── web/ # 웹 API +│ │ ├── controller/ +│ │ │ └── ProductWebController.java +│ │ ├── dto/ +│ │ │ └── ProductWebDto.java +│ │ └── service/ +│ │ └── ProductWebService.java +│ │ +│ └── shipimport/ # 선박 데이터 Import Job +│ └── batch/ # 배치 작업 (웹 API 없음) +│ ├── config/ +│ │ └── ShipImportJobConfig.java +│ ├── dto/ +│ │ ├── ShipApiResponse.java +│ │ └── ShipDto.java +│ ├── entity/ +│ │ └── ShipEntity.java # extends BaseEntity (JPA 제거) +│ ├── processor/ +│ │ └── ShipDataProcessor.java +│ ├── reader/ +│ │ └── ShipDataReader.java +│ ├── repository/ +│ │ ├── ShipRepository.java +│ │ └── ShipRepositoryImpl.java # extends BaseJdbcRepository +│ └── writer/ +│ └── ShipDataWriter.java +│ +├── service/ # 전역 서비스 +│ ├── BatchService.java # 배치 실행 관리 +│ ├── QuartzJobService.java # Quartz-Batch 연동 +│ └── ScheduleService.java # 스케줄 DB 영속화 +│ +└── scheduler/ # 스케줄러 + ├── QuartzBatchJob.java # Quartz Job 구현체 + └── SchedulerInitializer.java # 스케줄 자동 로드 +``` + +**주요 특징**: +- `common/batch/`: 배치 작업 전용 Base 
클래스 (JDBC 기반) +- `common/web/`: 웹 API 전용 Base 클래스 (JDBC 기반) +- `global/`: JPA 사용 허용 (간단한 CRUD만) +- `jobs/`: 모든 Job은 JDBC 전용 (성능 최적화) + +--- + +## 3. 추상 클래스 구조 + +### 3.0 공통 베이스 클래스 + +#### 3.0.1 BaseEntity + +**목적**: 모든 Entity의 공통 감사(Audit) 필드 관리 + +**위치**: `com.snp.batch.common.batch.entity.BaseEntity` + +**제공 필드**: +```java +@CreatedDate +private LocalDateTime createdAt; // 생성 일시 (자동 설정) + +@LastModifiedDate +private LocalDateTime updatedAt; // 수정 일시 (자동 업데이트) + +private String createdBy; // 생성자 (기본값: "SYSTEM") +private String updatedBy; // 수정자 (기본값: "SYSTEM") +``` + +**사용 방법 (jobs 패키지 - JDBC 전용)**: +```java +/** + * Ship Entity - JDBC Template 기반 + * JPA 어노테이션 사용 금지 + * 컬럼 매핑은 주석으로 명시 + */ +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class ShipEntity extends BaseEntity { + /** + * 기본 키 (자동 생성) + * 컬럼: id (BIGSERIAL) + */ + private Long id; + + /** + * 선박 이름 + * 컬럼: ship_name (VARCHAR(100)) + */ + private String shipName; + + // createdAt, updatedAt, createdBy, updatedBy는 BaseEntity에서 상속 +} +``` + +**사용 방법 (global 패키지 - JPA 허용)**: +```java +@Entity +@Table(name = "job_schedule") +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class JobScheduleEntity extends BaseEntity { + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(name = "job_name", unique = true, nullable = false) + private String jobName; + // createdAt, updatedAt, createdBy, updatedBy는 자동 관리됨 +} +``` + +**주요 기능**: +- jobs 패키지: JDBC 기반, JPA 어노테이션 없음, RowMapper로 수동 매핑 +- global 패키지: JPA 기반, @PrePersist/@PreUpdate로 자동 관리 +- 공통 감사 필드: createdAt, updatedAt, createdBy, updatedBy + +--- + +#### 3.0.2 BaseDto + +**목적**: 모든 DTO의 공통 감사 필드 제공 + +**위치**: `com.snp.batch.common.web.dto.BaseDto` + +**제공 필드**: +```java +private LocalDateTime createdAt; +private LocalDateTime updatedAt; +private String createdBy; +private String updatedBy; +``` + +**사용 방법**: +```java +@Data 
+@EqualsAndHashCode(callSuper = true) +public class ShipDto extends BaseDto { + private String shipName; + private String shipType; + // 부모 클래스의 감사 필드 자동 상속 +} +``` + +--- + +#### 3.0.3 BaseService + +**목적**: Service 계층의 공통 CRUD 인터페이스 정의 + +**위치**: `com.snp.batch.common.web.service.BaseService` + +**제공 메서드**: +```java +D create(D dto); // 생성 +Optional findById(ID id); // 단건 조회 +List findAll(); // 전체 조회 +Page findAll(Pageable pageable); // 페이징 조회 +D update(ID id, D dto); // 수정 +void deleteById(ID id); // 삭제 +boolean existsById(ID id); // 존재 여부 확인 +D toDto(T entity); // Entity → DTO 변환 +T toEntity(D dto); // DTO → Entity 변환 +``` + +--- + +#### 3.0.4 BaseServiceImpl + +**목적**: BaseService의 기본 구현 제공 + +**위치**: `com.snp.batch.common.web.service.BaseServiceImpl` + +**필수 구현 메서드**: +```java +protected abstract BaseJdbcRepository getRepository(); // Repository 반환 +protected abstract String getEntityName(); // Entity 이름 (로깅용) +protected abstract void updateEntity(T entity, D dto); // Entity 업데이트 로직 +protected abstract ID extractId(T entity); // Entity에서 ID 추출 +``` + +**참고**: jobs 패키지에서는 JDBC 기반 Repository를 사용합니다 (BaseJdbcRepository 상속). 
+ +**사용 예제**: +```java +@Service +@RequiredArgsConstructor +public class ProductWebService extends BaseServiceImpl { + + private final ProductRepository productRepository; + + @Override + protected BaseJdbcRepository getRepository() { + return productRepository; + } + + @Override + protected String getEntityName() { + return "Product"; + } + + @Override + protected void updateEntity(ProductEntity entity, ProductWebDto dto) { + entity.setProductName(dto.getProductName()); + entity.setCategory(dto.getCategory()); + entity.setPrice(dto.getPrice()); + } + + @Override + protected Long extractId(ProductEntity entity) { + return entity.getId(); + } + + @Override + public ProductWebDto toDto(ProductEntity entity) { + return ProductWebDto.builder() + .productId(entity.getProductId()) + .productName(entity.getProductName()) + .category(entity.getCategory()) + .price(entity.getPrice()) + .build(); + } + + @Override + public ProductEntity toEntity(ProductWebDto dto) { + return ProductEntity.builder() + .productId(dto.getProductId()) + .productName(dto.getProductName()) + .category(dto.getCategory()) + .price(dto.getPrice()) + .build(); + } +} +``` + +--- + +#### 3.0.5 BaseController + +**목적**: REST Controller의 공통 CRUD API 제공 + +**위치**: `com.snp.batch.common.web.controller.BaseController` + +**필수 구현 메서드**: +```java +protected abstract BaseService getService(); // Service 반환 +protected abstract String getResourceName(); // 리소스 이름 (로깅용) +``` + +**제공 API**: +```java +POST / → create(D dto) # 생성 +GET /{id} → getById(ID id) # 단건 조회 +GET / → getAll() # 전체 조회 +GET /page → getPage(Pageable) # 페이징 조회 +PUT /{id} → update(ID id, D dto) # 수정 +DELETE /{id} → delete(ID id) # 삭제 +GET /{id}/exists → exists(ID id) # 존재 여부 +``` + +**사용 예제**: +```java +@RestController +@RequestMapping("/api/ships") +@RequiredArgsConstructor +public class ShipController extends BaseController { + + private final ShipService shipService; + + @Override + protected BaseService getService() { + return shipService; + } 
+ + @Override + protected String getResourceName() { + return "Ship"; + } + + // 추가 커스텀 API가 필요한 경우 여기에 정의 +} +``` + +--- + +#### 3.0.6 ApiResponse + +**목적**: 통일된 API 응답 형식 제공 + +**위치**: `com.snp.batch.common.web.ApiResponse` + +**필드 구조**: +```java +private boolean success; // 성공 여부 +private String message; // 메시지 +private T data; // 응답 데이터 +private String errorCode; // 에러 코드 (실패 시) +``` + +**사용 방법**: +```java +// 성공 응답 +ApiResponse response = ApiResponse.success(shipDto); +ApiResponse response = ApiResponse.success("Ship created", shipDto); + +// 실패 응답 +ApiResponse response = ApiResponse.error("Ship not found"); +ApiResponse response = ApiResponse.error("Validation failed", "ERR_001"); +``` + +**응답 예제**: +```json +{ + "success": true, + "message": "Success", + "data": { + "shipName": "Titanic", + "shipType": "Passenger" + }, + "errorCode": null +} +``` + +--- + +### 3.1 BaseApiReader + +**목적**: REST API에서 데이터를 읽어오는 ItemReader 구현 패턴 제공 + +**위치**: `com.snp.batch.common.batch.reader.BaseApiReader` + +**필수 구현 메소드**: +```java +protected abstract String getApiPath(); // API 경로 +protected abstract List extractDataFromResponse(Object response); // 응답 파싱 +protected abstract Class getResponseType(); // 응답 클래스 +protected abstract String getReaderName(); // Reader 이름 +``` + +**선택적 오버라이드 메소드**: +```java +protected void addQueryParams(UriBuilder uriBuilder) {} // 쿼리 파라미터 추가 +protected void beforeApiCall() {} // API 호출 전처리 +protected void afterApiCall(List data) {} // API 호출 후처리 +protected void handleApiError(Exception e) {} // 에러 처리 +``` + +**제공되는 기능**: +- API 호출 및 데이터 캐싱 (한 번만 호출) +- 순차적 데이터 반환 (read() 메소드) +- 자동 로깅 및 에러 핸들링 + +**사용 예제**: +```java +@Component +public class ShipDataReader extends BaseApiReader { + + public ShipDataReader(WebClient webClient) { + super(webClient); + } + + @Override + protected String getApiPath() { + return "/api/v1/ships"; + } + + @Override + protected List extractDataFromResponse(Object response) { + ShipApiResponse apiResponse = 
(ShipApiResponse) response; + return apiResponse.getData(); + } + + @Override + protected Class getResponseType() { + return ShipApiResponse.class; + } + + @Override + protected String getReaderName() { + return "ShipDataReader"; + } +} +``` + +--- + +### 3.2 BaseProcessor + +**목적**: 데이터 변환 및 처리 로직의 템플릿 제공 + +**위치**: `com.snp.batch.common.batch.processor.BaseProcessor` + +**필수 구현 메소드**: +```java +protected abstract O transform(I item) throws Exception; // 데이터 변환 +protected abstract boolean shouldProcess(I item); // 처리 여부 판단 +protected abstract String getProcessorName(); // Processor 이름 +``` + +**선택적 오버라이드 메소드**: +```java +protected void beforeProcess(I item) {} // 전처리 +protected void afterProcess(I input, O output) {} // 후처리 +protected void handleProcessError(I item, Exception e) {} // 에러 처리 +protected void onItemFiltered(I item) {} // 필터링 로깅 +``` + +**제공되는 기능**: +- DTO → Entity 변환 패턴 +- 데이터 필터링 (shouldProcess 기반) +- 자동 로깅 및 에러 핸들링 + +**사용 예제**: +```java +@Component +public class ShipDataProcessor extends BaseProcessor { + + @Override + protected ShipEntity transform(ShipDto dto) { + return ShipEntity.builder() + .shipId(dto.getShipId()) + .shipName(dto.getShipName()) + .shipType(dto.getShipType()) + .build(); + } + + @Override + protected boolean shouldProcess(ShipDto dto) { + // 유효성 검사: shipId가 있는 경우만 처리 + return dto.getShipId() != null && !dto.getShipId().isEmpty(); + } + + @Override + protected String getProcessorName() { + return "ShipDataProcessor"; + } + + @Override + protected void onItemFiltered(ShipDto dto) { + log.warn("Ship ID가 없어 필터링됨: {}", dto); + } +} +``` + +--- + +### 3.3 BaseWriter + +**목적**: 데이터베이스 저장 로직의 템플릿 제공 + +**위치**: `com.snp.batch.common.batch.writer.BaseWriter` + +**필수 구현 메소드**: +```java +protected abstract void writeItems(List items) throws Exception; // 저장 로직 +protected abstract String getWriterName(); // Writer 이름 +``` + +**선택적 오버라이드 메소드**: +```java +protected void beforeWrite(List items) {} // 저장 전처리 +protected void afterWrite(List 
items) {} // 저장 후처리 +protected void handleWriteError(List items, Exception e) {} // 에러 처리 +protected List filterItems(List items) {} // 아이템 필터링 +protected void validateBatchSize(List items) {} // 배치 크기 검증 +``` + +**제공되는 기능**: +- 배치 저장 패턴 (Chunk 단위) +- Null 아이템 자동 필터링 +- 배치 크기 검증 및 경고 +- 자동 로깅 및 에러 핸들링 + +**사용 예제**: +```java +@Component +@RequiredArgsConstructor +public class ShipDataWriter extends BaseWriter { + + private final ShipRepository shipRepository; + + @Override + protected void writeItems(List items) { + shipRepository.saveAll(items); + } + + @Override + protected String getWriterName() { + return "ShipDataWriter"; + } + + @Override + protected void afterWrite(List items) { + log.info("Ship 데이터 저장 완료: {} 건", items.size()); + } +} +``` + +--- + +### 3.4 BaseJobConfig + +**목적**: Batch Job 설정의 표준 템플릿 제공 + +**위치**: `com.snp.batch.common.batch.config.BaseJobConfig` + +**필수 구현 메소드**: +```java +protected abstract String getJobName(); // Job 이름 +protected abstract ItemReader createReader(); // Reader 생성 +protected abstract ItemProcessor createProcessor(); // Processor 생성 +protected abstract ItemWriter createWriter(); // Writer 생성 +``` + +**선택적 오버라이드 메소드**: +```java +protected String getStepName() {} // Step 이름 (기본: {jobName}Step) +protected int getChunkSize() {} // Chunk 크기 (기본: 100) +protected void configureJob(JobBuilder jobBuilder) {} // Job 커스터마이징 +protected void configureStep(StepBuilder stepBuilder) {} // Step 커스터마이징 +``` + +**제공되는 기능**: +- Job 및 Step 자동 생성 +- Chunk 기반 처리 설정 +- Processor가 없는 경우도 지원 + +**사용 예제**: +```java +@Configuration +@RequiredArgsConstructor +public class ShipDataImportJobConfig extends BaseJobConfig { + + private final ShipDataReader shipDataReader; + private final ShipDataProcessor shipDataProcessor; + private final ShipDataWriter shipDataWriter; + + public ShipDataImportJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + ShipDataReader shipDataReader, + ShipDataProcessor shipDataProcessor, + 
ShipDataWriter shipDataWriter) { + super(jobRepository, transactionManager); + this.shipDataReader = shipDataReader; + this.shipDataProcessor = shipDataProcessor; + this.shipDataWriter = shipDataWriter; + } + + @Override + protected String getJobName() { + return "shipDataImportJob"; + } + + @Override + protected ItemReader createReader() { + return shipDataReader; + } + + @Override + protected ItemProcessor createProcessor() { + return shipDataProcessor; + } + + @Override + protected ItemWriter createWriter() { + return shipDataWriter; + } + + @Override + protected int getChunkSize() { + return 50; // 커스텀 Chunk 크기 + } + + @Bean(name = "shipDataImportJob") + public Job shipDataImportJob() { + return job(); + } + + @Bean(name = "shipDataImportStep") + public Step shipDataImportStep() { + return step(); + } +} +``` + +--- + +### 3.5 BaseJdbcRepository + +**목적**: JDBC 기반 Repository의 CRUD 템플릿 제공 + +**위치**: `com.snp.batch.common.batch.repository.BaseJdbcRepository` + +**필수 구현 메소드**: +```java +protected abstract String getTableName(); // 테이블 이름 +protected abstract RowMapper getRowMapper(); // RowMapper +protected abstract ID extractId(T entity); // ID 추출 +protected abstract String getInsertSql(); // INSERT SQL +protected abstract String getUpdateSql(); // UPDATE SQL +protected abstract void setInsertParameters(PreparedStatement ps, T entity); +protected abstract void setUpdateParameters(PreparedStatement ps, T entity); +protected abstract String getEntityName(); // Entity 이름 (로깅용) +``` + +**제공되는 기능**: +- findById, findAll, count, existsById +- save, insert, update +- batchInsert, batchUpdate, saveAll +- deleteById, deleteAll +- 자동 트랜잭션 처리 + +**사용 예제**: +```java +@Repository +public class ShipRepository extends BaseJdbcRepository { + + public ShipRepository(JdbcTemplate jdbcTemplate) { + super(jdbcTemplate); + } + + @Override + protected String getTableName() { + return "ships"; + } + + @Override + protected RowMapper getRowMapper() { + return (rs, rowNum) -> 
ShipEntity.builder() + .shipId(rs.getString("ship_id")) + .shipName(rs.getString("ship_name")) + .shipType(rs.getString("ship_type")) + .build(); + } + + @Override + protected String extractId(ShipEntity entity) { + return entity.getShipId(); + } + + @Override + protected String getInsertSql() { + return "INSERT INTO ships (ship_id, ship_name, ship_type) VALUES (?, ?, ?)"; + } + + @Override + protected String getUpdateSql() { + return "UPDATE ships SET ship_name = ?, ship_type = ? WHERE ship_id = ?"; + } + + @Override + protected void setInsertParameters(PreparedStatement ps, ShipEntity entity) throws SQLException { + ps.setString(1, entity.getShipId()); + ps.setString(2, entity.getShipName()); + ps.setString(3, entity.getShipType()); + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, ShipEntity entity) throws SQLException { + ps.setString(1, entity.getShipName()); + ps.setString(2, entity.getShipType()); + ps.setString(3, entity.getShipId()); + } + + @Override + protected String getEntityName() { + return "Ship"; + } +} +``` + +--- + +## 4. 새로운 배치 Job 생성 가이드 + +### 4.1 사전 준비 + +1. **도메인 파악** + - 어떤 데이터를 수집할 것인가? (예: 선박 데이터, 사용자 데이터 등) + - API 엔드포인트는 무엇인가? + - 데이터 구조는 어떻게 되는가? + +2. 
**데이터베이스 테이블 생성** + ```sql + CREATE TABLE ships ( + ship_id VARCHAR(50) PRIMARY KEY, + ship_name VARCHAR(100) NOT NULL, + ship_type VARCHAR(50), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ); + ``` + +### 4.2 단계별 구현 + +#### Step 1: DTO 클래스 생성 (jobs/{domain}/batch/dto 패키지) + +**API 응답 DTO**: +```java +@Data +public class ShipApiResponse { + private List data; + private int totalCount; +} +``` + +**데이터 DTO**: +```java +@Data +@Builder +public class ShipDto { + private String imoNumber; + private String coreShipInd; + private String datasetVersion; +} +``` + +#### Step 2: Entity 클래스 생성 (jobs/{domain}/batch/entity 패키지) + +**JDBC 기반 Entity (JPA 어노테이션 없음)**: +```java +/** + * Ship Entity - JDBC Template 기반 + * JPA 어노테이션 사용 금지 + */ +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class ShipEntity extends BaseEntity { + /** + * 기본 키 + * 컬럼: id (BIGSERIAL) + */ + private Long id; + + /** + * IMO 번호 + * 컬럼: imo_number (VARCHAR(20), UNIQUE) + */ + private String imoNumber; + + /** + * Core Ship Indicator + * 컬럼: core_ship_ind (VARCHAR(10)) + */ + private String coreShipInd; + + // createdAt, updatedAt, createdBy, updatedBy는 BaseEntity에서 상속 +} +``` + +#### Step 3: Repository 구현 (jobs/{domain}/batch/repository 패키지) + +```java +@Repository +public class ShipRepository extends BaseJdbcRepository { + + public ShipRepository(JdbcTemplate jdbcTemplate) { + super(jdbcTemplate); + } + + // 추상 메소드 구현 (위의 3.5 예제 참고) +} +``` + +#### Step 4: Reader 구현 (jobs/{domain}/batch/reader 패키지) + +```java +@Component +public class ShipDataReader extends BaseApiReader { + + public ShipDataReader(WebClient webClient) { + super(webClient); + } + + // 추상 메소드 구현 (위의 3.1 예제 참고) +} +``` + +#### Step 5: Processor 구현 (jobs/{domain}/batch/processor 패키지) + +```java +@Component +public class ShipDataProcessor extends BaseProcessor { + + // 추상 메소드 구현 (위의 3.2 예제 참고) +} +``` + +#### Step 6: Writer 구현 (jobs/{domain}/batch/writer 패키지) + +```java 
+@Component +@RequiredArgsConstructor +public class ShipDataWriter extends BaseWriter { + + private final ShipRepository shipRepository; + + // 추상 메소드 구현 (위의 3.3 예제 참고) +} +``` + +#### Step 7: JobConfig 구현 (jobs/{domain}/batch/config 패키지) + +```java +@Configuration +@RequiredArgsConstructor +public class ShipDataImportJobConfig extends BaseJobConfig { + + private final ShipDataReader shipDataReader; + private final ShipDataProcessor shipDataProcessor; + private final ShipDataWriter shipDataWriter; + + public ShipDataImportJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + ShipDataReader shipDataReader, + ShipDataProcessor shipDataProcessor, + ShipDataWriter shipDataWriter) { + super(jobRepository, transactionManager); + this.shipDataReader = shipDataReader; + this.shipDataProcessor = shipDataProcessor; + this.shipDataWriter = shipDataWriter; + } + + // 추상 메소드 구현 (위의 3.4 예제 참고) + + @Bean(name = "shipDataImportJob") + public Job shipDataImportJob() { + return job(); + } + + @Bean(name = "shipDataImportStep") + public Step shipDataImportStep() { + return step(); + } +} +``` + +#### Step 8: 테스트 및 실행 + +1. **애플리케이션 시작** + ```bash + mvn spring-boot:run + ``` + +2. **웹 UI에서 확인** + - http://localhost:8080 + - "shipDataImportJob" 확인 + - "즉시 실행" 버튼 클릭 + +3. **로그 확인** + ``` + ShipDataReader API 호출 시작 + ShipDataReader API 응답 성공: 100 건 + ShipDataReader 데이터 100건 조회 완료 + ShipDataWriter 데이터 저장 시작: 50 건 + ShipDataWriter 데이터 저장 완료: 50 건 + ``` + +--- + +## 5. 
예제: 전체 구현 과정 + +### 5.1 시나리오 +- **목적**: 외부 API에서 사용자 데이터를 수집하여 데이터베이스에 저장 +- **API**: `GET /api/v1/users?status=active` +- **필터링**: 이메일이 있는 사용자만 저장 + +### 5.2 파일 구조 +``` +src/main/java/com/snp/batch/ +└── jobs/user/ + └── batch/ # 배치 작업 + ├── config/ + │ └── UserDataImportJobConfig.java + ├── dto/ + │ ├── UserDto.java + │ └── UserApiResponse.java + ├── entity/ + │ └── UserEntity.java # extends BaseEntity (JPA 제거) + ├── processor/ + │ └── UserDataProcessor.java # extends BaseProcessor + ├── reader/ + │ └── UserDataReader.java # extends BaseApiReader + ├── repository/ + │ ├── UserRepository.java # 인터페이스 + │ └── UserRepositoryImpl.java # extends BaseJdbcRepository + └── writer/ + └── UserDataWriter.java # extends BaseWriter +``` + +### 5.3 테이블 생성 SQL +```sql +CREATE TABLE users ( + user_id BIGINT PRIMARY KEY, + username VARCHAR(100) NOT NULL, + email VARCHAR(255), + status VARCHAR(50), + imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); +``` + +### 5.4 코드 구현 + +각 클래스의 전체 구현 코드는 섹션 3의 추상 클래스 사용 예제를 참고하세요. + +--- + +## 6. 
베스트 프랙티스 + +### 6.1 네이밍 컨벤션 + +| 구분 | 패턴 | 예제 | +|------|------|------| +| Job 이름 | `{domain}DataImportJob` | `shipDataImportJob` | +| Step 이름 | `{domain}DataImportStep` | `shipDataImportStep` | +| Reader | `{Domain}DataReader` | `ShipDataReader` | +| Processor | `{Domain}DataProcessor` | `ShipDataProcessor` | +| Writer | `{Domain}DataWriter` | `ShipDataWriter` | +| JobConfig | `{Domain}DataImportJobConfig` | `ShipDataImportJobConfig` | +| Repository | `{Domain}Repository` | `ShipRepository` | +| Entity | `{Domain}Entity` | `ShipEntity` | +| DTO | `{Domain}Dto` | `ShipDto` | + +### 6.2 패키지 구조 + +**배치 전용 Job** (예: shipimport): +``` +com.snp.batch.jobs.{domain}/ +└── batch/ + ├── config/ + │ └── {Domain}DataImportJobConfig.java # extends BaseJobConfig + ├── dto/ + │ ├── {Domain}Dto.java + │ └── {Domain}ApiResponse.java + ├── entity/ + │ └── {Domain}Entity.java # extends BaseEntity (JPA 제거) + ├── processor/ + │ └── {Domain}DataProcessor.java # extends BaseProcessor + ├── reader/ + │ └── {Domain}DataReader.java # extends BaseApiReader + ├── repository/ + │ ├── {Domain}Repository.java # 인터페이스 + │ └── {Domain}RepositoryImpl.java # extends BaseJdbcRepository + └── writer/ + └── {Domain}DataWriter.java # extends BaseWriter +``` + +**배치 + 웹 API Job** (예: sample): +``` +com.snp.batch.jobs.{domain}/ +├── batch/ # 배치 작업 +│ ├── config/ +│ ├── dto/ +│ ├── entity/ +│ ├── processor/ +│ ├── reader/ +│ ├── repository/ +│ └── writer/ +└── web/ # 웹 API + ├── controller/ + │ └── {Domain}WebController.java # extends BaseController + ├── dto/ + │ └── {Domain}WebDto.java # extends BaseDto + └── service/ + └── {Domain}WebService.java # extends BaseServiceImpl (JDBC) +``` + +### 6.3 Chunk 크기 선택 가이드 + +| 데이터 크기 | Chunk 크기 | 사용 시나리오 | +|-------------|-----------|--------------| +| 소량 (< 1,000) | 50-100 | 간단한 API 데이터 | +| 중량 (1,000-10,000) | 100-500 | 일반적인 배치 작업 | +| 대량 (> 10,000) | 500-1,000 | 대용량 데이터 처리 | + +### 6.4 에러 처리 전략 + +1. 
**API 호출 실패** + - `BaseApiReader.handleApiError()` 오버라이드 + - 빈 리스트 반환 (Job 실패 방지) 또는 예외 던지기 + +2. **데이터 변환 실패** + - `BaseProcessor.handleProcessError()` 오버라이드 + - 문제 데이터 로깅 후 null 반환 (다음 데이터 계속 처리) + +3. **저장 실패** + - `BaseWriter.handleWriteError()` 오버라이드 + - 재시도 로직 또는 부분 저장 구현 + +### 6.5 로깅 전략 + +```java +// Reader에서 +@Override +protected void afterApiCall(List data) { + log.info("API 호출 성공: {} 건 조회", data.size()); +} + +// Processor에서 +@Override +protected void onItemFiltered(ShipDto dto) { + log.debug("필터링됨: {}", dto); +} + +// Writer에서 +@Override +protected void afterWrite(List items) { + log.info("저장 완료: {} 건", items.size()); +} +``` + +### 6.6 성능 최적화 + +1. **배치 Insert 사용** + ```java + @Override + protected void writeItems(List items) { + shipRepository.batchInsert(items); // saveAll() 대신 + } + ``` + +2. **API 페이징 처리** + ```java + @Override + protected void addQueryParams(UriBuilder uriBuilder) { + uriBuilder.queryParam("page", 1); + uriBuilder.queryParam("size", 1000); + } + ``` + +3. **경량 쿼리 사용** + - 불필요한 JOIN 제거 + - 필요한 컬럼만 SELECT + +--- + +## 7. 트러블슈팅 + +### 7.1 일반적인 문제 + +#### 문제 1: Job이 실행되지 않음 +**증상**: 웹 UI에서 Job 목록이 보이지 않음 + +**해결 방법**: +1. `@Bean` 어노테이션 확인 + ```java + @Bean(name = "shipDataImportJob") + public Job shipDataImportJob() { + return job(); + } + ``` + +2. JobConfig 클래스에 `@Configuration` 어노테이션 확인 + +3. 로그 확인: "Job 생성: shipDataImportJob" 메시지 확인 + +--- + +#### 문제 2: API 호출 실패 +**증상**: "API 호출 실패" 로그, 데이터 0건 + +**해결 방법**: +1. WebClient 설정 확인 (`application.yml`) + ```yaml + api: + base-url: http://api.example.com + ``` + +2. API 경로 확인 + ```java + @Override + protected String getApiPath() { + return "/api/v1/ships"; // 슬래시 확인 + } + ``` + +3. 네트워크 연결 테스트 + ```bash + curl http://api.example.com/api/v1/ships + ``` + +--- + +#### 문제 3: 데이터가 저장되지 않음 +**증상**: "저장 완료" 로그는 있지만 DB에 데이터 없음 + +**해결 방법**: +1. 트랜잭션 확인 + ```java + @Override + protected void writeItems(List items) { + shipRepository.saveAll(items); // 트랜잭션 내에서 실행되는지 확인 + } + ``` + +2. 
SQL 로그 활성화 (`application.yml`) + ```yaml + logging: + level: + org.springframework.jdbc.core: DEBUG + ``` + +3. 데이터베이스 연결 확인 + ```bash + psql -U postgres -d batch_db + SELECT * FROM ships; + ``` + +--- + +#### 문제 4: Chunk 처리 중 일부만 저장됨 +**증상**: 100건 조회 중 50건만 저장됨 + +**해결 방법**: +1. Processor의 `shouldProcess()` 확인 + ```java + @Override + protected boolean shouldProcess(ShipDto dto) { + // false 반환 시 필터링됨 + return dto.getShipId() != null; + } + ``` + +2. 필터링 로그 확인 + ```java + @Override + protected void onItemFiltered(ShipDto dto) { + log.warn("필터링됨: {}", dto); // 로그 추가 + } + ``` + +--- + +#### 문제 5: 메모리 부족 오류 (OutOfMemoryError) +**증상**: 대량 데이터 처리 중 OOM 발생 + +**해결 방법**: +1. Chunk 크기 줄이기 + ```java + @Override + protected int getChunkSize() { + return 50; // 기본 100에서 감소 + } + ``` + +2. JVM 힙 메모리 증가 + ```bash + java -Xmx2g -jar snp-batch.jar + ``` + +3. API 페이징 처리 구현 + +--- + +### 7.2 로그 레벨 설정 + +**application.yml**: +```yaml +logging: + level: + com.snp.batch: DEBUG # 배치 애플리케이션 + com.snp.batch.common.batch: INFO # 배치 추상 클래스 (INFO) + com.snp.batch.common.web: INFO # 웹 추상 클래스 (INFO) + org.springframework.batch: INFO # Spring Batch + org.springframework.jdbc.core: DEBUG # SQL 쿼리 + org.springframework.web.reactive: DEBUG # WebClient +``` + +--- + +### 7.3 디버깅 팁 + +1. **Step 실행 상태 확인** + ```sql + SELECT * FROM BATCH_STEP_EXECUTION + WHERE JOB_EXECUTION_ID = {executionId}; + ``` + +2. **Step Context 확인** + ```sql + SELECT * FROM BATCH_STEP_EXECUTION_CONTEXT + WHERE STEP_EXECUTION_ID = {stepExecutionId}; + ``` + +3. **Job Parameter 확인** + ```sql + SELECT * FROM BATCH_JOB_EXECUTION_PARAMS + WHERE JOB_EXECUTION_ID = {executionId}; + ``` + +--- + +## 8. 자주 묻는 질문 (FAQ) + +### Q1: Processor 없이 Reader → Writer만 사용할 수 있나요? +**A**: 네, 가능합니다. `createProcessor()`에서 `null`을 반환하면 됩니다. + +```java +@Override +protected ItemProcessor createProcessor() { + return null; // Processor 없이 Reader → Writer +} +``` + +--- + +### Q2: 여러 개의 Writer를 사용할 수 있나요? +**A**: `CompositeItemWriter`를 사용하면 가능합니다. 
+ +```java +@Override +protected ItemWriter<ShipEntity> createWriter() { +    CompositeItemWriter<ShipEntity> compositeWriter = new CompositeItemWriter<>(); +    compositeWriter.setDelegates(Arrays.asList( +        shipDataWriter, +        auditLogWriter +    )); +    return compositeWriter; +} +``` + +--- + +### Q3: API 페이징을 지원하나요? +**A**: 현재 `BaseApiReader`는 단일 호출만 지원합니다. 페이징이 필요한 경우 커스텀 Reader를 구현하세요. + +--- + +### Q4: 스케줄 등록은 어떻게 하나요? +**A**: 웹 UI에서 "스케줄 등록" 버튼을 클릭하여 Cron 표현식을 입력하면 됩니다. + +``` +예제 Cron 표현식: +- 매일 오전 2시: 0 0 2 * * ? +- 매시간: 0 0 * * * ? +- 매주 월요일 오전 9시: 0 0 9 ? * MON +``` + +--- + +### Q5: Job 실행 이력은 어디서 확인하나요? +**A**: 웹 UI의 다음 위치에서 확인할 수 있습니다: +1. 대시보드: 최근 실행 이력 (최근 10건) +2. Job 상세 페이지: 특정 Job의 모든 실행 이력 +3. 타임라인 차트: 일/주/월 단위 시각화 + +--- + +## 9. 추가 리소스 + +### 9.1 공식 문서 +- [Spring Batch 공식 문서](https://docs.spring.io/spring-batch/docs/current/reference/html/) +- [Spring Boot 공식 문서](https://docs.spring.io/spring-boot/docs/current/reference/html/) +- [Quartz Scheduler](http://www.quartz-scheduler.org/documentation/) + +### 9.2 프로젝트 파일 +- `application.yml`: 애플리케이션 설정 +- `schema-postgresql.sql`: 데이터베이스 스키마 +- `BaseApiReader.java`: API Reader 추상 클래스 (src/main/java/com/snp/batch/common/batch/reader/BaseApiReader.java:1) +- `BaseJobConfig.java`: Job Config 추상 클래스 (src/main/java/com/snp/batch/common/batch/config/BaseJobConfig.java:1) +- `BaseJdbcRepository.java`: JDBC Repository 추상 클래스 (src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java:1) + +--- + +--- + +## 10. 추상화 클래스 체크리스트 + +새로운 배치 Job을 만들 때 다음 체크리스트를 참고하세요. 
+ +### 10.1 필수 구현 항목 + +- [ ] **패키지 구조 생성** + - [ ] `jobs/{domain}/batch/` 디렉토리 생성 (배치 작업) + - [ ] `jobs/{domain}/web/` 디렉토리 생성 (웹 API 필요 시) + +- [ ] **DTO 생성** (`jobs/{domain}/batch/dto/` 패키지) + - [ ] API 응답 DTO (예: `ShipApiResponse`) + - [ ] 데이터 DTO (예: `ShipDto`) + +- [ ] **Entity 생성** (`jobs/{domain}/batch/entity/` 패키지) + - [ ] `BaseEntity` 상속 + - [ ] `@SuperBuilder`, `@NoArgsConstructor`, `@AllArgsConstructor` 추가 + - [ ] `@EqualsAndHashCode(callSuper = true)` 추가 + - [ ] **JPA 어노테이션 사용 금지** (@Entity, @Table, @Column 등) + - [ ] 컬럼 매핑 정보는 주석으로 명시 + +- [ ] **Repository 구현** (`jobs/{domain}/batch/repository/` 패키지) + - [ ] 인터페이스 생성 (예: `ShipRepository`) + - [ ] 구현체 생성 (예: `ShipRepositoryImpl`) + - [ ] `BaseJdbcRepository` 상속 (JDBC 전용) + - [ ] `getTableName()`, `getInsertSql()`, `getUpdateSql()` 구현 + - [ ] `setInsertParameters()`, `setUpdateParameters()` 구현 + - [ ] `getRowMapper()` 구현 (RowMapper 클래스 생성) + - [ ] 커스텀 쿼리 메서드 정의 + +- [ ] **Reader 구현** (`jobs/{domain}/batch/reader/` 패키지) + - [ ] `BaseApiReader` 상속 + - [ ] `getApiPath()` 구현 + - [ ] `extractDataFromResponse()` 구현 + - [ ] `getResponseType()` 구현 + - [ ] `getReaderName()` 구현 + +- [ ] **Processor 구현** (`jobs/{domain}/batch/processor/` 패키지) + - [ ] `BaseProcessor` 상속 + - [ ] `transform()` 구현 (DTO → Entity 변환) + - [ ] `shouldProcess()` 구현 (필터링 로직) + - [ ] `getProcessorName()` 구현 + +- [ ] **Writer 구현** (`jobs/{domain}/batch/writer/` 패키지) + - [ ] `BaseWriter` 상속 + - [ ] `writeItems()` 구현 (Repository 호출) + - [ ] `getWriterName()` 구현 + +- [ ] **JobConfig 구현** (`jobs/{domain}/batch/config/` 패키지) + - [ ] `BaseJobConfig` 상속 + - [ ] `getJobName()` 구현 + - [ ] `createReader()` 구현 + - [ ] `createProcessor()` 구현 + - [ ] `createWriter()` 구현 + - [ ] `getChunkSize()` 구현 (선택사항, 기본값: 100) + - [ ] `@Bean` 메서드로 Job과 Step 등록 + +### 10.2 선택 구현 항목 + +- [ ] **웹 API 구현** (REST API 제공 시) + - [ ] **DTO 생성** (`jobs/{domain}/web/dto/` 패키지) + - [ ] `BaseDto` 상속 (웹 전용 DTO) + - [ ] **Service 구현** (`jobs/{domain}/web/service/` 패키지) + - [ ] 
`BaseServiceImpl` 상속 (JDBC 기반) + - [ ] `getRepository()` 구현 (배치 Repository 재사용) + - [ ] `toDto()`, `toEntity()` 구현 + - [ ] CRUD 메서드 오버라이드 (필요 시) + - [ ] **Controller 구현** (`jobs/{domain}/web/controller/` 패키지) + - [ ] `BaseController` 상속 + - [ ] `@RequestMapping` 설정 + - [ ] 커스텀 API 추가 (필요 시) + +- [ ] **에러 핸들링** + - [ ] `BaseApiReader.handleApiError()` 오버라이드 + - [ ] `BaseProcessor.handleProcessError()` 오버라이드 + - [ ] `BaseWriter.handleWriteError()` 오버라이드 + +- [ ] **로깅 강화** + - [ ] `beforeApiCall()`, `afterApiCall()` 구현 + - [ ] `beforeProcess()`, `afterProcess()` 구현 + - [ ] `beforeWrite()`, `afterWrite()` 구현 + +### 10.3 테스트 항목 + +- [ ] **단위 테스트** + - [ ] Reader 테스트 (API 모킹) + - [ ] Processor 테스트 (변환 로직 검증) + - [ ] Writer 테스트 (Repository 모킹) + +- [ ] **통합 테스트** + - [ ] Job 실행 테스트 + - [ ] 데이터베이스 저장 검증 + +- [ ] **성능 테스트** + - [ ] 대용량 데이터 처리 테스트 + - [ ] Chunk 크기 최적화 + +--- + +--- + +## 📚 관련 문서 + +### 핵심 문서 +- **[README.md](README.md)** - 프로젝트 개요 및 빠른 시작 가이드 +- **[CLAUDE.md](CLAUDE.md)** - 프로젝트 형상관리 문서 (세션 연속성) +- **[SWAGGER_GUIDE.md](SWAGGER_GUIDE.md)** - Swagger API 문서 사용 가이드 + +### 아키텍처 문서 +- **[docs/architecture/ARCHITECTURE.md](docs/architecture/ARCHITECTURE.md)** - 프로젝트 아키텍처 상세 설계 +- **[docs/architecture/PROJECT_STRUCTURE.md](docs/architecture/PROJECT_STRUCTURE.md)** - Job 중심 패키지 구조 가이드 + +### 구현 가이드 +- **[docs/guides/PROXY_SERVICE_GUIDE.md](docs/guides/PROXY_SERVICE_GUIDE.md)** - 외부 API 프록시 패턴 구현 가이드 +- **[docs/guides/SHIP_API_EXAMPLE.md](docs/guides/SHIP_API_EXAMPLE.md)** - Maritime API 연동 실전 예제 + +### 보안 문서 +- **[docs/security/README.md](docs/security/README.md)** - 보안 전략 개요 (계획 단계) + +--- + +**마지막 업데이트**: 2025-10-16 +**버전**: 1.3.0 + +--- + +## 변경 이력 + +### v1.3.0 (2025-10-16) +- ✅ 프로젝트 구조를 현행화: `common/batch/`와 `common/web/` 분리 반영 +- ✅ jobs 패키지 구조 업데이트: `batch/`와 `web/` 서브패키지 구조 반영 +- ✅ 모든 Base 클래스 위치 경로 수정 (common.base → common.batch/web) +- ✅ JDBC vs JPA 사용 구분 명확화 (jobs는 JDBC 전용, global은 JPA 허용) +- ✅ Entity 예제 업데이트: JPA 어노테이션 제거, 주석 기반 매핑 설명 추가 +- ✅ 체크리스트 강화: 
패키지 구조, Repository 구현, 웹 API 구현 세분화 + +### v1.2.0 (2025-10-15) +- 문서 간 상호 참조 링크 추가 +- 관련 문서 섹션 추가 + +### v1.1.0 (2025-10-14) +- 추상화 클래스 체크리스트 추가 +- 예제 코드 개선 + +### v1.0.0 (2025-10-13) +- 초기 버전 작성 diff --git a/SWAGGER_GUIDE.md b/SWAGGER_GUIDE.md new file mode 100644 index 0000000..0284434 --- /dev/null +++ b/SWAGGER_GUIDE.md @@ -0,0 +1,224 @@ +# Swagger API 문서화 가이드 + +**버전**: 1.1.0 +**프로젝트**: SNP Sync Batch - 해양 데이터 동기화 배치 시스템 + +--- + +## Swagger UI 접속 정보 + +### 접속 URL + +``` +Swagger UI: http://localhost:8051/snp-sync/swagger-ui/index.html +API 문서 (JSON): http://localhost:8051/snp-sync/v3/api-docs +API 문서 (YAML): http://localhost:8051/snp-sync/v3/api-docs.yaml +``` + +### 환경별 접속 URL + +| 환경 | URL | +|------|-----| +| 로컬 개발 | `http://localhost:8051/snp-sync/swagger-ui/index.html` | +| 개발 서버 | `http://211.208.115.83:8051/snp-sync/swagger-ui/index.html` | +| 운영 서버 | `http://211.208.115.83:8051/snp-sync/swagger-ui/index.html` | + +--- + +## 제공되는 API + +### Batch Management API (`/api/batch`) + +배치 작업 실행, 조회, 스케줄 관리 API + +#### Job 실행 및 조회 + +| Method | Endpoint | 설명 | +|--------|----------|------| +| `GET` | `/api/batch/jobs` | 등록된 배치 작업 목록 조회 | +| `POST` | `/api/batch/jobs/{jobName}/execute` | 배치 작업 수동 실행 | +| `GET` | `/api/batch/jobs/{jobName}/executions` | 작업별 실행 이력 조회 | +| `GET` | `/api/batch/executions/{executionId}` | 실행 정보 조회 | +| `GET` | `/api/batch/executions/{executionId}/detail` | Step 포함 상세 실행 정보 조회 | +| `POST` | `/api/batch/executions/{executionId}/stop` | 실행 중지 | + +#### 스케줄 관리 + +| Method | Endpoint | 설명 | +|--------|----------|------| +| `GET` | `/api/batch/schedules` | 스케줄 목록 조회 | +| `GET` | `/api/batch/schedules/{jobName}` | 특정 작업 스케줄 조회 | +| `POST` | `/api/batch/schedules` | 스케줄 생성 | +| `PUT` | `/api/batch/schedules/{jobName}` | 스케줄 수정 | +| `DELETE` | `/api/batch/schedules/{jobName}` | 스케줄 삭제 | +| `PATCH` | `/api/batch/schedules/{jobName}/toggle` | 스케줄 활성화/비활성화 | + +#### 대시보드 및 타임라인 + +| Method | Endpoint | 설명 | +|--------|----------|------| +| 
`GET` | `/api/batch/dashboard` | 대시보드 데이터 조회 | +| `GET` | `/api/batch/timeline` | 타임라인 데이터 조회 | +| `GET` | `/api/batch/timeline/period-executions` | 기간별 실행 이력 조회 | + +--- + +## API 테스트 예시 + +### 1. 배치 작업 목록 조회 + +```http +GET http://localhost:8051/snp-sync/api/batch/jobs +``` + +**예상 응답**: +```json +[ + "shipDetailSyncJob", + "codeDataSyncJob", + "eventDataSyncJob", + "facilityDataSyncJob", + "pscDataSyncJob", + "riskDataSyncJob", + "shipComplianceDataSyncJob", + "anchorageCallSyncJob", + "lastPositionUpdateJob" +] +``` + +### 2. 배치 작업 실행 + +```http +POST http://localhost:8051/snp-sync/api/batch/jobs/shipDetailSyncJob/execute +``` + +**예상 응답**: +```json +{ + "success": true, + "message": "Job started successfully", + "executionId": 1 +} +``` + +### 3. 실행 이력 조회 + +```http +GET http://localhost:8051/snp-sync/api/batch/jobs/shipDetailSyncJob/executions +``` + +### 4. 스케줄 생성 + +```http +POST http://localhost:8051/snp-sync/api/batch/schedules +Content-Type: application/json + +{ + "jobName": "shipDetailSyncJob", + "cronExpression": "0 0 * * * ?", + "description": "선박 정보 매시간 동기화" +} +``` + +### 5. 스케줄 활성화/비활성화 + +```http +PATCH http://localhost:8051/snp-sync/api/batch/schedules/shipDetailSyncJob/toggle +Content-Type: application/json + +{ + "active": false +} +``` + +--- + +## Swagger 어노테이션 가이드 + +### 주요 어노테이션 + +#### 1. `@Tag` - API 그룹화 +```java +@Tag(name = "Batch Management API", description = "배치 작업 실행 및 스케줄 관리 API") +public class BatchController { } +``` + +#### 2. `@Operation` - 엔드포인트 문서화 +```java +@Operation( + summary = "배치 작업 실행", + description = "지정된 배치 작업을 즉시 실행합니다" +) +``` + +#### 3. `@Parameter` - 파라미터 설명 +```java +@Parameter(description = "실행할 배치 작업 이름", required = true, example = "shipDetailSyncJob") +@PathVariable String jobName +``` + +#### 4. 
`@ApiResponses` - 응답 정의 +```java +@ApiResponses(value = { +    @ApiResponse(responseCode = "200", description = "작업 실행 성공"), +    @ApiResponse(responseCode = "500", description = "작업 실행 실패") +}) +``` + +### 신규 Controller에 Swagger 적용 + +```java +@RestController +@RequestMapping("/api/custom") +@RequiredArgsConstructor +@Tag(name = "Custom API", description = "커스텀 API") +public class CustomController { + +    @Operation(summary = "커스텀 조회", description = "특정 조건으로 데이터를 조회합니다") +    @ApiResponses(value = { +        @ApiResponse(responseCode = "200", description = "조회 성공"), +        @ApiResponse(responseCode = "500", description = "서버 오류") +    }) +    @GetMapping("/data") +    public ResponseEntity<List<String>> getData( +            @Parameter(description = "조회 조건", required = true) +            @RequestParam String condition) { +        // 구현... +    } +} +``` + +--- + +## 문제 해결 + +### Swagger UI 접속 불가 (404) + +1. 애플리케이션이 실행 중인지 확인 +2. 포트(8051)와 context-path(`/snp-sync`) 확인 +3. 다음 URL 시도: +   - `http://localhost:8051/snp-sync/swagger-ui/index.html` +   - `http://localhost:8051/snp-sync/swagger-ui.html` + +### 특정 엔드포인트가 보이지 않음 + +1. `@RestController` 어노테이션 확인 +2. `@RequestMapping` 경로 확인 +3. Controller가 `com.snp.batch` 패키지 하위에 있는지 확인 +4. 
애플리케이션 재시작 + +--- + +## 관련 파일 + +``` +src/main/java/com/snp/batch/ +├── global/config/SwaggerConfig.java # Swagger 설정 +├── global/controller/BatchController.java # Batch Management API +└── common/web/controller/BaseController.java # 공통 CRUD Base Controller +``` + +## 참고 자료 + +- [Springdoc OpenAPI](https://springdoc.org/) +- [OpenAPI 3.0 Annotations](https://github.com/swagger-api/swagger-core/wiki/Swagger-2.X---Annotations) diff --git a/frontend/eslint.config.js b/frontend/eslint.config.js new file mode 100644 index 0000000..5e6b472 --- /dev/null +++ b/frontend/eslint.config.js @@ -0,0 +1,23 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' +import tseslint from 'typescript-eslint' +import { defineConfig, globalIgnores } from 'eslint/config' + +export default defineConfig([ + globalIgnores(['dist']), + { + files: ['**/*.{ts,tsx}'], + extends: [ + js.configs.recommended, + tseslint.configs.recommended, + reactHooks.configs.flat.recommended, + reactRefresh.configs.vite, + ], + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + }, + }, +]) diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000..a3ba597 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,13 @@ + + + + + + + S&P 동기화 관리 + + +
+ + + diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..46883a7 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,3935 @@ +{ + "name": "frontend", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "frontend", + "version": "0.0.0", + "dependencies": { + "react": "^19.2.0", + "react-dom": "^19.2.0", + "react-router-dom": "^7.13.0" + }, + "devDependencies": { + "@eslint/js": "^9.39.1", + "@tailwindcss/vite": "^4.1.18", + "@types/node": "^24.10.1", + "@types/react": "^19.2.7", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^5.1.1", + "eslint": "^9.39.1", + "eslint-plugin-react-hooks": "^7.0.1", + "eslint-plugin-react-refresh": "^0.4.24", + "globals": "^16.5.0", + "tailwindcss": "^4.1.18", + "typescript": "~5.9.3", + "typescript-eslint": "^8.48.0", + "vite": "^7.3.1" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": 
"sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": 
"sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": 
">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": 
"sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz", + "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": 
"Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.3", + "resolved": 
"https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz", + "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.1", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/js": { + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": 
"sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": 
"0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.3", + "resolved": 
"https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.3.tgz", + "integrity": "sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz", + "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz", + "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz", + "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz", + "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz", + "integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz", + "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz", + "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz", + "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz", + "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz", + "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz", + "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz", + "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz", + "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz", + "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==", + "cpu": [ + "ppc64" + ], + "dev": true, 
+ "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz", + "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz", + "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz", + "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz", + "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz", + "integrity": 
"sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz", + "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz", + "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz", + "integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz", + "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz", + "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz", + "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@tailwindcss/node": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz", + "integrity": "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "enhanced-resolve": "^5.18.3", + "jiti": "^2.6.1", + "lightningcss": "1.30.2", + "magic-string": "^0.30.21", + "source-map-js": "^1.2.1", + "tailwindcss": "4.1.18" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.18.tgz", + "integrity": "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.1.18", + "@tailwindcss/oxide-darwin-arm64": "4.1.18", + "@tailwindcss/oxide-darwin-x64": "4.1.18", + "@tailwindcss/oxide-freebsd-x64": "4.1.18", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", + 
"@tailwindcss/oxide-linux-x64-musl": "4.1.18", + "@tailwindcss/oxide-wasm32-wasi": "4.1.18", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", + "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.18.tgz", + "integrity": "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.18.tgz", + "integrity": "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.18.tgz", + "integrity": "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.18.tgz", + "integrity": "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + 
"node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.18.tgz", + "integrity": "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.18.tgz", + "integrity": "sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.18.tgz", + "integrity": "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.18.tgz", + "integrity": "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.1.18", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.18.tgz", + "integrity": "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.18.tgz", + "integrity": "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.0", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.18.tgz", + "integrity": "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.18.tgz", + "integrity": "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + 
"win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/vite": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.18.tgz", + "integrity": "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tailwindcss/node": "4.1.18", + "@tailwindcss/oxide": "4.1.18", + "tailwindcss": "4.1.18" + }, + "peerDependencies": { + "vite": "^5.2.0 || ^6 || ^7" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": 
"sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.10.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.13.tgz", + "integrity": "sha512-oH72nZRfDv9lADUBSo104Aq7gPHpQZc4BTx38r9xf9pg5LfP6EzSyH2n7qFmmxRQXh7YlUXODcYsg6PuTDSxGg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/react": { + "version": "19.2.14", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz", + "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==", + "dev": true, + "license": "MIT", + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^19.2.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz", + 
"integrity": "sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.12.2", + "@typescript-eslint/scope-manager": "8.56.0", + "@typescript-eslint/type-utils": "8.56.0", + "@typescript-eslint/utils": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0", + "ignore": "^7.0.5", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.56.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.0.tgz", + "integrity": "sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.56.0", + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/typescript-estree": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0", + "debug": "^4.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + 
"node_modules/@typescript-eslint/project-service": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.0.tgz", + "integrity": "sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.56.0", + "@typescript-eslint/types": "^8.56.0", + "debug": "^4.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.0.tgz", + "integrity": "sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.0.tgz", + "integrity": "sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.56.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.0.tgz", + "integrity": "sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/typescript-estree": "8.56.0", + "@typescript-eslint/utils": "8.56.0", + "debug": "^4.4.3", + "ts-api-utils": "^2.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz", + "integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.0.tgz", + "integrity": "sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.56.0", + "@typescript-eslint/tsconfig-utils": "8.56.0", + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/visitor-keys": "8.56.0", + "debug": "^4.4.3", + "minimatch": "^9.0.5", + "semver": "^7.7.3", + "tinyglobby": "^0.2.15", + "ts-api-utils": "^2.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.0.tgz", + "integrity": "sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.9.1", + "@typescript-eslint/scope-manager": "8.56.0", + "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/typescript-estree": "8.56.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || 
>=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.0.tgz", + "integrity": "sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.56.0", + "eslint-visitor-keys": "^5.0.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.0.tgz", + "integrity": "sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.4.tgz", + "integrity": "sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.29.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-rc.3", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.18.0" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || 
^6.0.0 || ^7.0.0" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + 
"node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz", + "integrity": "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/callsites": { + 
"version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001770", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001770.tgz", + "integrity": "sha512-x/2CLQ1jHENRbHg5PSId2sXq1CIO1CISvwWAj027ltMVG2UNgW+w9oH2+HzgEIRFembL8bUlXtfbBHR1fCg2xw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { 
+ "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.286", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz", + "integrity": "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==", + "dev": true, + "license": "ISC" + }, + "node_modules/enhanced-resolve": { + "version": "5.19.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz", + "integrity": "sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.3.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/esbuild": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.3", + "@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": 
"0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + "@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": 
"^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.2", + "@eslint/plugin-kit": "^0.4.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-7.0.1.tgz", + "integrity": "sha512-O0d0m04evaNzEPoSW+59Mezf8Qt0InfgGIBJnpC0h3NH/WjUAR7BIKUfysC6todmtiZ/A0oUVS8Gce0WhBrHsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.24.4", + "@babel/parser": "^7.24.4", + "hermes-parser": "^0.25.1", + "zod": "^3.25.0 || ^4.0.0", + "zod-validation-error": "^3.5.0 || ^4.0.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" + } + }, + "node_modules/eslint-plugin-react-refresh": { + "version": "0.4.26", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.26.tgz", + "integrity": "sha512-1RETEylht2O6FM/MvgnyvT+8K21wLqDNg4qD51Zj3guhjt433XbnnkVttHMyaVyAFD03QSV4LPS5iE3VQmO7XQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "eslint": ">=8.40" + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz", + "integrity": 
"sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": 
"https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", + "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": 
"ISC" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/hermes-estree": { + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/hermes-estree/-/hermes-estree-0.25.1.tgz", + "integrity": "sha512-0wUoCcLp+5Ev5pDW2OriHC2MJCbwLwuRx+gAqMTOkGKJJiBCLjtrvy4PWUGn6MIVefecRpzoOZ/UV6iGdOr+Cw==", + "dev": true, + "license": "MIT" + }, + "node_modules/hermes-parser": { + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/hermes-parser/-/hermes-parser-0.25.1.tgz", + "integrity": "sha512-6pEjquH3rqaI6cYAXYPcz9MS4rY6R4ngRgrgfDshRptUZIc3lw0MCIJIGDj9++mfySOuPTHB4nrSW99BCvOPIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "hermes-estree": "0.25.1" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": 
"sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lightningcss": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", + "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.30.2", + "lightningcss-darwin-arm64": "1.30.2", + "lightningcss-darwin-x64": "1.30.2", + "lightningcss-freebsd-x64": "1.30.2", + "lightningcss-linux-arm-gnueabihf": "1.30.2", + "lightningcss-linux-arm64-gnu": "1.30.2", + "lightningcss-linux-arm64-musl": "1.30.2", + "lightningcss-linux-x64-gnu": "1.30.2", + "lightningcss-linux-x64-musl": "1.30.2", + "lightningcss-win32-arm64-msvc": "1.30.2", + "lightningcss-win32-x64-msvc": "1.30.2" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz", + "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/lightningcss-darwin-arm64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", + "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz", + "integrity": "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz", + "integrity": "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz", + "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz", + "integrity": "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz", + "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", + "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.30.2", + "resolved": 
"https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", + "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz", + "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz", + "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + 
}, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": 
"bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { 
+ "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": 
true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/react": { + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz", + "integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz", + "integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.4" + } + }, + "node_modules/react-refresh": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz", + "integrity": 
"sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-router": { + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.0.tgz", + "integrity": "sha512-PZgus8ETambRT17BUm/LL8lX3Of+oiLaPuVTRH3l1eLvSPpKO3AvhAEb5N7ihAFZQrYDqkvvWfFh9p0z9VsjLw==", + "license": "MIT", + "dependencies": { + "cookie": "^1.0.1", + "set-cookie-parser": "^2.6.0" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "react": ">=18", + "react-dom": ">=18" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + } + } + }, + "node_modules/react-router-dom": { + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.0.tgz", + "integrity": "sha512-5CO/l5Yahi2SKC6rGZ+HDEjpjkGaG/ncEP7eWFTvFxbHP8yeeI0PxTDjimtpXYlR3b3i9/WIL4VJttPrESIf2g==", + "license": "MIT", + "dependencies": { + "react-router": "7.13.0" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "react": ">=18", + "react-dom": ">=18" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/rollup": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", + "integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": 
"4.57.1", + "@rollup/rollup-android-arm64": "4.57.1", + "@rollup/rollup-darwin-arm64": "4.57.1", + "@rollup/rollup-darwin-x64": "4.57.1", + "@rollup/rollup-freebsd-arm64": "4.57.1", + "@rollup/rollup-freebsd-x64": "4.57.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", + "@rollup/rollup-linux-arm-musleabihf": "4.57.1", + "@rollup/rollup-linux-arm64-gnu": "4.57.1", + "@rollup/rollup-linux-arm64-musl": "4.57.1", + "@rollup/rollup-linux-loong64-gnu": "4.57.1", + "@rollup/rollup-linux-loong64-musl": "4.57.1", + "@rollup/rollup-linux-ppc64-gnu": "4.57.1", + "@rollup/rollup-linux-ppc64-musl": "4.57.1", + "@rollup/rollup-linux-riscv64-gnu": "4.57.1", + "@rollup/rollup-linux-riscv64-musl": "4.57.1", + "@rollup/rollup-linux-s390x-gnu": "4.57.1", + "@rollup/rollup-linux-x64-gnu": "4.57.1", + "@rollup/rollup-linux-x64-musl": "4.57.1", + "@rollup/rollup-openbsd-x64": "4.57.1", + "@rollup/rollup-openharmony-arm64": "4.57.1", + "@rollup/rollup-win32-arm64-msvc": "4.57.1", + "@rollup/rollup-win32-ia32-msvc": "4.57.1", + "@rollup/rollup-win32-x64-gnu": "4.57.1", + "@rollup/rollup-win32-x64-msvc": "4.57.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + 
"license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tailwindcss": { + "version": "4.1.18", + "resolved": 
"https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz", + "integrity": "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/ts-api-utils": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz", + "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": 
"sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typescript-eslint": { + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.0.tgz", + "integrity": "sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.56.0", + "@typescript-eslint/parser": "8.56.0", + "@typescript-eslint/typescript-estree": "8.56.0", + "@typescript-eslint/utils": "8.56.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "license": "MIT" + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": 
{ + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-validation-error": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/zod-validation-error/-/zod-validation-error-4.0.2.tgz", + "integrity": "sha512-Q6/nZLe6jxuU80qb/4uJ4t5v2VEZ44lzQjPDhYJNztRQ4wyWc6VF3D3Kb/fAuPetZQnhS3hnajCf9CsWesghLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "zod": "^3.25.0 || ^4.0.0" 
+ } + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..7204e4d --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,33 @@ +{ + "name": "frontend", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "tsc -b && vite build", + "lint": "eslint .", + "preview": "vite preview" + }, + "dependencies": { + "react": "^19.2.0", + "react-dom": "^19.2.0", + "react-router-dom": "^7.13.0" + }, + "devDependencies": { + "@eslint/js": "^9.39.1", + "@tailwindcss/vite": "^4.1.18", + "@types/node": "^24.10.1", + "@types/react": "^19.2.7", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^5.1.1", + "eslint": "^9.39.1", + "eslint-plugin-react-hooks": "^7.0.1", + "eslint-plugin-react-refresh": "^0.4.24", + "globals": "^16.5.0", + "tailwindcss": "^4.1.18", + "typescript": "~5.9.3", + "typescript-eslint": "^8.48.0", + "vite": "^7.3.1" + } +} diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx new file mode 100644 index 0000000..276ee30 --- /dev/null +++ b/frontend/src/App.tsx @@ -0,0 +1,49 @@ +import { lazy, Suspense } from 'react'; +import { BrowserRouter, Routes, Route } from 'react-router-dom'; +import { ToastProvider, useToastContext } from './contexts/ToastContext'; +import { ThemeProvider } from './contexts/ThemeContext'; +import Navbar from './components/Navbar'; +import ToastContainer from './components/Toast'; +import LoadingSpinner from './components/LoadingSpinner'; + +const Dashboard = lazy(() => import('./pages/Dashboard')); +const Jobs = lazy(() => import('./pages/Jobs')); +const Executions = lazy(() => import('./pages/Executions')); +const ExecutionDetail = lazy(() => import('./pages/ExecutionDetail')); +const Schedules = lazy(() => import('./pages/Schedules')); +const Timeline = lazy(() => import('./pages/Timeline')); + +function AppLayout() { + const { toasts, removeToast } = useToastContext(); + + return ( +
+
+ + }> + + } /> + } /> + } /> + } /> + } /> + } /> + + +
+ +
+ ); +} + +export default function App() { + return ( + + + + + + + + ); +} diff --git a/frontend/src/api/batchApi.ts b/frontend/src/api/batchApi.ts new file mode 100644 index 0000000..869b470 --- /dev/null +++ b/frontend/src/api/batchApi.ts @@ -0,0 +1,402 @@ +const BASE = import.meta.env.DEV ? '/snp-sync/api/batch' : '/snp-sync/api/batch'; + +async function fetchJson(url: string): Promise { + const res = await fetch(url); + if (!res.ok) throw new Error(`API Error: ${res.status} ${res.statusText}`); + return res.json(); +} + +async function postJson(url: string, body?: unknown): Promise { + const res = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: body ? JSON.stringify(body) : undefined, + }); + if (!res.ok) throw new Error(`API Error: ${res.status} ${res.statusText}`); + return res.json(); +} + +async function putJson(url: string, body?: unknown): Promise { + const res = await fetch(url, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: body ? 
JSON.stringify(body) : undefined, + }); + if (!res.ok) throw new Error(`API Error: ${res.status} ${res.statusText}`); + return res.json(); +} + +async function deleteJson(url: string): Promise { + const res = await fetch(url, { method: 'DELETE' }); + if (!res.ok) throw new Error(`API Error: ${res.status} ${res.statusText}`); + return res.json(); +} + +// ── Dashboard ──────────────────────────────────────────────── + +export interface DashboardStats { + totalSchedules: number; + activeSchedules: number; + inactiveSchedules: number; + totalJobs: number; +} + +export interface RunningJob { + jobName: string; + executionId: number; + status: string; + startTime: string; +} + +export interface RecentExecution { + executionId: number; + jobName: string; + status: string; + startTime: string; + endTime: string | null; +} + +export interface RecentFailure { + executionId: number; + jobName: string; + status: string; + startTime: string; + endTime: string | null; + exitMessage: string | null; +} + +export interface FailureStats { + last24h: number; + last7d: number; +} + +export interface DashboardResponse { + stats: DashboardStats; + runningJobs: RunningJob[]; + recentExecutions: RecentExecution[]; + recentFailures: RecentFailure[]; + staleExecutionCount: number; + failureStats: FailureStats; +} + +// ── Job Execution ──────────────────────────────────────────── + +export interface JobExecutionDto { + executionId: number; + jobName: string; + status: string; + startTime: string; + endTime: string | null; + exitCode: string | null; + exitMessage: string | null; + failedRecordCount: number | null; +} + +export interface ApiCallInfo { + apiUrl: string; + method: string; + parameters: Record | null; + totalCalls: number; + completedCalls: number; + lastCallTime: string; +} + +export interface StepExecutionDto { + stepExecutionId: number; + stepName: string; + status: string; + startTime: string; + endTime: string | null; + readCount: number; + writeCount: number; + 
commitCount: number; + rollbackCount: number; + readSkipCount: number; + processSkipCount: number; + writeSkipCount: number; + filterCount: number; + exitCode: string; + exitMessage: string | null; + duration: number | null; + apiCallInfo: ApiCallInfo | null; + apiLogSummary: StepApiLogSummary | null; + failedRecords?: FailedRecordDto[] | null; +} + +export interface ApiLogEntryDto { + logId: number; + requestUri: string; + httpMethod: string; + statusCode: number | null; + responseTimeMs: number | null; + responseCount: number | null; + errorMessage: string | null; + createdAt: string; +} + +export interface StepApiLogSummary { + totalCalls: number; + successCount: number; + errorCount: number; + avgResponseMs: number; + maxResponseMs: number; + minResponseMs: number; + totalResponseMs: number; + totalRecordCount: number; +} + +export interface ApiLogPageResponse { + content: ApiLogEntryDto[]; + page: number; + size: number; + totalElements: number; + totalPages: number; +} + +export type ApiLogStatus = 'ALL' | 'SUCCESS' | 'ERROR'; + +export interface FailedRecordDto { + id: number; + jobName: string; + recordKey: string; + errorMessage: string | null; + retryCount: number; + status: string; + createdAt: string; +} + +export interface JobExecutionDetailDto { + executionId: number; + jobName: string; + status: string; + startTime: string; + endTime: string | null; + exitCode: string; + exitMessage: string | null; + jobParameters: Record; + jobInstanceId: number; + duration: number | null; + readCount: number; + writeCount: number; + skipCount: number; + filterCount: number; + stepExecutions: StepExecutionDto[]; +} + +// ── Schedule ───────────────────────────────────────────────── + +export interface ScheduleResponse { + id: number; + jobName: string; + cronExpression: string; + description: string | null; + active: boolean; + nextFireTime: string | null; + previousFireTime: string | null; + triggerState: string | null; + createdAt: string; + updatedAt: string; +} 
+ +export interface ScheduleRequest { + jobName: string; + cronExpression: string; + description?: string; + active?: boolean; +} + +// ── Timeline ───────────────────────────────────────────────── + +export interface PeriodInfo { + key: string; + label: string; +} + +export interface ExecutionInfo { + executionId: number | null; + status: string; + startTime: string | null; + endTime: string | null; +} + +export interface ScheduleTimeline { + jobName: string; + executions: Record; +} + +export interface TimelineResponse { + periodLabel: string; + periods: PeriodInfo[]; + schedules: ScheduleTimeline[]; +} + +// ── F4: Execution Search ───────────────────────────────────── + +export interface ExecutionSearchResponse { + executions: JobExecutionDto[]; + totalCount: number; + page: number; + size: number; + totalPages: number; +} + +// ── F7: Job Detail ─────────────────────────────────────────── + +export interface LastExecution { + executionId: number; + status: string; + startTime: string; + endTime: string | null; +} + +export interface JobDetailDto { + jobName: string; + lastExecution: LastExecution | null; + scheduleCron: string | null; +} + +// ── F8: Statistics ─────────────────────────────────────────── + +export interface DailyStat { + date: string; + successCount: number; + failedCount: number; + otherCount: number; + avgDurationMs: number; +} + +export interface ExecutionStatisticsDto { + dailyStats: DailyStat[]; + totalExecutions: number; + totalSuccess: number; + totalFailed: number; + avgDurationMs: number; +} + +// ── API Functions ──────────────────────────────────────────── + +export const batchApi = { + getDashboard: () => + fetchJson(`${BASE}/dashboard`), + + getJobs: () => + fetchJson(`${BASE}/jobs`), + + getJobsDetail: () => + fetchJson(`${BASE}/jobs/detail`), + + executeJob: (jobName: string, params?: Record) => { + const qs = params ? '?' 
+ new URLSearchParams(params).toString() : ''; + return postJson<{ success: boolean; message: string; executionId?: number }>( + `${BASE}/jobs/${jobName}/execute${qs}`); + }, + + getJobExecutions: (jobName: string) => + fetchJson(`${BASE}/jobs/${jobName}/executions`), + + getRecentExecutions: (limit = 50) => + fetchJson(`${BASE}/executions/recent?limit=${limit}`), + + getExecutionDetail: (id: number) => + fetchJson(`${BASE}/executions/${id}/detail`), + + stopExecution: (id: number) => + postJson<{ success: boolean; message: string }>(`${BASE}/executions/${id}/stop`), + + // F1: Abandon + getStaleExecutions: (thresholdMinutes = 60) => + fetchJson(`${BASE}/executions/stale?thresholdMinutes=${thresholdMinutes}`), + + abandonExecution: (id: number) => + postJson<{ success: boolean; message: string }>(`${BASE}/executions/${id}/abandon`), + + abandonAllStale: (thresholdMinutes = 60) => + postJson<{ success: boolean; message: string; abandonedCount?: number }>( + `${BASE}/executions/stale/abandon-all?thresholdMinutes=${thresholdMinutes}`), + + // F4: Search + searchExecutions: (params: { + jobNames?: string[]; + status?: string; + startDate?: string; + endDate?: string; + page?: number; + size?: number; + }) => { + const qs = new URLSearchParams(); + if (params.jobNames && params.jobNames.length > 0) qs.set('jobNames', params.jobNames.join(',')); + if (params.status) qs.set('status', params.status); + if (params.startDate) qs.set('startDate', params.startDate); + if (params.endDate) qs.set('endDate', params.endDate); + qs.set('page', String(params.page ?? 0)); + qs.set('size', String(params.size ?? 
50)); + return fetchJson(`${BASE}/executions/search?${qs.toString()}`); + }, + + // F8: Statistics + getStatistics: (days = 30) => + fetchJson(`${BASE}/statistics?days=${days}`), + + getJobStatistics: (jobName: string, days = 30) => + fetchJson(`${BASE}/statistics/${jobName}?days=${days}`), + + // Schedule + getSchedules: () => + fetchJson<{ schedules: ScheduleResponse[]; count: number }>(`${BASE}/schedules`), + + getSchedule: (jobName: string) => + fetchJson(`${BASE}/schedules/${jobName}`), + + createSchedule: (data: ScheduleRequest) => + postJson<{ success: boolean; message: string; data?: ScheduleResponse }>(`${BASE}/schedules`, data), + + updateSchedule: (jobName: string, data: { cronExpression: string; description?: string }) => + putJson<{ success: boolean; message: string; data?: ScheduleResponse }>( + `${BASE}/schedules/${jobName}`, data), + + deleteSchedule: (jobName: string) => + deleteJson<{ success: boolean; message: string }>(`${BASE}/schedules/${jobName}`), + + toggleSchedule: (jobName: string, active: boolean) => + postJson<{ success: boolean; message: string; data?: ScheduleResponse }>( + `${BASE}/schedules/${jobName}/toggle`, { active }), + + // Timeline + getTimeline: (view: string, date: string) => + fetchJson(`${BASE}/timeline?view=${view}&date=${date}`), + + getPeriodExecutions: (jobName: string, view: string, periodKey: string) => + fetchJson( + `${BASE}/timeline/period-executions?jobName=${jobName}&view=${view}&periodKey=${periodKey}`), + + getStepApiLogs: (stepExecutionId: number, params?: { + page?: number; size?: number; status?: ApiLogStatus; + }) => { + const qs = new URLSearchParams(); + qs.set('page', String(params?.page ?? 0)); + qs.set('size', String(params?.size ?? 
50)); + if (params?.status && params.status !== 'ALL') qs.set('status', params.status); + return fetchJson( + `${BASE}/steps/${stepExecutionId}/api-logs?${qs.toString()}`); + }, + + // Failed Records + retryFailedRecords: (jobName: string, failedCount: number, jobExecutionId: number) => { + const qs = new URLSearchParams({ + sourceJobExecutionId: String(jobExecutionId), + executionMode: 'RETRY', + executor: 'MANUAL_RETRY', + reason: `실패 건 수동 재시도 (${failedCount}건)`, + }); + return postJson<{ success: boolean; message: string; executionId?: number }>( + `${BASE}/jobs/${jobName}/execute?${qs.toString()}`); + }, + + resolveFailedRecords: (ids: number[]) => + postJson<{ success: boolean; message: string; resolvedCount?: number }>( + `${BASE}/failed-records/resolve`, { ids }), + + resetRetryCount: (ids: number[]) => + postJson<{ success: boolean; message: string; resetCount?: number }>( + `${BASE}/failed-records/reset-retry`, { ids }), +}; diff --git a/frontend/src/components/ApiLogSection.tsx b/frontend/src/components/ApiLogSection.tsx new file mode 100644 index 0000000..4da7cb3 --- /dev/null +++ b/frontend/src/components/ApiLogSection.tsx @@ -0,0 +1,170 @@ +import { useState, useCallback, useEffect } from 'react'; +import { batchApi, type ApiLogPageResponse, type ApiLogStatus } from '../api/batchApi'; +import { formatDateTime } from '../utils/formatters'; +import Pagination from './Pagination'; +import CopyButton from './CopyButton'; + +interface ApiLogSectionProps { + stepExecutionId: number; + summary: { totalCalls: number; successCount: number; errorCount: number }; +} + +export default function ApiLogSection({ stepExecutionId, summary }: ApiLogSectionProps) { + const [open, setOpen] = useState(false); + const [status, setStatus] = useState('ALL'); + const [page, setPage] = useState(0); + const [logData, setLogData] = useState(null); + const [loading, setLoading] = useState(false); + + const fetchLogs = useCallback(async (p: number, s: ApiLogStatus) => { + 
setLoading(true); + try { + const data = await batchApi.getStepApiLogs(stepExecutionId, { page: p, size: 10, status: s }); + setLogData(data); + } catch { + setLogData(null); + } finally { + setLoading(false); + } + }, [stepExecutionId]); + + useEffect(() => { + if (open) { + fetchLogs(page, status); + } + }, [open, page, status, fetchLogs]); + + const handleStatusChange = (s: ApiLogStatus) => { + setStatus(s); + setPage(0); + }; + + const filters: { key: ApiLogStatus; label: string; count: number }[] = [ + { key: 'ALL', label: '전체', count: summary.totalCalls }, + { key: 'SUCCESS', label: '성공', count: summary.successCount }, + { key: 'ERROR', label: '에러', count: summary.errorCount }, + ]; + + return ( +
+ + + {open && ( +
+ {/* 상태 필터 탭 */} +
+ {filters.map(({ key, label, count }) => ( + + ))} +
+ + {loading ? ( +
+
+ 로딩중... +
+ ) : logData && logData.content.length > 0 ? ( + <> +
+ + + + + + + + + + + + + + + {logData.content.map((log, idx) => { + const isError = (log.statusCode != null && log.statusCode >= 400) || log.errorMessage; + return ( + + + + + + + + + + + ); + })} + +
#URIMethod상태응답(ms)건수시간에러
{page * 10 + idx + 1} +
+ + {log.requestUri} + + +
+
{log.httpMethod} + + {log.statusCode ?? '-'} + + + {log.responseTimeMs?.toLocaleString() ?? '-'} + + {log.responseCount?.toLocaleString() ?? '-'} + + {formatDateTime(log.createdAt)} + + {log.errorMessage || '-'} +
+
+ + {/* 페이지네이션 */} + + + ) : ( +

조회된 로그가 없습니다.

+ )} +
+ )} +
+ ); +} diff --git a/frontend/src/components/BarChart.tsx b/frontend/src/components/BarChart.tsx new file mode 100644 index 0000000..2c25aaa --- /dev/null +++ b/frontend/src/components/BarChart.tsx @@ -0,0 +1,74 @@ +interface BarValue { + color: string; + value: number; +} + +interface BarData { + label: string; + values: BarValue[]; +} + +interface Props { + data: BarData[]; + height?: number; +} + +export default function BarChart({ data, height = 200 }: Props) { + const maxTotal = Math.max(...data.map((d) => d.values.reduce((sum, v) => sum + v.value, 0)), 1); + + return ( +
+
+ {data.map((bar, i) => { + const total = bar.values.reduce((sum, v) => sum + v.value, 0); + const ratio = total / maxTotal; + + return ( +
+
`${v.color}: ${v.value}`).join(', ')} + > + {bar.values + .filter((v) => v.value > 0) + .map((v, j) => { + const segmentRatio = total > 0 ? (v.value / total) * 100 : 0; + return ( +
+ ); + })} +
+
+ ); + })} +
+
+ {data.map((bar, i) => ( +
+

+ {bar.label} +

+
+ ))} +
+
+ ); +} + +function colorToClass(color: string): string { + const map: Record = { + green: 'bg-green-500', + red: 'bg-red-500', + gray: 'bg-gray-400', + blue: 'bg-blue-500', + yellow: 'bg-yellow-500', + orange: 'bg-orange-500', + indigo: 'bg-indigo-500', + }; + return map[color] ?? color; +} diff --git a/frontend/src/components/ConfirmModal.tsx b/frontend/src/components/ConfirmModal.tsx new file mode 100644 index 0000000..8ae36f5 --- /dev/null +++ b/frontend/src/components/ConfirmModal.tsx @@ -0,0 +1,49 @@ +interface Props { + open: boolean; + title?: string; + message: string; + confirmLabel?: string; + cancelLabel?: string; + confirmColor?: string; + onConfirm: () => void; + onCancel: () => void; +} + +export default function ConfirmModal({ + open, + title = '확인', + message, + confirmLabel = '확인', + cancelLabel = '취소', + confirmColor = 'bg-wing-accent hover:bg-wing-accent/80', + onConfirm, + onCancel, +}: Props) { + if (!open) return null; + + return ( +
+
e.stopPropagation()} + > +

{title}

+

{message}

+
+ + +
+
+
+ ); +} diff --git a/frontend/src/components/CopyButton.tsx b/frontend/src/components/CopyButton.tsx new file mode 100644 index 0000000..617bf3e --- /dev/null +++ b/frontend/src/components/CopyButton.tsx @@ -0,0 +1,48 @@ +import { useState } from 'react'; + +interface CopyButtonProps { + text: string; +} + +export default function CopyButton({ text }: CopyButtonProps) { + const [copied, setCopied] = useState(false); + + const handleCopy = async (e: React.MouseEvent) => { + e.stopPropagation(); + try { + await navigator.clipboard.writeText(text); + setCopied(true); + setTimeout(() => setCopied(false), 1500); + } catch { + const textarea = document.createElement('textarea'); + textarea.value = text; + textarea.style.position = 'fixed'; + textarea.style.opacity = '0'; + document.body.appendChild(textarea); + textarea.select(); + document.execCommand('copy'); + document.body.removeChild(textarea); + setCopied(true); + setTimeout(() => setCopied(false), 1500); + } + }; + + return ( + + ); +} diff --git a/frontend/src/components/DetailStatCard.tsx b/frontend/src/components/DetailStatCard.tsx new file mode 100644 index 0000000..13d0b11 --- /dev/null +++ b/frontend/src/components/DetailStatCard.tsx @@ -0,0 +1,22 @@ +interface DetailStatCardProps { + label: string; + value: number; + gradient: string; + icon: string; +} + +export default function DetailStatCard({ label, value, gradient, icon }: DetailStatCardProps) { + return ( +
+
+
+

{label}

+

+ {value.toLocaleString()} +

+
+ {icon} +
+
+ ); +} diff --git a/frontend/src/components/EmptyState.tsx b/frontend/src/components/EmptyState.tsx new file mode 100644 index 0000000..0cf5f0a --- /dev/null +++ b/frontend/src/components/EmptyState.tsx @@ -0,0 +1,15 @@ +interface Props { + icon?: string; + message: string; + sub?: string; +} + +export default function EmptyState({ icon = '📭', message, sub }: Props) { + return ( +
+ {icon} +

{message}

+ {sub &&

{sub}

} +
+ ); +} diff --git a/frontend/src/components/GuideModal.tsx b/frontend/src/components/GuideModal.tsx new file mode 100644 index 0000000..0c4ee2e --- /dev/null +++ b/frontend/src/components/GuideModal.tsx @@ -0,0 +1,92 @@ +import { useState } from 'react'; + +interface GuideSection { + title: string; + content: string; +} + +interface Props { + open: boolean; + pageTitle: string; + sections: GuideSection[]; + onClose: () => void; +} + +export default function GuideModal({ open, pageTitle, sections, onClose }: Props) { + if (!open) return null; + + return ( +
+
e.stopPropagation()} + > +
+

{pageTitle} 사용 가이드

+ +
+ +
+ {sections.map((section, i) => ( + + ))} +
+ +
+ +
+
+
+ ); +} + +function GuideAccordion({ title, content, defaultOpen }: { title: string; content: string; defaultOpen: boolean }) { + const [isOpen, setIsOpen] = useState(defaultOpen); + + return ( +
+ + {isOpen && ( +
+ {content} +
+ )} +
+ ); +} + +export function HelpButton({ onClick }: { onClick: () => void }) { + return ( + + ); +} diff --git a/frontend/src/components/InfoItem.tsx b/frontend/src/components/InfoItem.tsx new file mode 100644 index 0000000..d8b400e --- /dev/null +++ b/frontend/src/components/InfoItem.tsx @@ -0,0 +1,15 @@ +interface InfoItemProps { + label: string; + value: string; +} + +export default function InfoItem({ label, value }: InfoItemProps) { + return ( +
+
+ {label} +
+
{value || '-'}
+
+ ); +} diff --git a/frontend/src/components/InfoModal.tsx b/frontend/src/components/InfoModal.tsx new file mode 100644 index 0000000..cedb5a9 --- /dev/null +++ b/frontend/src/components/InfoModal.tsx @@ -0,0 +1,35 @@ +interface Props { + open: boolean; + title?: string; + children: React.ReactNode; + onClose: () => void; +} + +export default function InfoModal({ + open, + title = '정보', + children, + onClose, +}: Props) { + if (!open) return null; + + return ( +
+
e.stopPropagation()} + > +

{title}

+
{children}
+
+ +
+
+
+ ); +} diff --git a/frontend/src/components/LoadingSpinner.tsx b/frontend/src/components/LoadingSpinner.tsx new file mode 100644 index 0000000..738e225 --- /dev/null +++ b/frontend/src/components/LoadingSpinner.tsx @@ -0,0 +1,7 @@ +export default function LoadingSpinner({ className = '' }: { className?: string }) { + return ( +
+
+
+ ); +} diff --git a/frontend/src/components/Navbar.tsx b/frontend/src/components/Navbar.tsx new file mode 100644 index 0000000..663baca --- /dev/null +++ b/frontend/src/components/Navbar.tsx @@ -0,0 +1,54 @@ +import { Link, useLocation } from 'react-router-dom'; +import { useThemeContext } from '../contexts/ThemeContext'; + +const navItems = [ + { path: '/', label: '대시보드', icon: '📊' }, + { path: '/executions', label: '실행 이력', icon: '📋' }, + { path: '/jobs', label: '작업', icon: '⚙️' }, + { path: '/schedules', label: '스케줄', icon: '🕐' }, + { path: '/schedule-timeline', label: '타임라인', icon: '📅' }, +]; + +export default function Navbar() { + const location = useLocation(); + const { theme, toggle } = useThemeContext(); + + const isActive = (path: string) => { + if (path === '/') return location.pathname === '/'; + return location.pathname.startsWith(path); + }; + + return ( + + ); +} diff --git a/frontend/src/components/Pagination.tsx b/frontend/src/components/Pagination.tsx new file mode 100644 index 0000000..b735e4f --- /dev/null +++ b/frontend/src/components/Pagination.tsx @@ -0,0 +1,145 @@ +interface PaginationProps { + page: number; + totalPages: number; + totalElements: number; + pageSize: number; + onPageChange: (page: number) => void; +} + +/** + * 표시할 페이지 번호 목록 생성 (Truncated Page Number) + * - 총 7슬롯 이하면 전부 표시 + * - 7슬롯 초과면 현재 페이지 기준 양쪽 1개 + 처음/끝 + ellipsis + */ +function getPageNumbers(current: number, total: number): (number | 'ellipsis')[] { + if (total <= 7) { + return Array.from({ length: total }, (_, i) => i); + } + + const pages: (number | 'ellipsis')[] = []; + const SIBLING = 1; + + const leftSibling = Math.max(current - SIBLING, 0); + const rightSibling = Math.min(current + SIBLING, total - 1); + + const showLeftEllipsis = leftSibling > 1; + const showRightEllipsis = rightSibling < total - 2; + + pages.push(0); + + if (showLeftEllipsis) { + pages.push('ellipsis'); + } else { + for (let i = 1; i < leftSibling; i++) { + pages.push(i); + } + } + + for (let 
i = leftSibling; i <= rightSibling; i++) { + if (i !== 0 && i !== total - 1) { + pages.push(i); + } + } + + if (showRightEllipsis) { + pages.push('ellipsis'); + } else { + for (let i = rightSibling + 1; i < total - 1; i++) { + pages.push(i); + } + } + + if (total > 1) { + pages.push(total - 1); + } + + return pages; +} + +export default function Pagination({ + page, + totalPages, + totalElements, + pageSize, + onPageChange, +}: PaginationProps) { + if (totalPages <= 1) return null; + + const start = page * pageSize + 1; + const end = Math.min((page + 1) * pageSize, totalElements); + const pages = getPageNumbers(page, totalPages); + + const btnBase = + 'inline-flex items-center justify-center w-7 h-7 text-xs rounded transition-colors'; + const btnEnabled = 'hover:bg-wing-hover text-wing-muted'; + const btnDisabled = 'opacity-30 cursor-not-allowed text-wing-muted'; + + return ( +
+ + {totalElements.toLocaleString()}건 중 {start.toLocaleString()}~ + {end.toLocaleString()} + +
+ {/* First */} + + {/* Prev */} + + + {/* Page Numbers */} + {pages.map((p, idx) => + p === 'ellipsis' ? ( + + … + + ) : ( + + ), + )} + + {/* Next */} + + {/* Last */} + +
+
+ ); +} diff --git a/frontend/src/components/StatusBadge.tsx b/frontend/src/components/StatusBadge.tsx new file mode 100644 index 0000000..686660e --- /dev/null +++ b/frontend/src/components/StatusBadge.tsx @@ -0,0 +1,40 @@ +const STATUS_CONFIG: Record = { + COMPLETED: { bg: 'bg-emerald-100 text-emerald-700', text: '완료', label: '✓' }, + FAILED: { bg: 'bg-red-100 text-red-700', text: '실패', label: '✕' }, + STARTED: { bg: 'bg-blue-100 text-blue-700', text: '실행중', label: '↻' }, + STARTING: { bg: 'bg-cyan-100 text-cyan-700', text: '시작중', label: '⏳' }, + STOPPED: { bg: 'bg-amber-100 text-amber-700', text: '중지됨', label: '⏸' }, + STOPPING: { bg: 'bg-orange-100 text-orange-700', text: '중지중', label: '⏸' }, + ABANDONED: { bg: 'bg-gray-100 text-gray-700', text: '포기됨', label: '—' }, + SCHEDULED: { bg: 'bg-violet-100 text-violet-700', text: '예정', label: '🕐' }, + UNKNOWN: { bg: 'bg-gray-100 text-gray-500', text: '알수없음', label: '?' }, +}; + +interface Props { + status: string; + className?: string; +} + +export default function StatusBadge({ status, className = '' }: Props) { + const config = STATUS_CONFIG[status] || STATUS_CONFIG.UNKNOWN; + return ( + + {config.label} + {config.text} + + ); +} + +// eslint-disable-next-line react-refresh/only-export-components +export function getStatusColor(status: string): string { + switch (status) { + case 'COMPLETED': return '#10b981'; + case 'FAILED': return '#ef4444'; + case 'STARTED': return '#3b82f6'; + case 'STARTING': return '#06b6d4'; + case 'STOPPED': return '#f59e0b'; + case 'STOPPING': return '#f97316'; + case 'SCHEDULED': return '#8b5cf6'; + default: return '#6b7280'; + } +} diff --git a/frontend/src/components/Toast.tsx b/frontend/src/components/Toast.tsx new file mode 100644 index 0000000..90e5750 --- /dev/null +++ b/frontend/src/components/Toast.tsx @@ -0,0 +1,37 @@ +import type { Toast as ToastType } from '../hooks/useToast'; + +const TYPE_STYLES: Record = { + success: 'bg-emerald-500', + error: 'bg-red-500', + warning: 
'bg-amber-500', + info: 'bg-blue-500', +}; + +interface Props { + toasts: ToastType[]; + onRemove: (id: number) => void; +} + +export default function ToastContainer({ toasts, onRemove }: Props) { + if (toasts.length === 0) return null; + + return ( +
+ {toasts.map((toast) => ( +
+ {toast.message} + +
+ ))} +
+ ); +} diff --git a/frontend/src/contexts/ThemeContext.tsx b/frontend/src/contexts/ThemeContext.tsx new file mode 100644 index 0000000..0db5e08 --- /dev/null +++ b/frontend/src/contexts/ThemeContext.tsx @@ -0,0 +1,26 @@ +import { createContext, useContext, type ReactNode } from 'react'; +import { useTheme } from '../hooks/useTheme'; + +interface ThemeContextValue { + theme: 'dark' | 'light'; + toggle: () => void; +} + +const ThemeContext = createContext({ + theme: 'dark', + toggle: () => {}, +}); + +export function ThemeProvider({ children }: { children: ReactNode }) { + const value = useTheme(); + return ( + + {children} + + ); +} + +// eslint-disable-next-line react-refresh/only-export-components +export function useThemeContext() { + return useContext(ThemeContext); +} diff --git a/frontend/src/contexts/ToastContext.tsx b/frontend/src/contexts/ToastContext.tsx new file mode 100644 index 0000000..31cae8e --- /dev/null +++ b/frontend/src/contexts/ToastContext.tsx @@ -0,0 +1,29 @@ +import { createContext, useContext, type ReactNode } from 'react'; +import { useToast, type Toast } from '../hooks/useToast'; + +interface ToastContextValue { + toasts: Toast[]; + showToast: (message: string, type?: Toast['type']) => void; + removeToast: (id: number) => void; +} + +const ToastContext = createContext(null); + +export function ToastProvider({ children }: { children: ReactNode }) { + const { toasts, showToast, removeToast } = useToast(); + + return ( + + {children} + + ); +} + +// eslint-disable-next-line react-refresh/only-export-components +export function useToastContext(): ToastContextValue { + const ctx = useContext(ToastContext); + if (!ctx) { + throw new Error('useToastContext must be used within a ToastProvider'); + } + return ctx; +} diff --git a/frontend/src/hooks/usePoller.ts b/frontend/src/hooks/usePoller.ts new file mode 100644 index 0000000..04ac6c1 --- /dev/null +++ b/frontend/src/hooks/usePoller.ts @@ -0,0 +1,53 @@ +import { useEffect, useRef } from 
'react'; + +/** + * 주기적 폴링 훅 + * - 마운트 시 즉시 1회 실행 후 intervalMs 주기로 반복 + * - 탭 비활성(document.hidden) 시 자동 중단, 활성화 시 즉시 재개 + * - deps 변경 시 타이머 재설정 + */ +export function usePoller( + fn: () => Promise | void, + intervalMs: number, + deps: unknown[] = [], +) { + const fnRef = useRef(fn); + fnRef.current = fn; + + useEffect(() => { + let timer: ReturnType | null = null; + + const run = () => { + fnRef.current(); + }; + + const start = () => { + run(); + timer = setInterval(run, intervalMs); + }; + + const stop = () => { + if (timer) { + clearInterval(timer); + timer = null; + } + }; + + const handleVisibility = () => { + if (document.hidden) { + stop(); + } else { + start(); + } + }; + + start(); + document.addEventListener('visibilitychange', handleVisibility); + + return () => { + stop(); + document.removeEventListener('visibilitychange', handleVisibility); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [intervalMs, ...deps]); +} diff --git a/frontend/src/hooks/useTheme.ts b/frontend/src/hooks/useTheme.ts new file mode 100644 index 0000000..a62aa00 --- /dev/null +++ b/frontend/src/hooks/useTheme.ts @@ -0,0 +1,27 @@ +import { useState, useEffect, useCallback } from 'react'; + +type Theme = 'dark' | 'light'; + +const STORAGE_KEY = 'snp-batch-theme'; + +function getInitialTheme(): Theme { + if (typeof window === 'undefined') return 'dark'; + const stored = localStorage.getItem(STORAGE_KEY); + if (stored === 'light' || stored === 'dark') return stored; + return 'dark'; +} + +export function useTheme() { + const [theme, setTheme] = useState(getInitialTheme); + + useEffect(() => { + document.documentElement.setAttribute('data-theme', theme); + localStorage.setItem(STORAGE_KEY, theme); + }, [theme]); + + const toggle = useCallback(() => { + setTheme((prev) => (prev === 'dark' ? 
'light' : 'dark')); + }, []); + + return { theme, toggle } as const; +} diff --git a/frontend/src/hooks/useToast.ts b/frontend/src/hooks/useToast.ts new file mode 100644 index 0000000..04ce8f8 --- /dev/null +++ b/frontend/src/hooks/useToast.ts @@ -0,0 +1,27 @@ +import { useState, useCallback } from 'react'; + +export interface Toast { + id: number; + message: string; + type: 'success' | 'error' | 'warning' | 'info'; +} + +let nextId = 0; + +export function useToast() { + const [toasts, setToasts] = useState([]); + + const showToast = useCallback((message: string, type: Toast['type'] = 'info') => { + const id = nextId++; + setToasts((prev) => [...prev, { id, message, type }]); + setTimeout(() => { + setToasts((prev) => prev.filter((t) => t.id !== id)); + }, 5000); + }, []); + + const removeToast = useCallback((id: number) => { + setToasts((prev) => prev.filter((t) => t.id !== id)); + }, []); + + return { toasts, showToast, removeToast }; +} diff --git a/frontend/src/index.css b/frontend/src/index.css new file mode 100644 index 0000000..7373563 --- /dev/null +++ b/frontend/src/index.css @@ -0,0 +1,3 @@ +@import "tailwindcss"; +@import "./theme/tokens.css"; +@import "./theme/base.css"; diff --git a/frontend/src/main.tsx b/frontend/src/main.tsx new file mode 100644 index 0000000..bef5202 --- /dev/null +++ b/frontend/src/main.tsx @@ -0,0 +1,10 @@ +import { StrictMode } from 'react' +import { createRoot } from 'react-dom/client' +import './index.css' +import App from './App.tsx' + +createRoot(document.getElementById('root')!).render( + + + , +) diff --git a/frontend/src/pages/Dashboard.tsx b/frontend/src/pages/Dashboard.tsx new file mode 100644 index 0000000..49bbf91 --- /dev/null +++ b/frontend/src/pages/Dashboard.tsx @@ -0,0 +1,394 @@ +import { useState, useCallback, useEffect } from 'react'; +import { Link } from 'react-router-dom'; +import { + batchApi, + type DashboardResponse, + type DashboardStats, + type ExecutionStatisticsDto, +} from '../api/batchApi'; +import 
{ usePoller } from '../hooks/usePoller'; +import { useToastContext } from '../contexts/ToastContext'; +import StatusBadge from '../components/StatusBadge'; +import EmptyState from '../components/EmptyState'; +import LoadingSpinner from '../components/LoadingSpinner'; +import BarChart from '../components/BarChart'; +import { formatDateTime, calculateDuration } from '../utils/formatters'; +import GuideModal, { HelpButton } from '../components/GuideModal'; + +const POLLING_INTERVAL = 5000; + +const DASHBOARD_GUIDE = [ + { + title: '통계 카드', + content: '화면 상단에 전체 스케줄, 활성/비활성 스케줄 수, 전체 작업 수, 최근 24시간 실패 건수를 한눈에 보여줍니다.', + }, + { + title: '실행 중인 작업', + content: '현재 실행 중인 배치 작업 목록을 실시간으로 보여줍니다.\n5초마다 자동으로 갱신됩니다.\n오래 실행 중인 작업이 있으면 상단에 경고 배너가 표시되며, "전체 강제 종료" 버튼으로 일괄 중지할 수 있습니다.', + }, + { + title: '최근 실행 이력', + content: '최근 완료된 배치 작업 5건을 보여줍니다.\n각 행의 작업명, 상태, 시작 시간, 소요 시간을 확인할 수 있습니다.\n"전체 보기"를 클릭하면 실행 이력 화면으로 이동합니다.', + }, + { + title: '최근 실패 이력', + content: '최근 24시간 내 실패한 작업이 있을 때만 표시됩니다.\n실패 원인을 빠르게 파악할 수 있도록 종료 코드와 메시지를 함께 보여줍니다.', + }, + { + title: '실행 통계 차트', + content: '최근 30일간의 배치 실행 통계를 바 차트로 보여줍니다.\n초록색은 성공, 빨간색은 실패, 회색은 기타 상태를 나타냅니다.', + }, +]; + +interface StatCardProps { + label: string; + value: number; + gradient: string; + to?: string; +} + +function StatCard({ label, value, gradient, to }: StatCardProps) { + const content = ( +
+

{value}

+

{label}

+
+ ); + + if (to) { + return {content}; + } + return content; +} + +export default function Dashboard() { + const { showToast } = useToastContext(); + const [dashboard, setDashboard] = useState(null); + const [loading, setLoading] = useState(true); + const [guideOpen, setGuideOpen] = useState(false); + + const [abandoning, setAbandoning] = useState(false); + const [statistics, setStatistics] = useState(null); + + const loadStatistics = useCallback(async () => { + try { + const data = await batchApi.getStatistics(30); + setStatistics(data); + } catch { + /* 통계 로드 실패는 무시 */ + } + }, []); + + useEffect(() => { + loadStatistics(); + }, [loadStatistics]); + + const loadDashboard = useCallback(async () => { + try { + const data = await batchApi.getDashboard(); + setDashboard(data); + } catch (err) { + console.error('Dashboard load failed:', err); + } finally { + setLoading(false); + } + }, []); + + usePoller(loadDashboard, POLLING_INTERVAL); + + const handleAbandonAllStale = async () => { + setAbandoning(true); + try { + const result = await batchApi.abandonAllStale(); + showToast( + result.message || `${result.abandonedCount ?? 0}건 강제 종료 완료`, + 'success', + ); + await loadDashboard(); + } catch (err) { + showToast( + `강제 종료 실패: ${err instanceof Error ? err.message : '알 수 없는 오류'}`, + 'error', + ); + } finally { + setAbandoning(false); + } + }; + + if (loading) return ; + + const stats: DashboardStats = dashboard?.stats ?? { + totalSchedules: 0, + activeSchedules: 0, + inactiveSchedules: 0, + totalJobs: 0, + }; + + const runningJobs = dashboard?.runningJobs ?? []; + const recentExecutions = dashboard?.recentExecutions ?? []; + const recentFailures = dashboard?.recentFailures ?? []; + const staleExecutionCount = dashboard?.staleExecutionCount ?? 0; + const failureStats = dashboard?.failureStats ?? { last24h: 0, last7d: 0 }; + + return ( +
+ {/* Header */} +
+
+

대시보드

+ setGuideOpen(true)} /> +
+
+ + {/* F1: Stale Execution Warning Banner */} + {staleExecutionCount > 0 && ( +
+ + {staleExecutionCount}건의 오래된 실행 중 작업이 있습니다 + + +
+ )} + + {/* Stats Cards */} +
+ + + + + +
+ + {/* Running Jobs */} +
+

+ 실행 중인 작업 + {runningJobs.length > 0 && ( + + ({runningJobs.length}건) + + )} +

+ {runningJobs.length === 0 ? ( + + ) : ( +
+ + + + + + + + + + + {runningJobs.map((job) => ( + + + + + + + ))} + +
작업명실행 ID시작 시간상태
{job.jobName}#{job.executionId}{formatDateTime(job.startTime)} + +
+
+ )} +
+ + {/* Recent Executions */} +
+
+

최근 실행 이력

+ + 전체 보기 → + +
+ {recentExecutions.length === 0 ? ( + + ) : ( +
+ + + + + + + + + + + + + {recentExecutions.slice(0, 5).map((exec) => ( + + + + + + + + + ))} + +
실행 ID작업명시작 시간종료 시간소요 시간상태
+ + #{exec.executionId} + + {exec.jobName}{formatDateTime(exec.startTime)}{formatDateTime(exec.endTime)} + {calculateDuration(exec.startTime, exec.endTime)} + + +
+
+ )} +
+ + {/* F6: Recent Failures */} + {recentFailures.length > 0 && ( +
+

+ 최근 실패 이력 + + ({recentFailures.length}건) + +

+
+ + + + + + + + + + + {recentFailures.map((fail) => ( + + + + + + + ))} + +
실행 ID작업명시작 시간오류 메시지
+ + #{fail.executionId} + + {fail.jobName}{formatDateTime(fail.startTime)} + {fail.exitMessage + ? fail.exitMessage.length > 50 + ? `${fail.exitMessage.slice(0, 50)}...` + : fail.exitMessage + : '-'} +
+
+
+ )} + + {/* F8: Execution Statistics Chart */} + {statistics && statistics.dailyStats.length > 0 && ( +
+
+

+ 실행 통계 (최근 30일) +

+
+ + 전체 {statistics.totalExecutions} + + + 성공 {statistics.totalSuccess} + + + 실패 {statistics.totalFailed} + +
+
+ ({ + label: d.date.slice(5), + values: [ + { color: 'green', value: d.successCount }, + { color: 'red', value: d.failedCount }, + { color: 'gray', value: d.otherCount }, + ], + }))} + height={180} + /> +
+ + 성공 + + + 실패 + + + 기타 + +
+
+ )} + + setGuideOpen(false)} + /> +
+ ); +} diff --git a/frontend/src/pages/ExecutionDetail.tsx b/frontend/src/pages/ExecutionDetail.tsx new file mode 100644 index 0000000..f861986 --- /dev/null +++ b/frontend/src/pages/ExecutionDetail.tsx @@ -0,0 +1,708 @@ +import { useState, useCallback } from 'react'; +import { useParams, useSearchParams, useNavigate } from 'react-router-dom'; +import { batchApi, type JobExecutionDetailDto, type StepExecutionDto, type FailedRecordDto } from '../api/batchApi'; +import { formatDateTime, formatDuration, calculateDuration } from '../utils/formatters'; +import { usePoller } from '../hooks/usePoller'; +import StatusBadge from '../components/StatusBadge'; +import EmptyState from '../components/EmptyState'; +import LoadingSpinner from '../components/LoadingSpinner'; +import Pagination from '../components/Pagination'; +import DetailStatCard from '../components/DetailStatCard'; +import ApiLogSection from '../components/ApiLogSection'; +import InfoItem from '../components/InfoItem'; +import GuideModal, { HelpButton } from '../components/GuideModal'; + +const POLLING_INTERVAL_MS = 5000; + +const EXECUTION_DETAIL_GUIDE = [ + { + title: '실행 기본 정보', + content: '실행의 시작/종료 시간, 소요 시간, 종료 코드, 에러 메시지 등 기본 정보를 보여줍니다.\n실행 중인 경우 5초마다 자동으로 갱신됩니다.', + }, + { + title: '처리 통계', + content: '4개의 통계 카드로 전체 처리 현황을 요약합니다.\n• 읽기(Read): 외부 API에서 조회한 건수\n• 쓰기(Write): DB에 저장된 건수\n• 건너뜀(Skip): 처리하지 않은 건수\n• 필터(Filter): 조건에 의해 제외된 건수', + }, + { + title: 'Step 실행 정보', + content: '배치 작업은 하나 이상의 Step으로 구성됩니다.\n각 Step의 상태, 처리 건수, 커밋/롤백 횟수를 확인할 수 있습니다.\nAPI 호출 정보에서는 총 호출 수, 성공/에러 수, 평균 응답 시간을 보여줍니다.', + }, + { + title: 'API 호출 로그', + content: '각 Step에서 호출한 외부 API의 상세 로그를 확인할 수 있습니다.\n요청 URL, 응답 코드, 응답 시간 등을 페이지 단위로 조회합니다.', + }, + { + title: '실패 건 관리', + content: '처리 중 실패한 레코드가 있으면 목록으로 표시됩니다.\n• 실패 건 재수집: 실패한 데이터를 다시 수집합니다\n• 일괄 RESOLVED: 모든 실패 건을 해결됨으로 처리합니다\n• 재시도 초기화: 재시도 횟수를 초기화하여 자동 재수집 대상에 포함시킵니다', + }, +]; + +interface StepCardProps { + step: StepExecutionDto; + jobName: string; + jobExecutionId: 
number; +} + +function StepCard({ step, jobName, jobExecutionId }: StepCardProps) { + const stats = [ + { label: '읽기', value: step.readCount }, + { label: '쓰기', value: step.writeCount }, + { label: '커밋', value: step.commitCount }, + { label: '롤백', value: step.rollbackCount }, + { label: '읽기 건너뜀', value: step.readSkipCount }, + { label: '처리 건너뜀', value: step.processSkipCount }, + { label: '쓰기 건너뜀', value: step.writeSkipCount }, + { label: '필터', value: step.filterCount }, + ]; + + return ( +
+
+
+

+ {step.stepName} +

+ +
+ + {step.duration != null + ? formatDuration(step.duration) + : calculateDuration(step.startTime, step.endTime)} + +
+ +
+
+ 시작: {formatDateTime(step.startTime)} +
+
+ 종료: {formatDateTime(step.endTime)} +
+
+ +
+ {stats.map(({ label, value }) => ( +
+

+ {value.toLocaleString()} +

+

{label}

+
+ ))} +
+ + {/* API 호출 정보: apiLogSummary가 있으면 개별 로그 리스트, 없으면 기존 apiCallInfo 요약 */} + {step.apiLogSummary ? ( +
+

API 호출 정보

+
+
+

{step.apiLogSummary.totalCalls.toLocaleString()}

+

총 호출

+
+
+

{step.apiLogSummary.successCount.toLocaleString()}

+

성공

+
+
+

0 ? 'text-red-500' : 'text-wing-text'}`}> + {step.apiLogSummary.errorCount.toLocaleString()} +

+

에러

+
+
+

{Math.round(step.apiLogSummary.avgResponseMs).toLocaleString()}

+

평균(ms)

+
+
+

{step.apiLogSummary.maxResponseMs.toLocaleString()}

+

최대(ms)

+
+
+

{step.apiLogSummary.minResponseMs.toLocaleString()}

+

최소(ms)

+
+
+ + {step.apiLogSummary.totalCalls > 0 && ( + + )} +
+ ) : step.apiCallInfo && ( +
+

API 호출 정보

+
+
+ URL:{' '} + {step.apiCallInfo.apiUrl} +
+
+ Method:{' '} + {step.apiCallInfo.method} +
+
+ 호출:{' '} + {step.apiCallInfo.completedCalls} / {step.apiCallInfo.totalCalls} +
+ {step.apiCallInfo.lastCallTime && ( +
+ 최종:{' '} + {step.apiCallInfo.lastCallTime} +
+ )} +
+
+ )} + + {/* 호출 실패 데이터 토글 */} + {step.failedRecords && step.failedRecords.length > 0 && ( + + )} + + {step.exitMessage && ( +
+

Exit Message

+

+ {step.exitMessage} +

+
+ )} +
+ ); +} + +export default function ExecutionDetail() { + const { id: paramId } = useParams<{ id: string }>(); + const [searchParams] = useSearchParams(); + const navigate = useNavigate(); + + const executionId = paramId + ? Number(paramId) + : Number(searchParams.get('id')); + + const [detail, setDetail] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [guideOpen, setGuideOpen] = useState(false); + + const isRunning = detail + ? detail.status === 'STARTED' || detail.status === 'STARTING' + : false; + + const loadDetail = useCallback(async () => { + if (!executionId || isNaN(executionId)) { + setError('유효하지 않은 실행 ID입니다.'); + setLoading(false); + return; + } + try { + const data = await batchApi.getExecutionDetail(executionId); + setDetail(data); + setError(null); + } catch (err) { + setError( + err instanceof Error + ? err.message + : '실행 상세 정보를 불러오지 못했습니다.', + ); + } finally { + setLoading(false); + } + }, [executionId]); + + /* 실행중인 경우 5초 폴링, 완료 후에는 1회 로드로 충분하지만 폴링 유지 */ + usePoller(loadDetail, isRunning ? POLLING_INTERVAL_MS : 30_000, [ + executionId, + ]); + + if (loading) return ; + + if (error || !detail) { + return ( +
+ + +
+ ); + } + + const jobParams = Object.entries(detail.jobParameters); + + return ( +
+ {/* 상단 내비게이션 */} + + + {/* Job 기본 정보 */} +
+
+
+
+

+ 실행 #{detail.executionId} +

+ setGuideOpen(true)} /> +
+

+ {detail.jobName} +

+
+ +
+ +
+ + + + + {detail.exitMessage && ( +
+ +
+ )} +
+
+ + {/* 실행 통계 카드 4개 */} +
+ + + + +
+ + {/* Job Parameters */} + {jobParams.length > 0 && ( +
+

+ Job Parameters +

+
+ + + + + + + + + {jobParams.map(([key, value]) => ( + + + + + ))} + +
KeyValue
+ {key} + + {value} +
+
+
+ )} + + {/* Step 실행 정보 */} +
+

+ Step 실행 정보 + + ({detail.stepExecutions.length}개) + +

+ {detail.stepExecutions.length === 0 ? ( + + ) : ( +
+ {detail.stepExecutions.map((step) => ( + + ))} +
+ )} +
+ + setGuideOpen(false)} + pageTitle="실행 상세" + sections={EXECUTION_DETAIL_GUIDE} + /> +
+ ); +} + +const FAILED_PAGE_SIZE = 10; + +function FailedRecordsToggle({ records, jobName, jobExecutionId }: { records: FailedRecordDto[]; jobName: string; jobExecutionId: number }) { + const [open, setOpen] = useState(false); + const [showConfirm, setShowConfirm] = useState(false); + const [showResolveConfirm, setShowResolveConfirm] = useState(false); + const [retrying, setRetrying] = useState(false); + const [resolving, setResolving] = useState(false); + const [showResetConfirm, setShowResetConfirm] = useState(false); + const [resetting, setResetting] = useState(false); + const [page, setPage] = useState(0); + const navigate = useNavigate(); + + const failedRecords = records.filter((r) => r.status === 'FAILED'); + const totalPages = Math.ceil(records.length / FAILED_PAGE_SIZE); + const pagedRecords = records.slice(page * FAILED_PAGE_SIZE, (page + 1) * FAILED_PAGE_SIZE); + + const statusColor = (status: string) => { + switch (status) { + case 'RESOLVED': return 'text-emerald-600 bg-emerald-50'; + case 'RETRY_PENDING': return 'text-amber-600 bg-amber-50'; + default: return 'text-red-600 bg-red-50'; + } + }; + + const MAX_RETRY_COUNT = 3; + + const retryStatusLabel = (record: FailedRecordDto) => { + if (record.status !== 'FAILED') return null; + if (record.retryCount >= MAX_RETRY_COUNT) return { label: '재시도 초과', color: 'text-red-600 bg-red-100' }; + if (record.retryCount > 0) return { label: `재시도 ${record.retryCount}/${MAX_RETRY_COUNT}`, color: 'text-amber-600 bg-amber-100' }; + return { label: '대기', color: 'text-blue-600 bg-blue-100' }; + }; + + const exceededRecords = failedRecords.filter((r) => r.retryCount >= MAX_RETRY_COUNT); + + const handleRetry = async () => { + setRetrying(true); + try { + const result = await batchApi.retryFailedRecords(jobName, failedRecords.length, jobExecutionId); + if (result.success) { + setShowConfirm(false); + if (result.executionId) { + navigate(`/executions/${result.executionId}`); + } else { + alert(result.message || '재수집이 
요청되었습니다.'); + } + } else { + alert(result.message || '재수집 실행에 실패했습니다.'); + } + } catch { + alert('재수집 실행에 실패했습니다.'); + } finally { + setRetrying(false); + } + }; + + const handleResolve = async () => { + setResolving(true); + try { + const ids = failedRecords.map((r) => r.id); + await batchApi.resolveFailedRecords(ids); + setShowResolveConfirm(false); + navigate(0); + } catch { + alert('일괄 RESOLVED 처리에 실패했습니다.'); + } finally { + setResolving(false); + } + }; + + const handleResetRetry = async () => { + setResetting(true); + try { + const ids = exceededRecords.map((r) => r.id); + await batchApi.resetRetryCount(ids); + setShowResetConfirm(false); + navigate(0); + } catch { + alert('재시도 초기화에 실패했습니다.'); + } finally { + setResetting(false); + } + }; + + return ( +
+
+ + + {failedRecords.length > 0 && ( +
+ {exceededRecords.length > 0 && ( + + )} + + +
+ )} +
+ + {open && ( +
+
+ + + + + + + + + + + + {pagedRecords.map((record) => ( + + + + + + + + ))} + +
Record Key에러 메시지재시도상태생성 시간
+ {record.recordKey} + + {record.errorMessage || '-'} + + {(() => { + const info = retryStatusLabel(record); + return info ? ( + + {info.label} + + ) : ( + - + ); + })()} + + + {record.status} + + + {formatDateTime(record.createdAt)} +
+
+ +
+ )} + + {/* 재수집 확인 다이얼로그 */} + {showConfirm && ( +
+
+

+ 실패 건 재수집 확인 +

+

+ 다음 {failedRecords.length}건의 IMO에 대해 재수집을 실행합니다. +

+
+
+ {failedRecords.map((r) => ( + + {r.recordKey} + + ))} +
+
+
+ + +
+
+
+ )} + + {/* 일괄 RESOLVED 확인 다이얼로그 */} + {showResolveConfirm && ( +
+
+

+ 일괄 RESOLVED 확인 +

+

+ FAILED 상태의 {failedRecords.length}건을 RESOLVED로 변경합니다. + 이 작업은 되돌릴 수 없습니다. +

+
+ + +
+
+
+ )} + + {/* 재시도 초기화 확인 다이얼로그 */} + {showResetConfirm && ( +
+
+

+ 재시도 초기화 확인 +

+

+ 재시도 횟수를 초과한 {exceededRecords.length}건의 retryCount를 0으로 초기화합니다. + 초기화 후 다음 배치 실행 시 자동 재수집 대상에 포함됩니다. +

+
+ + +
+
+
+ )} +
+ ); +} + diff --git a/frontend/src/pages/Executions.tsx b/frontend/src/pages/Executions.tsx new file mode 100644 index 0000000..189604b --- /dev/null +++ b/frontend/src/pages/Executions.tsx @@ -0,0 +1,648 @@ +import { useState, useMemo, useCallback, useEffect } from 'react'; +import { useNavigate, useSearchParams } from 'react-router-dom'; +import { batchApi, type JobExecutionDto, type ExecutionSearchResponse, type ScheduleResponse } from '../api/batchApi'; +import { formatDateTime, calculateDuration } from '../utils/formatters'; +import { usePoller } from '../hooks/usePoller'; +import { useToastContext } from '../contexts/ToastContext'; +import StatusBadge from '../components/StatusBadge'; +import ConfirmModal from '../components/ConfirmModal'; +import InfoModal from '../components/InfoModal'; +import EmptyState from '../components/EmptyState'; +import LoadingSpinner from '../components/LoadingSpinner'; +import GuideModal, { HelpButton } from '../components/GuideModal'; + +type StatusFilter = 'ALL' | 'COMPLETED' | 'FAILED' | 'STARTED' | 'STOPPED'; + +const STATUS_FILTERS: { value: StatusFilter; label: string }[] = [ + { value: 'ALL', label: '전체' }, + { value: 'COMPLETED', label: '완료' }, + { value: 'FAILED', label: '실패' }, + { value: 'STARTED', label: '실행중' }, + { value: 'STOPPED', label: '중지됨' }, +]; + +const POLLING_INTERVAL_MS = 5000; +const RECENT_LIMIT = 50; +const PAGE_SIZE = 50; + +const EXECUTIONS_GUIDE = [ + { + title: '작업 필터', + content: '상단의 드롭다운에서 조회할 작업을 선택할 수 있습니다.\n여러 작업을 동시에 선택할 수 있습니다.', + }, + { + title: '상태 필터', + content: '완료 / 실패 / 실행중 / 중지됨 버튼으로 상태별 필터링이 가능합니다.\n"전체"를 선택하면 모든 상태의 실행 이력을 볼 수 있습니다.', + }, + { + title: '날짜 검색', + content: '시작일과 종료일을 지정하여 특정 기간의 실행 이력을 조회할 수 있습니다.\n"검색" 버튼을 클릭하면 조건에 맞는 결과가 표시됩니다.\n"초기화" 버튼으로 검색 조건을 제거하고 최신 이력으로 돌아갑니다.', + }, + { + title: '실행 중인 작업 제어', + content: '실행 중인 작업의 행에서 "중지" 또는 "강제 종료" 버튼을 사용할 수 있습니다.\n• 중지: 현재 Step 완료 후 안전하게 종료\n• 강제 종료: 즉시 중단 (데이터 정합성 주의)', + }, + { + title: '실패 로그 확인', + content: '상태가 
"FAILED"인 행을 클릭하면 실패 상세 정보를 확인할 수 있습니다.\n종료 코드(Exit Code)와 에러 메시지로 실패 원인을 파악하세요.\n상태가 "COMPLETED"이지만 실패 건수가 있으면 경고 아이콘이 표시됩니다.', + }, +]; + +export default function Executions() { + const navigate = useNavigate(); + const [searchParams, setSearchParams] = useSearchParams(); + + const jobFromQuery = searchParams.get('job') || ''; + + const [jobs, setJobs] = useState([]); + const [scheduleList, setScheduleList] = useState([]); + const [executions, setExecutions] = useState([]); + const [selectedJobs, setSelectedJobs] = useState(jobFromQuery ? [jobFromQuery] : []); + const [jobDropdownOpen, setJobDropdownOpen] = useState(false); + const [statusFilter, setStatusFilter] = useState('ALL'); + const [loading, setLoading] = useState(true); + const [stopTarget, setStopTarget] = useState(null); + + // F1: 강제 종료 + const [abandonTarget, setAbandonTarget] = useState(null); + + // F4: 날짜 범위 필터 + 페이지네이션 + const [startDate, setStartDate] = useState(''); + const [endDate, setEndDate] = useState(''); + const [page, setPage] = useState(0); + const [totalPages, setTotalPages] = useState(0); + const [totalCount, setTotalCount] = useState(0); + const [useSearch, setUseSearch] = useState(false); + + // F9: 실패 로그 뷰어 + const [failLogTarget, setFailLogTarget] = useState(null); + + const [guideOpen, setGuideOpen] = useState(false); + + const { showToast } = useToastContext(); + + useEffect(() => { + batchApi.getSchedules().then(res => setScheduleList(res.schedules)).catch(() => {}); + }, []); + + const displayNameMap = useMemo>(() => { + const map: Record = {}; + for (const s of scheduleList) { + if (s.description) map[s.jobName] = s.description; + } + return map; + }, [scheduleList]); + + + const loadJobs = useCallback(async () => { + try { + const data = await batchApi.getJobs(); + setJobs(data); + } catch { + /* Job 목록 로드 실패는 무시 */ + } + }, []); + + const loadSearchExecutions = useCallback(async (targetPage: number) => { + try { + setLoading(true); + const params: { + jobNames?: string[]; 
+ status?: string; + startDate?: string; + endDate?: string; + page?: number; + size?: number; + } = { + page: targetPage, + size: PAGE_SIZE, + }; + if (selectedJobs.length > 0) params.jobNames = selectedJobs; + if (statusFilter !== 'ALL') params.status = statusFilter; + if (startDate) params.startDate = `${startDate}T00:00:00`; + if (endDate) params.endDate = `${endDate}T23:59:59`; + + const data: ExecutionSearchResponse = await batchApi.searchExecutions(params); + setExecutions(data.executions); + setTotalPages(data.totalPages); + setTotalCount(data.totalCount); + setPage(data.page); + } catch { + setExecutions([]); + setTotalPages(0); + setTotalCount(0); + } finally { + setLoading(false); + } + }, [selectedJobs, statusFilter, startDate, endDate]); + + const loadExecutions = useCallback(async () => { + // 검색 모드에서는 폴링하지 않음 (검색 버튼 클릭 시에만 1회 조회) + if (useSearch) return; + try { + let data: JobExecutionDto[]; + if (selectedJobs.length === 1) { + data = await batchApi.getJobExecutions(selectedJobs[0]); + } else if (selectedJobs.length > 1) { + // 복수 Job 선택 시 search API 사용 + const result = await batchApi.searchExecutions({ + jobNames: selectedJobs, size: RECENT_LIMIT, + }); + data = result.executions; + } else { + try { + data = await batchApi.getRecentExecutions(RECENT_LIMIT); + } catch { + data = []; + } + } + setExecutions(data); + } catch { + setExecutions([]); + } finally { + setLoading(false); + } + }, [selectedJobs, useSearch, page, loadSearchExecutions]); + + /* 마운트 시 Job 목록 1회 로드 */ + usePoller(loadJobs, 60_000, []); + + /* 실행 이력 5초 폴링 */ + usePoller(loadExecutions, POLLING_INTERVAL_MS, [selectedJobs, useSearch, page]); + + const filteredExecutions = useMemo(() => { + // 검색 모드에서는 서버 필터링 사용 + if (useSearch) return executions; + if (statusFilter === 'ALL') return executions; + return executions.filter((e) => e.status === statusFilter); + }, [executions, statusFilter, useSearch]); + + const toggleJob = (jobName: string) => { + setSelectedJobs((prev) => { + const 
next = prev.includes(jobName) + ? prev.filter((j) => j !== jobName) + : [...prev, jobName]; + if (next.length === 1) { + setSearchParams({ job: next[0] }); + } else { + setSearchParams({}); + } + return next; + }); + setLoading(true); + if (useSearch) { + setPage(0); + } + }; + + const clearSelectedJobs = () => { + setSelectedJobs([]); + setSearchParams({}); + setLoading(true); + if (useSearch) { + setPage(0); + } + }; + + const handleStop = async () => { + if (!stopTarget) return; + try { + const result = await batchApi.stopExecution(stopTarget.executionId); + showToast(result.message || '실행이 중지되었습니다.', 'success'); + } catch (err) { + showToast( + err instanceof Error ? err.message : '중지 요청에 실패했습니다.', + 'error', + ); + } finally { + setStopTarget(null); + } + }; + + // F1: 강제 종료 핸들러 + const handleAbandon = async () => { + if (!abandonTarget) return; + try { + const result = await batchApi.abandonExecution(abandonTarget.executionId); + showToast(result.message || '실행이 강제 종료되었습니다.', 'success'); + } catch (err) { + showToast( + err instanceof Error ? err.message : '강제 종료 요청에 실패했습니다.', + 'error', + ); + } finally { + setAbandonTarget(null); + } + }; + + // F4: 검색 핸들러 + const handleSearch = async () => { + setUseSearch(true); + setPage(0); + await loadSearchExecutions(0); + }; + + // F4: 초기화 핸들러 + const handleResetSearch = () => { + setUseSearch(false); + setStartDate(''); + setEndDate(''); + setPage(0); + setTotalPages(0); + setTotalCount(0); + setLoading(true); + }; + + // F4: 페이지 이동 핸들러 + const handlePageChange = (newPage: number) => { + if (newPage < 0 || newPage >= totalPages) return; + setPage(newPage); + loadSearchExecutions(newPage); + }; + + const isRunning = (status: string) => + status === 'STARTED' || status === 'STARTING'; + + return ( +
+ {/* 헤더 */} +
+
+

실행 이력

+ setGuideOpen(true)} /> +
+

+ 배치 작업 실행 이력을 조회하고 관리합니다. +

+
+ + {/* 필터 영역 */} +
+
+ {/* Job 멀티 선택 */} +
+
+ +
+ + {jobDropdownOpen && ( + <> +
setJobDropdownOpen(false)} /> +
+ {jobs.map((job) => ( + + ))} +
+ + )} +
+ {selectedJobs.length > 0 && ( + + )} +
+ {/* 선택된 Job 칩 */} + {selectedJobs.length > 0 && ( +
+ {selectedJobs.map((job) => ( + + {displayNameMap[job] || job} + + + ))} +
+ )} +
+ + {/* 상태 필터 버튼 그룹 */} +
+ {STATUS_FILTERS.map(({ value, label }) => ( + + ))} +
+
+ + {/* F4: 날짜 범위 필터 */} +
+ +
+ setStartDate(e.target.value)} + className="block rounded-lg border border-wing-border bg-wing-surface px-3 py-2 text-sm shadow-sm focus:border-wing-accent focus:ring-1 focus:ring-wing-accent" + /> + ~ + setEndDate(e.target.value)} + className="block rounded-lg border border-wing-border bg-wing-surface px-3 py-2 text-sm shadow-sm focus:border-wing-accent focus:ring-1 focus:ring-wing-accent" + /> +
+
+ + {useSearch && ( + + )} +
+
+
+ + {/* 실행 이력 테이블 */} +
+ {loading ? ( + + ) : filteredExecutions.length === 0 ? ( + 0 + ? '선택한 작업의 실행 이력이 없습니다.' + : undefined + } + /> + ) : ( +
+ + + + + + + + + + + + + + {filteredExecutions.map((exec) => ( + + + + + + + + + + ))} + +
실행 ID작업명상태시작시간종료시간소요시간 + 액션 +
+ #{exec.executionId} + + {displayNameMap[exec.jobName] || exec.jobName} + +
+ {/* F9: FAILED 상태 클릭 시 실패 로그 모달 */} + {exec.status === 'FAILED' ? ( + + ) : ( + + )} + {exec.status === 'COMPLETED' && exec.failedRecordCount != null && exec.failedRecordCount > 0 && ( + + + + + {exec.failedRecordCount} + + )} +
+
+ {formatDateTime(exec.startTime)} + + {formatDateTime(exec.endTime)} + + {calculateDuration( + exec.startTime, + exec.endTime, + )} + +
+ {isRunning(exec.status) && ( + <> + + + + )} + +
+
+
+ )} + + {/* 결과 건수 표시 + F4: 페이지네이션 */} + {!loading && filteredExecutions.length > 0 && ( +
+
+ {useSearch ? ( + <>총 {totalCount}건 + ) : ( + <> + 총 {filteredExecutions.length}건 + {statusFilter !== 'ALL' && ( + + (전체 {executions.length}건 중) + + )} + + )} +
+ {/* F4: 페이지네이션 UI */} + {useSearch && totalPages > 1 && ( +
+ + + {page + 1} / {totalPages} + + +
+ )} +
+ )} +
+ + {/* 중지 확인 모달 */} + setStopTarget(null)} + /> + + {/* F1: 강제 종료 확인 모달 */} + setAbandonTarget(null)} + /> + + setGuideOpen(false)} + pageTitle="실행 이력" + sections={EXECUTIONS_GUIDE} + /> + + {/* F9: 실패 로그 뷰어 모달 */} + setFailLogTarget(null)} + > + {failLogTarget && ( +
+
+

+ Exit Code +

+

+ {failLogTarget.exitCode || '-'} +

+
+
+

+ Exit Message +

+
+                                {failLogTarget.exitMessage || '메시지 없음'}
+                            
+
+
+ )} +
+
+ ); +} diff --git a/frontend/src/pages/Jobs.tsx b/frontend/src/pages/Jobs.tsx new file mode 100644 index 0000000..9ca3977 --- /dev/null +++ b/frontend/src/pages/Jobs.tsx @@ -0,0 +1,576 @@ +import { useState, useCallback, useEffect, useMemo } from 'react'; +import { useNavigate } from 'react-router-dom'; +import { batchApi } from '../api/batchApi'; +import type { JobDetailDto, ScheduleResponse } from '../api/batchApi'; +import { usePoller } from '../hooks/usePoller'; +import { useToastContext } from '../contexts/ToastContext'; +import StatusBadge from '../components/StatusBadge'; +import EmptyState from '../components/EmptyState'; +import LoadingSpinner from '../components/LoadingSpinner'; +import { formatDateTime, calculateDuration } from '../utils/formatters'; +import GuideModal, { HelpButton } from '../components/GuideModal'; + +const POLLING_INTERVAL = 30000; + +const JOBS_GUIDE = [ + { + title: '상태 필터', + content: '상단의 탭 버튼으로 작업 상태별 필터링이 가능합니다.\n전체 / 실행 중 / 성공 / 실패 / 미실행 중 선택하세요.\n각 탭 옆의 숫자는 해당 상태의 작업 수입니다.', + }, + { + title: '검색 및 정렬', + content: '검색창에 작업명을 입력하면 실시간으로 필터링됩니다.\n정렬 옵션: 작업명순, 최신 실행순(기본), 상태별(실패 우선)\n테이블/카드 뷰 전환 버튼으로 보기 방식을 변경할 수 있습니다.', + }, + { + title: '작업 실행', + content: '"실행" 버튼을 클릭하면 확인 팝업이 표시됩니다.\n확인 후 해당 배치 작업이 즉시 실행됩니다.\n실행 중인 작업은 좌측에 초록색 점이 표시됩니다.', + }, + { + title: '이력 보기', + content: '"이력 보기" 버튼을 클릭하면 해당 작업의 실행 이력 화면으로 이동합니다.\n과거 실행 결과, 소요 시간 등을 상세히 확인할 수 있습니다.', + }, +]; + +type StatusFilterKey = 'ALL' | 'STARTED' | 'COMPLETED' | 'FAILED' | 'NONE'; +type SortKey = 'name' | 'recent' | 'status'; +type ViewMode = 'card' | 'table'; + +interface StatusTabConfig { + key: StatusFilterKey; + label: string; +} + +const STATUS_TABS: StatusTabConfig[] = [ + { key: 'ALL', label: '전체' }, + { key: 'STARTED', label: '실행 중' }, + { key: 'COMPLETED', label: '성공' }, + { key: 'FAILED', label: '실패' }, + { key: 'NONE', label: '미실행' }, +]; + +const STATUS_ORDER: Record = { + FAILED: 0, + STARTED: 1, + COMPLETED: 2, +}; + +function getStatusOrder(job: 
JobDetailDto): number { + if (!job.lastExecution) return 3; + return STATUS_ORDER[job.lastExecution.status] ?? 4; +} + +function matchesStatusFilter(job: JobDetailDto, filter: StatusFilterKey): boolean { + if (filter === 'ALL') return true; + if (filter === 'NONE') return job.lastExecution === null; + return job.lastExecution?.status === filter; +} + +export default function Jobs() { + const navigate = useNavigate(); + const { showToast } = useToastContext(); + + const [jobs, setJobs] = useState([]); + const [loading, setLoading] = useState(true); + const [searchTerm, setSearchTerm] = useState(''); + const [statusFilter, setStatusFilter] = useState('ALL'); + const [sortKey, setSortKey] = useState('recent'); + const [viewMode, setViewMode] = useState('table'); + + const [guideOpen, setGuideOpen] = useState(false); + + // Execute modal (individual card) + const [executeModalOpen, setExecuteModalOpen] = useState(false); + const [targetJob, setTargetJob] = useState(''); + const [executing, setExecuting] = useState(false); + + + const [scheduleList, setScheduleList] = useState([]); + + useEffect(() => { + batchApi.getSchedules().then(res => setScheduleList(res.schedules)).catch(() => {}); + }, []); + + const displayNameMap = useMemo>(() => { + const map: Record = {}; + for (const s of scheduleList) { + if (s.description) map[s.jobName] = s.description; + } + return map; + }, [scheduleList]); + + const loadJobs = useCallback(async () => { + try { + const data = await batchApi.getJobsDetail(); + setJobs(data); + } catch (err) { + console.error('Jobs load failed:', err); + } finally { + setLoading(false); + } + }, []); + + usePoller(loadJobs, POLLING_INTERVAL); + + /** schedule description 우선, 없으면 jobName */ + const getJobLabel = useCallback((job: JobDetailDto) => displayNameMap[job.jobName] || job.jobName, [displayNameMap]); + + const statusCounts = useMemo(() => { + const searchFiltered = searchTerm.trim() + ? 
jobs.filter((job) => { + const term = searchTerm.toLowerCase(); + return job.jobName.toLowerCase().includes(term) + || (displayNameMap[job.jobName]?.toLowerCase().includes(term) ?? false); + }) + : jobs; + + return STATUS_TABS.reduce>( + (acc, tab) => { + acc[tab.key] = searchFiltered.filter((job) => matchesStatusFilter(job, tab.key)).length; + return acc; + }, + { ALL: 0, STARTED: 0, COMPLETED: 0, FAILED: 0, NONE: 0 }, + ); + }, [jobs, searchTerm]); + + const filteredJobs = useMemo(() => { + let result = jobs; + + if (searchTerm.trim()) { + const term = searchTerm.toLowerCase(); + result = result.filter((job) => + job.jobName.toLowerCase().includes(term) + || (displayNameMap[job.jobName]?.toLowerCase().includes(term) ?? false), + ); + } + + result = result.filter((job) => matchesStatusFilter(job, statusFilter)); + + result = [...result].sort((a, b) => { + if (sortKey === 'name') { + return getJobLabel(a).localeCompare(getJobLabel(b)); + } + if (sortKey === 'recent') { + const aTime = a.lastExecution?.startTime ? new Date(a.lastExecution.startTime).getTime() : 0; + const bTime = b.lastExecution?.startTime ? new Date(b.lastExecution.startTime).getTime() : 0; + return bTime - aTime; + } + if (sortKey === 'status') { + return getStatusOrder(a) - getStatusOrder(b); + } + return 0; + }); + + return result; + }, [jobs, searchTerm, statusFilter, sortKey]); + + const handleExecuteClick = (jobName: string) => { + setTargetJob(jobName); + setExecuteModalOpen(true); + }; + + const handleConfirmExecute = async () => { + if (!targetJob) return; + setExecuting(true); + try { + const result = await batchApi.executeJob(targetJob); + showToast( + result.message || `${targetJob} 실행 요청 완료`, + 'success', + ); + setExecuteModalOpen(false); + } catch (err) { + showToast( + `실행 실패: ${err instanceof Error ? 
err.message : '알 수 없는 오류'}`, + 'error', + ); + } finally { + setExecuting(false); + } + }; + + const handleViewHistory = (jobName: string) => { + navigate(`/executions?job=${encodeURIComponent(jobName)}`); + }; + + if (loading) return ; + + return ( +
+ {/* Header */} +
+
+

배치 작업 목록

+ setGuideOpen(true)} /> +
+ + 총 {jobs.length}개 작업 + +
+ + {/* Status Filter Tabs */} +
+ {STATUS_TABS.map((tab) => ( + + ))} +
+ + {/* Search + Sort + View Toggle */} +
+
+ {/* Search */} +
+ + + + + + setSearchTerm(e.target.value)} + className="w-full pl-10 pr-4 py-2 border border-wing-border rounded-lg text-sm + focus:ring-2 focus:ring-wing-accent focus:border-wing-accent outline-none" + /> + {searchTerm && ( + + )} +
+ + {/* Sort dropdown */} + + + {/* View mode toggle */} +
+ + +
+
+ + {searchTerm && ( +

+ {filteredJobs.length}개 작업 검색됨 +

+ )} +
+ + {/* Job List */} + {filteredJobs.length === 0 ? ( +
+ +
+ ) : viewMode === 'card' ? ( + /* Card View */ +
+ {filteredJobs.map((job) => { + const isRunning = job.lastExecution?.status === 'STARTED'; + const duration = job.lastExecution + ? calculateDuration(job.lastExecution.startTime, job.lastExecution.endTime) + : null; + const showDuration = + job.lastExecution?.endTime != null && duration !== null && duration !== '-'; + + return ( +
+
+
+

+ {getJobLabel(job)} +

+
+
+ {isRunning && ( + + )} + {job.lastExecution && ( + + )} +
+
+ + {/* Job detail info */} +
+ {job.lastExecution ? ( + <> +

+ 마지막 실행: {formatDateTime(job.lastExecution.startTime)} +

+ {showDuration && ( +

+ 소요 시간: {duration} +

+ )} + {isRunning && !showDuration && ( +

+ 소요 시간: 실행 중... +

+ )} + + ) : ( +

실행 이력 없음

+ )} + +
+ {job.scheduleCron ? ( + + 자동 + + ) : ( + + 수동 + + )} + {job.scheduleCron && ( + + {job.scheduleCron} + + )} +
+
+ +
+ + +
+
+ ); + })} +
+ ) : ( + /* Table View */ +
+
+ + + + + + + + + + + + + {filteredJobs.map((job) => { + const isRunning = job.lastExecution?.status === 'STARTED'; + const duration = job.lastExecution + ? calculateDuration(job.lastExecution.startTime, job.lastExecution.endTime) + : '-'; + + return ( + + + + + + + + + ); + })} + +
+ 작업명 + + 상태 + + 마지막 실행 + + 소요시간 + + 스케줄 + + 액션 +
+
+ {isRunning && ( + + )} + {getJobLabel(job)} +
+
+ {job.lastExecution ? ( + + ) : ( + 미실행 + )} + + {job.lastExecution + ? formatDateTime(job.lastExecution.startTime) + : '-'} + + {job.lastExecution ? duration : '-'} + + {job.scheduleCron ? ( + + 자동 + + ) : ( + + 수동 + + )} + +
+ + +
+
+
+
+ )} + + setGuideOpen(false)} + /> + + {/* Execute Modal (custom with date params) */} + {executeModalOpen && ( +
setExecuteModalOpen(false)} + > +
e.stopPropagation()} + > +

작업 실행 확인

+

+ "{displayNameMap[targetJob] || targetJob}" 작업을 실행하시겠습니까? +

+ +
+ + +
+
+
+ )} + +
+ ); +} diff --git a/frontend/src/pages/Schedules.tsx b/frontend/src/pages/Schedules.tsx new file mode 100644 index 0000000..bd01cdc --- /dev/null +++ b/frontend/src/pages/Schedules.tsx @@ -0,0 +1,839 @@ +import { useState, useEffect, useCallback, useMemo } from 'react'; +import { batchApi, type ScheduleResponse } from '../api/batchApi'; +import { formatDateTime } from '../utils/formatters'; +import { useToastContext } from '../contexts/ToastContext'; +import ConfirmModal from '../components/ConfirmModal'; +import EmptyState from '../components/EmptyState'; +import LoadingSpinner from '../components/LoadingSpinner'; +import { getNextExecutions } from '../utils/cronPreview'; +import GuideModal, { HelpButton } from '../components/GuideModal'; + +type ScheduleMode = 'new' | 'existing'; +type ScheduleViewMode = 'card' | 'table'; +type ActiveFilterKey = 'ALL' | 'ACTIVE' | 'INACTIVE'; +type ScheduleSortKey = 'name' | 'nextFire' | 'active'; + +interface ActiveTabConfig { + key: ActiveFilterKey; + label: string; +} + +const ACTIVE_TABS: ActiveTabConfig[] = [ + { key: 'ALL', label: '전체' }, + { key: 'ACTIVE', label: '활성' }, + { key: 'INACTIVE', label: '비활성' }, +]; + +interface ConfirmAction { + type: 'toggle' | 'delete'; + schedule: ScheduleResponse; +} + +const CRON_PRESETS = [ + { label: '매 분', cron: '0 * * * * ?' }, + { label: '매시 정각', cron: '0 0 * * * ?' }, + { label: '매 15분', cron: '0 0/15 * * * ?' }, + { label: '매일 00:00', cron: '0 0 0 * * ?' }, + { label: '매일 12:00', cron: '0 0 12 * * ?' }, + { label: '매주 월 00:00', cron: '0 0 0 ? * MON' }, +]; + +function CronPreview({ cron }: { cron: string }) { + const nextDates = useMemo(() => getNextExecutions(cron, 5), [cron]); + + if (nextDates.length === 0) { + return ( +
+

미리보기 불가 (복잡한 표현식)

+
+ ); + } + + const fmt = new Intl.DateTimeFormat('ko-KR', { + month: '2-digit', + day: '2-digit', + weekday: 'short', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + hour12: false, + }); + + return ( +
+ +
+ {nextDates.map((d, i) => ( + + {fmt.format(d)} + + ))} +
+
+ ); +} + +function getTriggerStateStyle(state: string | null): string { + switch (state) { + case 'NORMAL': + return 'bg-emerald-100 text-emerald-700'; + case 'PAUSED': + return 'bg-amber-100 text-amber-700'; + case 'BLOCKED': + return 'bg-red-100 text-red-700'; + case 'ERROR': + return 'bg-red-100 text-red-700'; + default: + return 'bg-wing-card text-wing-muted'; + } +} + +const SCHEDULES_GUIDE = [ + { + title: '스케줄이란?', + content: '스케줄은 배치 작업을 자동으로 실행하는 설정입니다.\nCron 표현식으로 실행 주기를 지정하면 해당 시간에 자동 실행됩니다.\n활성화된 스케줄만 자동 실행되며, 비활성화하면 일시 중지됩니다.', + }, + { + title: '스케줄 등록/수정', + content: '"+ 새 스케줄" 버튼 또는 기존 스케줄의 "편집" 버튼을 클릭하면 설정 팝업이 열립니다.\n1. 작업 선택: 자동 실행할 배치 작업을 선택합니다\n2. Cron 표현식: 실행 주기를 설정합니다 (프리셋 버튼으로 간편 설정 가능)\n3. 설명: 스케줄에 대한 메모를 입력합니다 (선택)\n\n"다음 5회 실행 예정" 미리보기로 설정이 올바른지 확인하세요.', + }, + { + title: 'Cron 표현식', + content: 'Cron 표현식은 "초 분 시 일 월 요일" 6자리로 구성됩니다.\n예시:\n• 0 0/15 * * * ? → 매 15분마다\n• 0 0 0 * * ? → 매일 자정\n• 0 0 12 * * ? → 매일 정오\n• 0 0 0 ? * MON → 매주 월요일 자정\n\n프리셋 버튼을 활용하면 직접 입력하지 않아도 됩니다.', + }, + { + title: '스케줄 관리', + content: '• 편집: 스케줄 설정(Cron, 설명)을 수정합니다\n• 활성화/비활성화: 자동 실행을 켜거나 끕니다\n• 삭제: 스케줄을 완전히 제거합니다\n\n상태 표시:\n• 활성 (초록): 정상 동작 중\n• 비활성 (회색): 일시 중지 상태\n• NORMAL: 트리거 정상\n• PAUSED: 트리거 일시 중지\n• BLOCKED: 이전 실행이 아직 진행 중\n• ERROR: 트리거 오류 발생', + }, +]; + +export default function Schedules() { + const { showToast } = useToastContext(); + + // Guide modal state + const [guideOpen, setGuideOpen] = useState(false); + + // Form state + const [jobs, setJobs] = useState([]); + const [selectedJob, setSelectedJob] = useState(''); + const [cronExpression, setCronExpression] = useState(''); + const [description, setDescription] = useState(''); + const [scheduleMode, setScheduleMode] = useState('new'); + const [formLoading, setFormLoading] = useState(false); + const [saving, setSaving] = useState(false); + + // Schedule list state + const [schedules, setSchedules] = useState([]); + const [listLoading, setListLoading] = useState(true); + + // View mode state + const 
[viewMode, setViewMode] = useState('table'); + + // Search / filter / sort state + const [searchTerm, setSearchTerm] = useState(''); + const [activeFilter, setActiveFilter] = useState('ALL'); + const [sortKey, setSortKey] = useState('name'); + + // Confirm modal state + const [confirmAction, setConfirmAction] = useState(null); + + // Form modal state + const [formOpen, setFormOpen] = useState(false); + + const loadSchedules = useCallback(async () => { + try { + const result = await batchApi.getSchedules(); + setSchedules(result.schedules); + } catch (err) { + showToast('스케줄 목록 조회 실패', 'error'); + console.error(err); + } finally { + setListLoading(false); + } + }, [showToast]); + + const loadJobs = useCallback(async () => { + try { + const result = await batchApi.getJobs(); + setJobs(result); + } catch (err) { + showToast('작업 목록 조회 실패', 'error'); + console.error(err); + } + }, [showToast]); + + useEffect(() => { + loadJobs(); + loadSchedules(); + }, [loadJobs, loadSchedules]); + + const displayNameMap = useMemo>(() => { + const map: Record = {}; + for (const s of schedules) { + if (s.description) map[s.jobName] = s.description; + } + return map; + }, [schedules]); + + const activeCounts = useMemo(() => { + const searchFiltered = searchTerm.trim() + ? schedules.filter((s) => { + const term = searchTerm.toLowerCase(); + return s.jobName.toLowerCase().includes(term) + || (displayNameMap[s.jobName]?.toLowerCase().includes(term) ?? false) + || (s.description?.toLowerCase().includes(term) ?? 
false); + }) + : schedules; + + return ACTIVE_TABS.reduce>( + (acc, tab) => { + acc[tab.key] = searchFiltered.filter((s) => { + if (tab.key === 'ALL') return true; + if (tab.key === 'ACTIVE') return s.active; + return !s.active; + }).length; + return acc; + }, + { ALL: 0, ACTIVE: 0, INACTIVE: 0 }, + ); + }, [schedules, searchTerm, displayNameMap]); + + const filteredSchedules = useMemo(() => { + let result = schedules; + + // 검색 필터 + if (searchTerm.trim()) { + const term = searchTerm.toLowerCase(); + result = result.filter((s) => + s.jobName.toLowerCase().includes(term) + || (displayNameMap[s.jobName]?.toLowerCase().includes(term) ?? false) + || (s.description?.toLowerCase().includes(term) ?? false), + ); + } + + // 활성/비활성 필터 + if (activeFilter === 'ACTIVE') { + result = result.filter((s) => s.active); + } else if (activeFilter === 'INACTIVE') { + result = result.filter((s) => !s.active); + } + + // 정렬 + result = [...result].sort((a, b) => { + if (sortKey === 'name') { + const aName = displayNameMap[a.jobName] || a.jobName; + const bName = displayNameMap[b.jobName] || b.jobName; + return aName.localeCompare(bName); + } + if (sortKey === 'nextFire') { + const aTime = a.nextFireTime ? new Date(a.nextFireTime).getTime() : Number.MAX_SAFE_INTEGER; + const bTime = b.nextFireTime ? new Date(b.nextFireTime).getTime() : Number.MAX_SAFE_INTEGER; + return aTime - bTime; + } + if (sortKey === 'active') { + if (a.active === b.active) { + const aName = displayNameMap[a.jobName] || a.jobName; + const bName = displayNameMap[b.jobName] || b.jobName; + return aName.localeCompare(bName); + } + return a.active ? 
-1 : 1; + } + return 0; + }); + + return result; + }, [schedules, searchTerm, activeFilter, sortKey, displayNameMap]); + + const handleJobSelect = async (jobName: string) => { + setSelectedJob(jobName); + setCronExpression(''); + setDescription(''); + setScheduleMode('new'); + + if (!jobName) return; + + setFormLoading(true); + try { + const schedule = await batchApi.getSchedule(jobName); + setCronExpression(schedule.cronExpression); + setDescription(schedule.description ?? ''); + setScheduleMode('existing'); + } catch { + // 404 = new schedule + setScheduleMode('new'); + } finally { + setFormLoading(false); + } + }; + + const handleSave = async () => { + if (!selectedJob) { + showToast('작업을 선택해주세요', 'error'); + return; + } + if (!cronExpression.trim()) { + showToast('Cron 표현식을 입력해주세요', 'error'); + return; + } + + setSaving(true); + try { + if (scheduleMode === 'existing') { + await batchApi.updateSchedule(selectedJob, { + cronExpression: cronExpression.trim(), + description: description.trim() || undefined, + }); + showToast('스케줄이 수정되었습니다', 'success'); + } else { + await batchApi.createSchedule({ + jobName: selectedJob, + cronExpression: cronExpression.trim(), + description: description.trim() || undefined, + }); + showToast('스케줄이 등록되었습니다', 'success'); + } + await loadSchedules(); + setFormOpen(false); + resetForm(); + } catch (err) { + const message = err instanceof Error ? err.message : '저장 실패'; + showToast(message, 'error'); + } finally { + setSaving(false); + } + }; + + const handleToggle = async (schedule: ScheduleResponse) => { + try { + await batchApi.toggleSchedule(schedule.jobName, !schedule.active); + showToast( + `${schedule.jobName} 스케줄이 ${schedule.active ? '비활성화' : '활성화'}되었습니다`, + 'success', + ); + await loadSchedules(); + } catch (err) { + const message = err instanceof Error ? 
err.message : '토글 실패'; + showToast(message, 'error'); + } + setConfirmAction(null); + }; + + const handleDelete = async (schedule: ScheduleResponse) => { + try { + await batchApi.deleteSchedule(schedule.jobName); + showToast(`${schedule.jobName} 스케줄이 삭제되었습니다`, 'success'); + await loadSchedules(); + // Close form if deleted schedule was being edited + if (selectedJob === schedule.jobName) { + resetForm(); + setFormOpen(false); + } + } catch (err) { + const message = err instanceof Error ? err.message : '삭제 실패'; + showToast(message, 'error'); + } + setConfirmAction(null); + }; + + const resetForm = () => { + setSelectedJob(''); + setCronExpression(''); + setDescription(''); + setScheduleMode('new'); + }; + + const handleEditFromCard = (schedule: ScheduleResponse) => { + setSelectedJob(schedule.jobName); + setCronExpression(schedule.cronExpression); + setDescription(schedule.description ?? ''); + setScheduleMode('existing'); + setFormOpen(true); + }; + + const handleNewSchedule = () => { + resetForm(); + setFormOpen(true); + }; + + const getScheduleLabel = (schedule: ScheduleResponse) => + displayNameMap[schedule.jobName] || schedule.jobName; + + if (listLoading) return ; + + return ( +
+ {/* Form Modal */} + {formOpen && ( +
+
setFormOpen(false)} /> +
+
+

+ {scheduleMode === 'existing' ? '스케줄 수정' : '스케줄 등록'} +

+ +
+ +
+ {/* Job Select */} +
+ +
+ + {selectedJob && ( + + {scheduleMode === 'existing' ? '기존 스케줄' : '새 스케줄'} + + )} + {formLoading && ( +
+ )} +
+
+ + {/* Cron Expression */} +
+ + setCronExpression(e.target.value)} + placeholder="0 0/15 * * * ?" + className="w-full rounded-lg border border-wing-border px-3 py-2 text-sm font-mono focus:outline-none focus:ring-2 focus:ring-wing-accent focus:border-wing-accent" + disabled={!selectedJob || formLoading} + /> +
+ + {/* Cron Presets */} +
+ +
+ {CRON_PRESETS.map(({ label, cron }) => ( + + ))} +
+
+ + {/* Cron Preview */} + {cronExpression.trim() && ( + + )} + + {/* Description */} +
+ + setDescription(e.target.value)} + placeholder="스케줄 설명 (선택)" + className="w-full rounded-lg border border-wing-border px-3 py-2 text-sm focus:outline-none focus:ring-2 focus:ring-wing-accent focus:border-wing-accent" + disabled={!selectedJob || formLoading} + /> +
+
+ + {/* Modal Footer */} +
+ + +
+
+
+ )} + + {/* Header */} +
+
+

스케줄 관리

+ setGuideOpen(true)} /> +
+
+ + + + 총 {schedules.length}개 스케줄 + +
+
+ + {/* Active Filter Tabs */} +
+ {ACTIVE_TABS.map((tab) => ( + + ))} +
+ + {/* Search + Sort + View Toggle */} +
+
+ {/* Search */} +
+ + + + + + setSearchTerm(e.target.value)} + className="w-full pl-10 pr-4 py-2 border border-wing-border rounded-lg text-sm + focus:ring-2 focus:ring-wing-accent focus:border-wing-accent outline-none" + /> + {searchTerm && ( + + )} +
+ + {/* Sort dropdown */} + + + {/* View mode toggle */} +
+ + +
+
+ + {searchTerm && ( +

+ {filteredSchedules.length}개 스케줄 검색됨 +

+ )} +
+ + {/* Schedule List */} + {filteredSchedules.length === 0 ? ( +
+ +
+ ) : viewMode === 'card' ? ( + /* Card View */ +
+ {filteredSchedules.map((schedule) => ( +
+
+
+

+ {getScheduleLabel(schedule)} +

+
+
+ + {schedule.active ? '활성' : '비활성'} + + {schedule.triggerState && ( + + {schedule.triggerState} + + )} +
+
+ + {/* Detail Info */} +
+
+ + {schedule.cronExpression} + +
+

+ 다음 실행: {formatDateTime(schedule.nextFireTime)} +

+ {schedule.previousFireTime && ( +

+ 이전 실행: {formatDateTime(schedule.previousFireTime)} +

+ )} +
+ + {/* Action Buttons */} +
+ + + +
+
+ ))} +
+ ) : ( + /* Table View */ +
+
+ + + + + + + + + + + + + {filteredSchedules.map((schedule) => ( + + + + + + + + + ))} + +
작업명Cron 표현식상태다음 실행이전 실행액션
+ {getScheduleLabel(schedule)} + + {schedule.cronExpression} + + + {schedule.active ? '활성' : '비활성'} + + {formatDateTime(schedule.nextFireTime)}{schedule.previousFireTime ? formatDateTime(schedule.previousFireTime) : '-'} +
+ + + +
+
+
+
+ )} + + {/* Confirm Modal */} + {confirmAction?.type === 'toggle' && ( + handleToggle(confirmAction.schedule)} + onCancel={() => setConfirmAction(null)} + /> + )} + {confirmAction?.type === 'delete' && ( + handleDelete(confirmAction.schedule)} + onCancel={() => setConfirmAction(null)} + /> + )} + setGuideOpen(false)} + pageTitle="스케줄 관리" + sections={SCHEDULES_GUIDE} + /> +
+ ); +} diff --git a/frontend/src/pages/Timeline.tsx b/frontend/src/pages/Timeline.tsx new file mode 100644 index 0000000..e1218f0 --- /dev/null +++ b/frontend/src/pages/Timeline.tsx @@ -0,0 +1,513 @@ +import { useState, useCallback, useRef, useEffect, useMemo } from 'react'; +import { Link } from 'react-router-dom'; +import { batchApi, type ExecutionInfo, type JobExecutionDto, type PeriodInfo, type ScheduleResponse, type ScheduleTimeline } from '../api/batchApi'; +import { formatDateTime, calculateDuration } from '../utils/formatters'; +import { usePoller } from '../hooks/usePoller'; +import { useToastContext } from '../contexts/ToastContext'; +import { getStatusColor } from '../components/StatusBadge'; +import StatusBadge from '../components/StatusBadge'; +import LoadingSpinner from '../components/LoadingSpinner'; +import EmptyState from '../components/EmptyState'; +import GuideModal, { HelpButton } from '../components/GuideModal'; + +type ViewType = 'day' | 'week' | 'month'; + +interface TooltipData { + jobName: string; + period: PeriodInfo; + execution: ExecutionInfo; + x: number; + y: number; +} + +interface SelectedCell { + jobName: string; + periodKey: string; + periodLabel: string; +} + +const VIEW_OPTIONS: { value: ViewType; label: string }[] = [ + { value: 'day', label: 'Day' }, + { value: 'week', label: 'Week' }, + { value: 'month', label: 'Month' }, +]; + +const LEGEND_ITEMS = [ + { status: 'COMPLETED', color: '#10b981', label: '완료' }, + { status: 'FAILED', color: '#ef4444', label: '실패' }, + { status: 'STARTED', color: '#3b82f6', label: '실행중' }, + { status: 'SCHEDULED', color: '#8b5cf6', label: '예정' }, + { status: 'NONE', color: '#e5e7eb', label: '없음' }, +]; + +const JOB_COL_WIDTH = 200; +const CELL_MIN_WIDTH = 80; +const POLLING_INTERVAL = 30000; + +function formatDateStr(date: Date): string { + const y = date.getFullYear(); + const m = String(date.getMonth() + 1).padStart(2, '0'); + const d = String(date.getDate()).padStart(2, '0'); + return 
`${y}-${m}-${d}`; +} + +function shiftDate(date: Date, view: ViewType, delta: number): Date { + const next = new Date(date); + switch (view) { + case 'day': + next.setDate(next.getDate() + delta); + break; + case 'week': + next.setDate(next.getDate() + delta * 7); + break; + case 'month': + next.setMonth(next.getMonth() + delta); + break; + } + return next; +} + +function isRunning(status: string): boolean { + return status === 'STARTED' || status === 'STARTING'; +} + +const TIMELINE_GUIDE = [ + { + title: '타임라인이란?', + content: '타임라인은 배치 작업의 실행 스케줄과 결과를 시각적으로 보여주는 화면입니다.\n세로축은 작업 목록, 가로축은 시간대를 나타냅니다.\n각 셀의 색상으로 실행 상태를 한눈에 파악할 수 있습니다.', + }, + { + title: '보기 모드', + content: '3가지 보기 모드를 제공합니다.\n• Day: 하루 단위 (시간대별 상세 보기)\n• Week: 일주일 단위\n• Month: 한 달 단위\n\n이전/다음 버튼으로 기간을 이동하고, "오늘" 버튼으로 현재 날짜로 돌아옵니다.', + }, + { + title: '색상 범례', + content: '각 셀의 색상은 실행 상태를 나타냅니다.\n• 초록색: 완료 (성공적으로 실행됨)\n• 빨간색: 실패 (오류 발생)\n• 파란색: 실행 중 (현재 진행 중)\n• 보라색: 예정 (아직 실행 전)\n• 회색: 없음 (해당 시간대에 실행 기록 없음)', + }, + { + title: '상세 보기', + content: '셀 위에 마우스를 올리면 툴팁으로 작업명, 기간, 상태 등 요약 정보를 보여줍니다.\n셀을 클릭하면 하단에 상세 패널이 열리며, 해당 시간대의 실행 이력 목록을 확인할 수 있습니다.\n"상세" 링크를 클릭하면 실행 상세 화면으로 이동합니다.', + }, +]; + +export default function Timeline() { + const { showToast } = useToastContext(); + + // Guide modal state + const [guideOpen, setGuideOpen] = useState(false); + + const [view, setView] = useState('day'); + const [currentDate, setCurrentDate] = useState(() => new Date()); + const [periodLabel, setPeriodLabel] = useState(''); + const [periods, setPeriods] = useState([]); + const [schedules, setSchedules] = useState([]); + const [loading, setLoading] = useState(true); + + const [scheduleList, setScheduleList] = useState([]); + + useEffect(() => { + batchApi.getSchedules().then(res => setScheduleList(res.schedules)).catch(() => {}); + }, []); + + const displayNameMap = useMemo>(() => { + const map: Record = {}; + for (const s of scheduleList) { + if (s.description) map[s.jobName] = s.description; + } + return map; 
+ }, [scheduleList]); + + // Tooltip + const [tooltip, setTooltip] = useState(null); + const tooltipTimeoutRef = useRef | null>(null); + + // Selected cell & detail panel + const [selectedCell, setSelectedCell] = useState(null); + const [detailExecutions, setDetailExecutions] = useState([]); + const [detailLoading, setDetailLoading] = useState(false); + + const loadTimeline = useCallback(async () => { + try { + const dateStr = formatDateStr(currentDate); + const result = await batchApi.getTimeline(view, dateStr); + setPeriodLabel(result.periodLabel); + setPeriods(result.periods); + setSchedules(result.schedules); + } catch (err) { + showToast('타임라인 조회 실패', 'error'); + console.error(err); + } finally { + setLoading(false); + } + }, [view, currentDate, showToast]); + + usePoller(loadTimeline, POLLING_INTERVAL, [view, currentDate]); + + const handlePrev = () => setCurrentDate((d) => shiftDate(d, view, -1)); + const handleNext = () => setCurrentDate((d) => shiftDate(d, view, 1)); + const handleToday = () => setCurrentDate(new Date()); + + const handleRefresh = async () => { + setLoading(true); + await loadTimeline(); + }; + + // Tooltip handlers + const handleCellMouseEnter = ( + e: React.MouseEvent, + jobName: string, + period: PeriodInfo, + execution: ExecutionInfo, + ) => { + if (tooltipTimeoutRef.current) { + clearTimeout(tooltipTimeoutRef.current); + } + const rect = (e.currentTarget as HTMLElement).getBoundingClientRect(); + setTooltip({ + jobName, + period, + execution, + x: rect.left + rect.width / 2, + y: rect.top, + }); + }; + + const handleCellMouseLeave = () => { + tooltipTimeoutRef.current = setTimeout(() => { + setTooltip(null); + }, 100); + }; + + // Clean up tooltip timeout + useEffect(() => { + return () => { + if (tooltipTimeoutRef.current) { + clearTimeout(tooltipTimeoutRef.current); + } + }; + }, []); + + // Cell click -> detail panel + const handleCellClick = async (jobName: string, periodKey: string, periodLabel: string) => { + // Toggle off if 
clicking same cell + if (selectedCell?.jobName === jobName && selectedCell?.periodKey === periodKey) { + setSelectedCell(null); + setDetailExecutions([]); + return; + } + + setSelectedCell({ jobName, periodKey, periodLabel }); + setDetailLoading(true); + setDetailExecutions([]); + + try { + const executions = await batchApi.getPeriodExecutions(jobName, view, periodKey); + setDetailExecutions(executions); + } catch (err) { + showToast('구간 실행 이력 조회 실패', 'error'); + console.error(err); + } finally { + setDetailLoading(false); + } + }; + + const closeDetail = () => { + setSelectedCell(null); + setDetailExecutions([]); + }; + + const gridTemplateColumns = `${JOB_COL_WIDTH}px repeat(${periods.length}, minmax(${CELL_MIN_WIDTH}px, 1fr))`; + + return ( +
+ {/* Controls */} +
+
+ {/* View Toggle */} +
+ {VIEW_OPTIONS.map((opt) => ( + + ))} +
+ + {/* Navigation */} +
+ + + +
+ + {/* Period Label */} + + {periodLabel} + + + {/* Refresh */} + + + {/* Help */} + setGuideOpen(true)} /> +
+
+ + {/* Legend */} +
+ {LEGEND_ITEMS.map((item) => ( +
+
+ {item.label} +
+ ))} +
+ + {/* Timeline Grid */} +
+ {loading ? ( + + ) : schedules.length === 0 ? ( + + ) : ( +
+
+ {/* Header Row */} +
+ 작업명 +
+ {periods.map((period) => ( +
+ {period.label} +
+ ))} + + {/* Data Rows */} + {schedules.map((schedule) => ( + <> + {/* Job Name (sticky) */} +
+ {displayNameMap[schedule.jobName] || schedule.jobName} +
+ + {/* Execution Cells */} + {periods.map((period) => { + const exec = schedule.executions[period.key]; + const hasExec = exec !== null && exec !== undefined; + const isSelected = + selectedCell?.jobName === schedule.jobName && + selectedCell?.periodKey === period.key; + const running = hasExec && isRunning(exec.status); + + return ( +
+ handleCellClick(schedule.jobName, period.key, period.label) + } + onMouseEnter={ + hasExec + ? (e) => handleCellMouseEnter(e, schedule.jobName, period, exec) + : undefined + } + onMouseLeave={hasExec ? handleCellMouseLeave : undefined} + > + {hasExec && ( +
+ )} +
+ ); + })} + + ))} +
+
+ )} +
+ + {/* Tooltip */} + {tooltip && ( +
+
+
{displayNameMap[tooltip.jobName] || tooltip.jobName}
+
+
기간: {tooltip.period.label}
+
+ 상태:{' '} + + {tooltip.execution.status} + +
+ {tooltip.execution.startTime && ( +
시작: {formatDateTime(tooltip.execution.startTime)}
+ )} + {tooltip.execution.endTime && ( +
종료: {formatDateTime(tooltip.execution.endTime)}
+ )} + {tooltip.execution.executionId && ( +
실행 ID: {tooltip.execution.executionId}
+ )} +
+ {/* Arrow */} +
+
+
+ )} + + {/* Detail Panel */} + {selectedCell && ( +
+
+
+

+ {displayNameMap[selectedCell.jobName] || selectedCell.jobName} +

+

+ 구간: {selectedCell.periodLabel} +

+
+ +
+ + {detailLoading ? ( + + ) : detailExecutions.length === 0 ? ( + + ) : ( +
+ + + + + + + + + + + + + {detailExecutions.map((exec) => ( + + + + + + + + + ))} + +
+ 실행 ID + + 상태 + + 시작 시간 + + 종료 시간 + + 소요 시간 + + 상세 +
+ #{exec.executionId} + + + + {formatDateTime(exec.startTime)} + + {formatDateTime(exec.endTime)} + + {calculateDuration(exec.startTime, exec.endTime)} + + + 상세 + +
+
+ )} +
+ )} + setGuideOpen(false)} + pageTitle="타임라인" + sections={TIMELINE_GUIDE} + /> +
+ ); +} diff --git a/frontend/src/theme/base.css b/frontend/src/theme/base.css new file mode 100644 index 0000000..803906a --- /dev/null +++ b/frontend/src/theme/base.css @@ -0,0 +1,25 @@ +body { + font-family: 'Noto Sans KR', sans-serif; + background: var(--wing-bg); + color: var(--wing-text); + transition: background-color 0.2s ease, color 0.2s ease; +} + +/* Scrollbar styling for dark mode */ +::-webkit-scrollbar { + width: 8px; + height: 8px; +} + +::-webkit-scrollbar-track { + background: var(--wing-surface); +} + +::-webkit-scrollbar-thumb { + background: var(--wing-muted); + border-radius: 4px; +} + +::-webkit-scrollbar-thumb:hover { + background: var(--wing-accent); +} diff --git a/frontend/src/theme/tokens.css b/frontend/src/theme/tokens.css new file mode 100644 index 0000000..1614ed3 --- /dev/null +++ b/frontend/src/theme/tokens.css @@ -0,0 +1,66 @@ +/* Dark theme (default) */ +:root, +[data-theme='dark'] { + --wing-bg: #020617; + --wing-surface: #0f172a; + --wing-card: #1e293b; + --wing-border: #1e3a5f; + --wing-text: #e2e8f0; + --wing-muted: #64748b; + --wing-accent: #3b82f6; + --wing-danger: #ef4444; + --wing-warning: #f59e0b; + --wing-success: #22c55e; + --wing-glass: rgba(15, 23, 42, 0.92); + --wing-glass-dense: rgba(15, 23, 42, 0.95); + --wing-overlay: rgba(2, 6, 23, 0.42); + --wing-card-alpha: rgba(30, 41, 59, 0.55); + --wing-subtle: rgba(255, 255, 255, 0.03); + --wing-hover: rgba(255, 255, 255, 0.05); + --wing-input-bg: #0f172a; + --wing-input-border: #334155; +} + +/* Light theme */ +[data-theme='light'] { + --wing-bg: #e2e8f0; + --wing-surface: #ffffff; + --wing-card: #f1f5f9; + --wing-border: #94a3b8; + --wing-text: #0f172a; + --wing-muted: #64748b; + --wing-accent: #2563eb; + --wing-danger: #dc2626; + --wing-warning: #d97706; + --wing-success: #16a34a; + --wing-glass: rgba(255, 255, 255, 0.92); + --wing-glass-dense: rgba(255, 255, 255, 0.95); + --wing-overlay: rgba(0, 0, 0, 0.25); + --wing-card-alpha: rgba(226, 232, 240, 0.6); + --wing-subtle: 
rgba(0, 0, 0, 0.03); + --wing-hover: rgba(0, 0, 0, 0.04); + --wing-input-bg: #ffffff; + --wing-input-border: #cbd5e1; +} + +@theme { + --color-wing-bg: var(--wing-bg); + --color-wing-surface: var(--wing-surface); + --color-wing-card: var(--wing-card); + --color-wing-border: var(--wing-border); + --color-wing-text: var(--wing-text); + --color-wing-muted: var(--wing-muted); + --color-wing-accent: var(--wing-accent); + --color-wing-danger: var(--wing-danger); + --color-wing-warning: var(--wing-warning); + --color-wing-success: var(--wing-success); + --color-wing-glass: var(--wing-glass); + --color-wing-glass-dense: var(--wing-glass-dense); + --color-wing-overlay: var(--wing-overlay); + --color-wing-card-alpha: var(--wing-card-alpha); + --color-wing-subtle: var(--wing-subtle); + --color-wing-hover: var(--wing-hover); + --color-wing-input-bg: var(--wing-input-bg); + --color-wing-input-border: var(--wing-input-border); + --font-sans: 'Noto Sans KR', sans-serif; +} diff --git a/frontend/src/utils/cronPreview.ts b/frontend/src/utils/cronPreview.ts new file mode 100644 index 0000000..7ed0e84 --- /dev/null +++ b/frontend/src/utils/cronPreview.ts @@ -0,0 +1,154 @@ +/** + * Quartz 형식 Cron 표현식의 다음 실행 시간을 계산한다. 
+ * 형식: 초 분 시 일 월 요일 + */ +export function getNextExecutions(cron: string, count: number): Date[] { + const parts = cron.trim().split(/\s+/); + if (parts.length < 6) return []; + + const [secField, minField, hourField, dayField, monthField, dowField] = parts; + + if (hasUnsupportedToken(dayField) || hasUnsupportedToken(dowField)) { + return []; + } + + const seconds = parseField(secField, 0, 59); + const minutes = parseField(minField, 0, 59); + const hours = parseField(hourField, 0, 23); + const daysOfMonth = parseField(dayField, 1, 31); + const months = parseField(monthField, 1, 12); + const daysOfWeek = parseDowField(dowField); + + if (!seconds || !minutes || !hours || !months) return []; + + const results: Date[] = []; + const now = new Date(); + const cursor = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(), now.getMinutes(), now.getSeconds() + 1); + cursor.setMilliseconds(0); + + const limit = new Date(now.getTime() + 365 * 24 * 60 * 60 * 1000); + + while (results.length < count && cursor.getTime() <= limit.getTime()) { + const month = cursor.getMonth() + 1; + if (!months.includes(month)) { + cursor.setMonth(cursor.getMonth() + 1, 1); + cursor.setHours(0, 0, 0, 0); + continue; + } + + const day = cursor.getDate(); + const dayMatches = daysOfMonth ? daysOfMonth.includes(day) : true; + const dowMatches = daysOfWeek ? daysOfWeek.includes(cursor.getDay()) : true; + + const needDayCheck = dayField !== '?' && dowField !== '?'; + const dayOk = needDayCheck ? 
dayMatches && dowMatches : dayMatches && dowMatches; + + if (!dayOk) { + cursor.setDate(cursor.getDate() + 1); + cursor.setHours(0, 0, 0, 0); + continue; + } + + const hour = cursor.getHours(); + if (!hours.includes(hour)) { + cursor.setHours(cursor.getHours() + 1, 0, 0, 0); + continue; + } + + const minute = cursor.getMinutes(); + if (!minutes.includes(minute)) { + cursor.setMinutes(cursor.getMinutes() + 1, 0, 0); + continue; + } + + const second = cursor.getSeconds(); + if (!seconds.includes(second)) { + cursor.setSeconds(cursor.getSeconds() + 1, 0); + continue; + } + + results.push(new Date(cursor)); + cursor.setSeconds(cursor.getSeconds() + 1); + } + + return results; +} + +function hasUnsupportedToken(field: string): boolean { + return /[LW#]/.test(field); +} + +function parseField(field: string, min: number, max: number): number[] | null { + if (field === '?') return null; + if (field === '*') return range(min, max); + + const values = new Set(); + + for (const part of field.split(',')) { + const stepMatch = part.match(/^(.+)\/(\d+)$/); + if (stepMatch) { + const [, base, stepStr] = stepMatch; + const step = parseInt(stepStr, 10); + if (step <= 0) return range(min, max); + let start = min; + let end = max; + + if (base === '*') { + start = min; + } else if (base.includes('-')) { + const [lo, hi] = base.split('-').map(Number); + start = lo; + end = hi; + } else { + start = parseInt(base, 10); + } + + for (let v = start; v <= end; v += step) { + if (v >= min && v <= max) values.add(v); + } + continue; + } + + const rangeMatch = part.match(/^(\d+)-(\d+)$/); + if (rangeMatch) { + const lo = parseInt(rangeMatch[1], 10); + const hi = parseInt(rangeMatch[2], 10); + for (let v = lo; v <= hi; v++) { + if (v >= min && v <= max) values.add(v); + } + continue; + } + + const num = parseInt(part, 10); + if (!isNaN(num) && num >= min && num <= max) { + values.add(num); + } + } + + return values.size > 0 ? 
Array.from(values).sort((a, b) => a - b) : range(min, max); +} + +function parseDowField(field: string): number[] | null { + if (field === '?' || field === '*') return null; + + const dayMap: Record = { + SUN: '0', MON: '1', TUE: '2', WED: '3', THU: '4', FRI: '5', SAT: '6', + }; + + let normalized = field.toUpperCase(); + for (const [name, num] of Object.entries(dayMap)) { + normalized = normalized.replace(new RegExp(name, 'g'), num); + } + + // Quartz uses 1=SUN..7=SAT, convert to JS 0=SUN..6=SAT + const parsed = parseField(normalized, 1, 7); + if (!parsed) return null; + + return parsed.map((v) => v - 1); +} + +function range(min: number, max: number): number[] { + const result: number[] = []; + for (let i = min; i <= max; i++) result.push(i); + return result; +} diff --git a/frontend/src/utils/formatters.ts b/frontend/src/utils/formatters.ts new file mode 100644 index 0000000..c1cda20 --- /dev/null +++ b/frontend/src/utils/formatters.ts @@ -0,0 +1,58 @@ +export function formatDateTime(dateTimeStr: string | null | undefined): string { + if (!dateTimeStr) return '-'; + try { + const date = new Date(dateTimeStr); + if (isNaN(date.getTime())) return '-'; + const y = date.getFullYear(); + const m = String(date.getMonth() + 1).padStart(2, '0'); + const d = String(date.getDate()).padStart(2, '0'); + const h = String(date.getHours()).padStart(2, '0'); + const min = String(date.getMinutes()).padStart(2, '0'); + const s = String(date.getSeconds()).padStart(2, '0'); + return `${y}-${m}-${d} ${h}:${min}:${s}`; + } catch { + return '-'; + } +} + +export function formatDateTimeShort(dateTimeStr: string | null | undefined): string { + if (!dateTimeStr) return '-'; + try { + const date = new Date(dateTimeStr); + if (isNaN(date.getTime())) return '-'; + const m = String(date.getMonth() + 1).padStart(2, '0'); + const d = String(date.getDate()).padStart(2, '0'); + const h = String(date.getHours()).padStart(2, '0'); + const min = String(date.getMinutes()).padStart(2, '0'); + return 
`${m}/${d} ${h}:${min}`; + } catch { + return '-'; + } +} + +export function formatDuration(ms: number | null | undefined): string { + if (ms == null || ms < 0) return '-'; + const totalSeconds = Math.floor(ms / 1000); + const hours = Math.floor(totalSeconds / 3600); + const minutes = Math.floor((totalSeconds % 3600) / 60); + const seconds = totalSeconds % 60; + + if (hours > 0) return `${hours}시간 ${minutes}분 ${seconds}초`; + if (minutes > 0) return `${minutes}분 ${seconds}초`; + return `${seconds}초`; +} + +export function calculateDuration( + startTime: string | null | undefined, + endTime: string | null | undefined, +): string { + if (!startTime) return '-'; + const start = new Date(startTime).getTime(); + if (isNaN(start)) return '-'; + + if (!endTime) return '실행 중...'; + const end = new Date(endTime).getTime(); + if (isNaN(end)) return '-'; + + return formatDuration(end - start); +} diff --git a/frontend/tsconfig.app.json b/frontend/tsconfig.app.json new file mode 100644 index 0000000..a9b5a59 --- /dev/null +++ b/frontend/tsconfig.app.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", + "target": "ES2022", + "useDefineForClassFields": true, + "lib": ["ES2022", "DOM", "DOM.Iterable"], + "module": "ESNext", + "types": ["vite/client"], + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "react-jsx", + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "erasableSyntaxOnly": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["src"] +} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..1ffef60 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,7 @@ +{ + "files": [], + "references": [ + { "path": 
"./tsconfig.app.json" }, + { "path": "./tsconfig.node.json" } + ] +} diff --git a/frontend/tsconfig.node.json b/frontend/tsconfig.node.json new file mode 100644 index 0000000..8a67f62 --- /dev/null +++ b/frontend/tsconfig.node.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", + "target": "ES2023", + "lib": ["ES2023"], + "module": "ESNext", + "types": ["node"], + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "moduleDetection": "force", + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "erasableSyntaxOnly": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["vite.config.ts"] +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..ea9ef25 --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,21 @@ +import { defineConfig } from 'vite' +import react from '@vitejs/plugin-react' +import tailwindcss from '@tailwindcss/vite' + +export default defineConfig({ + plugins: [react(), tailwindcss()], + server: { + port: 5173, + proxy: { + '/snp-sync/api': { + target: 'http://localhost:8051', + changeOrigin: true, + }, + }, + }, + base: '/snp-sync/', + build: { + outDir: '../src/main/resources/static', + emptyOutDir: true, + }, +}) diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..e87d1fb --- /dev/null +++ b/pom.xml @@ -0,0 +1,186 @@ + + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 3.2.1 + + + + com.snp + snp-sync-batch + 1.0.0 + SNP Sync Batch + Spring Batch project for JSON to PostgreSQL with Web GUI + + + 17 + UTF-8 + 17 + 17 + + + 3.2.1 + 5.1.0 + 42.7.6 + 1.18.30 + 2.5.0 + + + + + + org.springframework.boot + spring-boot-starter-web + + + + + org.springframework.boot + spring-boot-starter-batch + + + + + 
org.springframework.boot + spring-boot-starter-data-jpa + + + + + org.postgresql + postgresql + ${postgresql.version} + runtime + + + + + org.springframework.boot + spring-boot-starter-quartz + + + + + com.fasterxml.jackson.core + jackson-databind + + + + + org.projectlombok + lombok + ${lombok.version} + true + + + + + org.springframework.boot + spring-boot-devtools + runtime + true + + + + + org.springframework.boot + spring-boot-starter-actuator + + + + + org.springframework.boot + spring-boot-starter-webflux + + + + + org.springdoc + springdoc-openapi-starter-webmvc-ui + 2.3.0 + + + + + org.springframework.boot + spring-boot-starter-test + test + + + + org.springframework.batch + spring-batch-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + + + org.projectlombok + lombok + + + + + + com.github.eirslett + frontend-maven-plugin + 1.15.1 + + frontend + v20.19.0 + + + + install-node-and-npm + install-node-and-npm + + + npm-install + npm + + install + + + + npm-build + npm + + run build + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 17 + 17 + UTF-8 + + + org.projectlombok + lombok + ${lombok.version} + + + + + + + diff --git a/src/main/java/com/snp/batch/SnpBatchApplication.java b/src/main/java/com/snp/batch/SnpBatchApplication.java new file mode 100644 index 0000000..bf4315f --- /dev/null +++ b/src/main/java/com/snp/batch/SnpBatchApplication.java @@ -0,0 +1,14 @@ +package com.snp.batch; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.scheduling.annotation.EnableScheduling; + +@SpringBootApplication +@EnableScheduling +public class SnpBatchApplication { + + public static void main(String[] args) { + SpringApplication.run(SnpBatchApplication.class, args); + } +} diff --git a/src/main/java/com/snp/batch/common/batch/config/BaseJobConfig.java 
b/src/main/java/com/snp/batch/common/batch/config/BaseJobConfig.java new file mode 100644 index 0000000..bace6af --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/config/BaseJobConfig.java @@ -0,0 +1,140 @@ +package com.snp.batch.common.batch.config; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * Batch Job 설정을 위한 추상 클래스 + * Reader → Processor → Writer 패턴의 표준 Job 구성 제공 + * + * @param 입력 타입 (Reader 출력, Processor 입력) + * @param 출력 타입 (Processor 출력, Writer 입력) + */ +@Slf4j +@RequiredArgsConstructor +public abstract class BaseJobConfig { + + protected final JobRepository jobRepository; + protected final PlatformTransactionManager transactionManager; + + /** + * Job 이름 반환 (하위 클래스에서 구현) + * 예: "shipDataImportJob" + */ + protected abstract String getJobName(); + + /** + * Step 이름 반환 (선택사항, 기본: {jobName}Step) + */ + protected String getStepName() { + return getJobName() + "Step"; + } + + /** + * Reader 생성 (하위 클래스에서 구현) + */ + protected abstract ItemReader createReader(); + + /** + * Processor 생성 (하위 클래스에서 구현) + * 처리 로직이 없는 경우 null 반환 가능 + */ + protected abstract ItemProcessor createProcessor(); + + /** + * Writer 생성 (하위 클래스에서 구현) + */ + protected abstract ItemWriter createWriter(); + + /** + * Chunk 크기 반환 (선택사항, 기본: 100) + */ + protected int getChunkSize() { + return 100; + } + + /** + * Job 시작 전 실행 (선택사항) + * Job Listener 등록 시 사용 + */ + protected void configureJob(JobBuilder jobBuilder) { + // 기본 구현: 아무것도 하지 않음 + // 하위 클래스에서 
필요시 오버라이드 + // 예: jobBuilder.listener(jobExecutionListener()) + } + + /** + * Step 커스터마이징 (선택사항) + * Step Listener, FaultTolerant 등 설정 시 사용 + */ + protected void configureStep(StepBuilder stepBuilder) { + // 기본 구현: 아무것도 하지 않음 + // 하위 클래스에서 필요시 오버라이드 + // 예: stepBuilder.listener(stepExecutionListener()) + // stepBuilder.faultTolerant().skip(Exception.class).skipLimit(10) + } + + /** + * ItemReader/Processor/Writer를 사용하는 표준 Step 생성 + */ + public Step step() { + log.info("표준 Step 생성: {}", getStepName()); + + ItemProcessor processor = createProcessor(); + StepBuilder stepBuilder = new StepBuilder(getStepName(), jobRepository); + + if (processor != null) { + var chunkBuilder = stepBuilder + .chunk(getChunkSize(), transactionManager) + .reader(createReader()) + .processor(processor) + .writer(createWriter()); + + configureStep(stepBuilder); + return chunkBuilder.build(); + } else { + @SuppressWarnings("unchecked") + var chunkBuilder = stepBuilder + .chunk(getChunkSize(), transactionManager) + .reader(createReader()) + .writer((ItemWriter) createWriter()); + + configureStep(stepBuilder); + return chunkBuilder.build(); + } + } + + /** + * Job 흐름 정의 (하위 클래스에서 Job의 start() 및 next()를 정의) + * **멀티 Step 구현을 위해 이 메서드를 추상 메서드로 변경합니다.** + */ + protected abstract Job createJobFlow(JobBuilder jobBuilder); + + /** + * Job 생성 (표준 구현 제공) + * **변경된 createJobFlow를 호출하도록 수정합니다.** + */ + public final Job job() { + log.info("Job 생성 시작: {}", getJobName()); + + JobBuilder jobBuilder = new JobBuilder(getJobName(), jobRepository); + + // 커스텀 설정 적용 + configureJob(jobBuilder); + + // Job 흐름 정의 + Job job = createJobFlow(jobBuilder); + + log.info("Job 생성 완료: {}", getJobName()); + return job; + } +} diff --git a/src/main/java/com/snp/batch/common/batch/entity/BaseEntity.java b/src/main/java/com/snp/batch/common/batch/entity/BaseEntity.java new file mode 100644 index 0000000..1b35a2a --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/entity/BaseEntity.java @@ -0,0 +1,46 @@ +package 
com.snp.batch.common.batch.entity; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +import java.time.LocalDateTime; + +/** + * 모든 Entity의 공통 베이스 클래스 - JDBC 전용 + * 생성/수정 감사(Audit) 필드 제공 + * + * 이 필드들은 Repository의 Insert/Update 시 자동으로 설정됩니다. + * BaseJdbcRepository가 감사 필드를 자동으로 관리합니다. + */ +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public abstract class BaseEntity { + + /** + * 생성 일시 + * 컬럼: created_at (TIMESTAMP) + */ + private LocalDateTime createdAt; + + /** + * 수정 일시 + * 컬럼: updated_at (TIMESTAMP) + */ + private LocalDateTime updatedAt; + + /** + * 생성자 + * 컬럼: created_by (VARCHAR(100)) + */ + private String createdBy; + + /** + * 수정자 + * 컬럼: updated_by (VARCHAR(100)) + */ + private String updatedBy; +} diff --git a/src/main/java/com/snp/batch/common/batch/processor/BaseProcessor.java b/src/main/java/com/snp/batch/common/batch/processor/BaseProcessor.java new file mode 100644 index 0000000..0add9cc --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/processor/BaseProcessor.java @@ -0,0 +1,61 @@ +package com.snp.batch.common.batch.processor; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +/** + * ItemProcessor 추상 클래스 (v2.0) + * 데이터 변환 및 처리 로직을 위한 템플릿 제공 + * + * Template Method Pattern: + * - process(): 공통 로직 (null 체크, 로깅) + * - processItem(): 하위 클래스에서 변환 로직 구현 + * + * 기본 용도: + * - 단순 변환: DTO → Entity + * - 데이터 필터링: null 반환 시 해당 아이템 스킵 + * - 데이터 검증: 유효하지 않은 데이터 필터링 + * + * 고급 용도 (다중 depth JSON 처리): + * - 중첩된 JSON을 여러 Entity로 분해 + * - 1:N 관계 처리 (Order → OrderItems) + * - CompositeWriter와 조합하여 여러 테이블에 저장 + * + * 예제: + * - 단순 변환: ProductDataProcessor (DTO → Entity) + * - 복잡한 처리: 복잡한 JSON 처리 예제 참고 + * + * @param 입력 DTO 타입 + * @param 출력 Entity 타입 + */ +@Slf4j +public abstract class BaseProcessor implements ItemProcessor { + + /** + * 데이터 변환 로직 (하위 클래스에서 구현) + * DTO → Entity 변환 등의 비즈니스 로직 구현 + * + * @param item 
입력 DTO + * @return 변환된 Entity (필터링 시 null 반환 가능) + * @throws Exception 처리 중 오류 발생 시 + */ + protected abstract O processItem(I item) throws Exception; + + /** + * Spring Batch ItemProcessor 인터페이스 구현 + * 데이터 변환 및 필터링 수행 + * + * @param item 입력 DTO + * @return 변환된 Entity (null이면 해당 아이템 스킵) + * @throws Exception 처리 중 오류 발생 시 + */ + @Override + public O process(I item) throws Exception { + if (item == null) { + return null; + } + + log.debug("데이터 처리 중: {}", item); + return processItem(item); + } +} diff --git a/src/main/java/com/snp/batch/common/batch/reader/BaseApiReader.java b/src/main/java/com/snp/batch/common/batch/reader/BaseApiReader.java new file mode 100644 index 0000000..6923f99 --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/reader/BaseApiReader.java @@ -0,0 +1,633 @@ +package com.snp.batch.common.batch.reader; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.annotation.BeforeStep; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.item.ItemReader; +import org.springframework.http.HttpHeaders; +import org.springframework.http.MediaType; +import org.springframework.web.reactive.function.client.WebClient; +import org.springframework.web.util.UriBuilder; + +import java.net.URI; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +/** + * REST API 기반 ItemReader 추상 클래스 (v3.0 - Chunk 기반) + * + * 주요 기능: + * - HTTP Method 지원: GET, POST + * - 다중 Query Parameter 처리 + * - Path Variable 지원 + * - Request Body 지원 (POST) + * - 동적 Header 설정 + * - 복잡한 JSON 응답 파싱 + * - ✨ Chunk 기반 배치 처리 (Iterator 패턴) + * + * Template Method Pattern: + * - read(): 공통 로직 (1건씩 순차 반환) + * - fetchNextBatch(): 다음 배치 조회 (구현체에서 오버라이드) + * - 새로운 훅 메서드들: HTTP Method, 파라미터, 헤더 등 + 
* + * 동작 방식: + * 1. read() 호출 시 currentBatch가 비어있으면 fetchNextBatch() 호출 + * 2. fetchNextBatch()가 100건 반환 + * 3. read()가 100번 호출되면서 1건씩 반환 + * 4. 100건 모두 반환되면 다시 fetchNextBatch() 호출 + * 5. fetchNextBatch()가 null/empty 반환 시 Job 종료 + * + * 하위 호환성: + * - 기존 fetchDataFromApi() 메서드 계속 지원 + * - 새로운 fetchNextBatch() 메서드 사용 권장 + * + * @param DTO 타입 (API 응답 데이터) + */ +@Slf4j +public abstract class BaseApiReader implements ItemReader { + + // Chunk 기반 Iterator 패턴 + private java.util.Iterator currentBatch; + private boolean initialized = false; + private boolean useChunkMode = false; // Chunk 모드 사용 여부 + + // 하위 호환성을 위한 필드 (fetchDataFromApi 사용 시) + private List legacyDataList; + private int legacyNextIndex = 0; + + // WebClient는 하위 클래스에서 주입받아 사용 + protected WebClient webClient; + + // StepExecution - API 정보 저장용 + protected StepExecution stepExecution; + + // API 호출 통계 + private int totalApiCalls = 0; + private int completedApiCalls = 0; + + /** + * 기본 생성자 (WebClient 없이 사용 - Mock 데이터용) + */ + protected BaseApiReader() { + this.webClient = null; + } + + /** + * WebClient를 주입받는 생성자 (실제 API 연동용) + * + * @param webClient Spring WebClient 인스턴스 + */ + protected BaseApiReader(WebClient webClient) { + this.webClient = webClient; + } + + /** + * Step 실행 전 초기화 및 API 정보 저장 + * Spring Batch가 자동으로 StepExecution을 주입하고 이 메서드를 호출함 + * + * @param stepExecution Step 실행 정보 + */ + @BeforeStep + public void saveApiInfoToContext(StepExecution stepExecution) { + this.stepExecution = stepExecution; + + // API 정보를 StepExecutionContext에 저장 + ExecutionContext context = stepExecution.getExecutionContext(); + + // WebClient가 있는 경우에만 API 정보 저장 + if (webClient != null) { + // 1. API URL 저장 + String baseUrl = getApiBaseUrl(); + String apiPath = getApiPath(); + String fullUrl = baseUrl != null ? baseUrl + apiPath : apiPath; + context.putString("apiUrl", fullUrl); + + // 2. HTTP Method 저장 + context.putString("apiMethod", getHttpMethod()); + + // 3. 
API Parameters 저장 + Map params = new HashMap<>(); + Map queryParams = getQueryParams(); + if (queryParams != null && !queryParams.isEmpty()) { + params.putAll(queryParams); + } + Map pathVars = getPathVariables(); + if (pathVars != null && !pathVars.isEmpty()) { + params.putAll(pathVars); + } + context.put("apiParameters", params); + + // 4. 통계 초기화 + context.putInt("totalApiCalls", 0); + context.putInt("completedApiCalls", 0); + + log.info("[{}] API 정보 저장: {} {}", getReaderName(), getHttpMethod(), fullUrl); + } + } + + /** + * API Base URL 반환 (WebClient의 baseUrl) + * 하위 클래스에서 필요 시 오버라이드 + */ + protected String getApiBaseUrl() { + return ""; + } + + /** + * API 호출 통계 업데이트 + */ + protected void updateApiCallStats(int totalCalls, int completedCalls) { + if (stepExecution != null) { + ExecutionContext context = stepExecution.getExecutionContext(); + context.putInt("totalApiCalls", totalCalls); + context.putInt("completedApiCalls", completedCalls); + + // 마지막 호출 시간 저장 + String lastCallTime = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + context.putString("lastCallTime", lastCallTime); + + this.totalApiCalls = totalCalls; + this.completedApiCalls = completedCalls; + } + } + + // ======================================== + // ItemReader 구현 (공통 로직) + // ======================================== + + /** + * Spring Batch ItemReader 인터페이스 구현 + * 데이터를 순차적으로 하나씩 반환 + * + * Chunk 기반 동작: + * 1. currentBatch가 비어있으면 fetchNextBatch() 호출하여 다음 배치 로드 + * 2. Iterator에서 1건씩 반환 + * 3. Iterator가 비면 다시 1번으로 + * 4. 
fetchNextBatch()가 null/empty 반환하면 Job 종료 + * + * @return 다음 데이터 항목 (더 이상 없으면 null) + */ + @Override + public T read() throws Exception { + // Chunk 모드 사용 여부는 첫 호출 시 결정 + if (!initialized && !useChunkMode) { + // Legacy 모드로 시작 + return readLegacyMode(); + } + + // Chunk 모드가 활성화된 경우 + if (useChunkMode) { + return readChunkMode(); + } + + // Legacy 모드 + return readLegacyMode(); + } + + /** + * Chunk 모드 활성화 (하위 클래스에서 명시적 호출) + */ + protected void enableChunkMode() { + this.useChunkMode = true; + } + + /** + * Chunk 기반 read() 구현 (신규 방식) + */ + private T readChunkMode() throws Exception { + // 최초 호출 시 초기화 + if (!initialized) { + beforeFetch(); + initialized = true; + } + + // currentBatch가 비어있으면 다음 배치 로드 + if (currentBatch == null || !currentBatch.hasNext()) { + List nextBatch = fetchNextBatch(); + + // 더 이상 데이터가 없으면 종료 + if (nextBatch == null || nextBatch.isEmpty()) { + afterFetch(null); + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // Iterator 갱신 + currentBatch = nextBatch.iterator(); + log.debug("[{}] 배치 로드 완료: {} 건", getReaderName(), nextBatch.size()); + } + + // Iterator에서 1건씩 반환 + return currentBatch.next(); + } + + /** + * Legacy 모드 read() 구현 (하위 호환성) + * 기존 fetchDataFromApi()를 오버라이드한 구현체 지원 + */ + private T readLegacyMode() throws Exception { + // 최초 호출 시 API에서 전체 데이터 조회 + if (legacyDataList == null) { + beforeFetch(); + legacyDataList = fetchDataFromApi(); + afterFetch(legacyDataList); + log.info("[{}] 데이터 {}건 조회 완료 (Legacy 모드)", + getReaderName(), legacyDataList != null ? 
legacyDataList.size() : 0); + } + + // 데이터를 순차적으로 반환 + if (legacyDataList != null && legacyNextIndex < legacyDataList.size()) { + return legacyDataList.get(legacyNextIndex++); + } else { + return null; // 데이터 끝 + } + } + + + // ======================================== + // 핵심 추상 메서드 (하위 클래스에서 구현) + // ======================================== + + /** + * ✨ 다음 배치 데이터를 조회하여 리스트로 반환 (신규 방식 - Chunk 기반) + * + * Chunk 기반 배치 처리를 위한 메서드: + * - read()가 호출될 때마다 필요 시 이 메서드가 호출됨 + * - 일반적으로 100~1000건씩 반환 + * - 더 이상 데이터가 없으면 null 또는 빈 리스트 반환 + * + * 구현 예시: + *
+     * private int currentPage = 0;
+     * private final int pageSize = 100;
+     *
+     * @Override
+     * protected List fetchNextBatch() {
+     *     if (currentPage >= totalPages) {
+     *         return null; // 종료
+     *     }
+     *
+     *     // API 호출 (100건씩)
+     *     ProductApiResponse response = callApiForPage(currentPage, pageSize);
+     *     currentPage++;
+     *
+     *     return response.getProducts();
+     * }
+     * 
+ * + * @return 다음 배치 데이터 리스트 (null 또는 빈 리스트면 종료) + * @throws Exception API 호출 실패 등 + */ + protected List fetchNextBatch() throws Exception { + // 기본 구현: Legacy 모드 fallback + // 하위 클래스에서 오버라이드 안 하면 fetchDataFromApi() 사용 + return null; + } + + /** + * API에서 데이터를 조회하여 리스트로 반환 (Legacy 방식 - 하위 호환성) + * + * ⚠️ Deprecated: fetchNextBatch()를 사용하세요. + * + * 구현 방법: + * 1. WebClient 없이 Mock 데이터 생성 (sample용) + * 2. WebClient로 실제 API 호출 (실전용) + * 3. callApi() 헬퍼 메서드 사용 (권장) + * + * @return API에서 조회한 데이터 리스트 (전체) + */ + protected List fetchDataFromApi() { + // 기본 구현: 빈 리스트 반환 + // 하위 클래스에서 오버라이드 필요 + return new ArrayList<>(); + } + + /** + * Reader 이름 반환 (로깅용) + * + * @return Reader 이름 (예: "ProductDataReader") + */ + protected abstract String getReaderName(); + + // ======================================== + // HTTP 요청 설정 메서드 (선택적 오버라이드) + // ======================================== + + /** + * HTTP Method 반환 + * + * 기본값: GET + * POST 요청 시 오버라이드 + * + * @return HTTP Method ("GET" 또는 "POST") + */ + protected String getHttpMethod() { + return "GET"; + } + + /** + * API 엔드포인트 경로 반환 + * + * 예제: + * - "/api/v1/products" + * - "/api/v1/orders/{orderId}" (Path Variable 포함) + * + * @return API 경로 + */ + protected String getApiPath() { + return ""; + } + + /** + * Query Parameter 맵 반환 + * + * 예제: + * Map params = new HashMap<>(); + * params.put("status", "active"); + * params.put("page", 1); + * params.put("size", 100); + * return params; + * + * @return Query Parameter 맵 (null이면 파라미터 없음) + */ + protected Map getQueryParams() { + return null; + } + + /** + * Path Variable 맵 반환 + * + * 예제: + * Map pathVars = new HashMap<>(); + * pathVars.put("orderId", "ORD-001"); + * return pathVars; + * + * @return Path Variable 맵 (null이면 Path Variable 없음) + */ + protected Map getPathVariables() { + return null; + } + + /** + * Request Body 반환 (POST 요청용) + * + * 예제: + * return RequestDto.builder() + * .startDate("2025-01-01") + * .endDate("2025-12-31") + * .build(); + * + * @return Request Body 객체 
(null이면 Body 없음) + */ + protected Object getRequestBody() { + return null; + } + + /** + * HTTP Header 맵 반환 + * + * 예제: + * Map headers = new HashMap<>(); + * headers.put("Authorization", "Bearer token123"); + * headers.put("X-Custom-Header", "value"); + * return headers; + * + * 기본 헤더 (자동 추가): + * - Content-Type: application/json + * - Accept: application/json + * + * @return HTTP Header 맵 (null이면 기본 헤더만 사용) + */ + protected Map getHeaders() { + return null; + } + + /** + * API 응답 타입 반환 + * + * 예제: + * return ProductApiResponse.class; + * + * @return 응답 클래스 타입 + */ + protected Class getResponseType() { + return Object.class; + } + + /** + * API 응답에서 데이터 리스트 추출 + * + * 복잡한 JSON 응답 구조 처리: + * - 단순: response.getData() + * - 중첩: response.getResult().getItems() + * + * @param response API 응답 객체 + * @return 추출된 데이터 리스트 + */ + protected List extractDataFromResponse(Object response) { + return Collections.emptyList(); + } + + // ======================================== + // 라이프사이클 훅 메서드 (선택적 오버라이드) + // ======================================== + + /** + * API 호출 전 전처리 + * + * 사용 예: + * - 파라미터 검증 + * - 로깅 + * - 캐시 확인 + */ + protected void beforeFetch() { + log.debug("[{}] API 호출 준비 중...", getReaderName()); + } + + /** + * API 호출 후 후처리 + * + * 사용 예: + * - 데이터 검증 + * - 로깅 + * - 캐시 저장 + * + * @param data 조회된 데이터 리스트 + */ + protected void afterFetch(List data) { + log.debug("[{}] API 호출 완료", getReaderName()); + } + + /** + * API 호출 실패 시 에러 처리 + * + * 기본 동작: 빈 리스트 반환 (Job 실패 방지) + * 오버라이드 시: 예외 던지기 또는 재시도 로직 구현 + * + * @param e 발생한 예외 + * @return 대체 데이터 리스트 (빈 리스트 또는 캐시 데이터) + */ + protected List handleApiError(Exception e) { + log.error("[{}] API 호출 실패: {}", getReaderName(), e.getMessage(), e); + return new ArrayList<>(); + } + + // ======================================== + // 헬퍼 메서드 (하위 클래스에서 사용 가능) + // ======================================== + + /** + * WebClient를 사용한 API 호출 (GET/POST 자동 처리) + * + * 사용 방법 (fetchDataFromApi()에서): + * + * @Override + * protected List 
fetchDataFromApi() { + * ProductApiResponse response = callApi(); + * return extractDataFromResponse(response); + * } + * + * @param 응답 타입 + * @return API 응답 객체 + */ + @SuppressWarnings("unchecked") + protected R callApi() { + if (webClient == null) { + throw new IllegalStateException("WebClient가 초기화되지 않았습니다. 생성자에서 WebClient를 주입하세요."); + } + + try { + String method = getHttpMethod().toUpperCase(); + String path = getApiPath(); + + log.info("[{}] {} 요청 시작: {}", getReaderName(), method, path); + + if ("GET".equals(method)) { + return callGetApi(); + } else if ("POST".equals(method)) { + return callPostApi(); + } else { + throw new UnsupportedOperationException("지원하지 않는 HTTP Method: " + method); + } + + } catch (Exception e) { + log.error("[{}] API 호출 중 오류 발생", getReaderName(), e); + throw new RuntimeException("API 호출 실패", e); + } + } + + /** + * GET 요청 내부 처리 + */ + @SuppressWarnings("unchecked") + private R callGetApi() { + return (R) webClient + .get() + .uri(buildUri()) + .headers(this::applyHeaders) + .retrieve() + .bodyToMono(getResponseType()) + .block(); + } + + /** + * POST 요청 내부 처리 + */ + @SuppressWarnings("unchecked") + private R callPostApi() { + Object requestBody = getRequestBody(); + + if (requestBody == null) { + // Body 없는 POST 요청 + return (R) webClient + .post() + .uri(buildUri()) + .headers(this::applyHeaders) + .retrieve() + .bodyToMono(getResponseType()) + .block(); + } else { + // Body 있는 POST 요청 + return (R) webClient + .post() + .uri(buildUri()) + .headers(this::applyHeaders) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(getResponseType()) + .block(); + } + } + + /** + * URI 빌드 (Path + Query Parameters + Path Variables) + */ + private Function buildUri() { + return uriBuilder -> { + // 1. Path 설정 + String path = getApiPath(); + uriBuilder.path(path); + + // 2. 
Query Parameters 추가 + Map queryParams = getQueryParams(); + if (queryParams != null && !queryParams.isEmpty()) { + queryParams.forEach((key, value) -> { + if (value != null) { + uriBuilder.queryParam(key, value); + } + }); + log.debug("[{}] Query Parameters: {}", getReaderName(), queryParams); + } + + // 3. Path Variables 적용 + Map pathVars = getPathVariables(); + if (pathVars != null && !pathVars.isEmpty()) { + log.debug("[{}] Path Variables: {}", getReaderName(), pathVars); + return uriBuilder.build(pathVars); + } else { + return uriBuilder.build(); + } + }; + } + + /** + * HTTP Header 적용 + */ + private void applyHeaders(HttpHeaders httpHeaders) { + // 1. 기본 헤더 설정 + httpHeaders.setContentType(MediaType.APPLICATION_JSON); + httpHeaders.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON)); + + // 2. 커스텀 헤더 추가 + Map customHeaders = getHeaders(); + if (customHeaders != null && !customHeaders.isEmpty()) { + customHeaders.forEach(httpHeaders::set); + log.debug("[{}] Custom Headers: {}", getReaderName(), customHeaders); + } + } + + // ======================================== + // 유틸리티 메서드 + // ======================================== + + /** + * 데이터 리스트가 비어있는지 확인 + */ + protected boolean isEmpty(List data) { + return data == null || data.isEmpty(); + } + + /** + * 데이터 리스트 크기 반환 (null-safe) + */ + protected int getDataSize(List data) { + return data != null ? 
data.size() : 0; + } +} diff --git a/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java new file mode 100644 index 0000000..76b0a5a --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java @@ -0,0 +1,339 @@ +package com.snp.batch.common.batch.repository; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.support.GeneratedKeyHolder; +import org.springframework.jdbc.support.KeyHolder; +import org.springframework.transaction.annotation.Transactional; + +import java.sql.PreparedStatement; +import java.sql.Statement; +import java.time.LocalDateTime; +import java.util.List; +import java.util.Optional; + +/** + * JdbcTemplate 기반 Repository 추상 클래스 + * 모든 Repository가 상속받아 일관된 CRUD 패턴 제공 + * + * @param Entity 타입 + * @param ID 타입 + */ +@Slf4j +@RequiredArgsConstructor +@Transactional(readOnly = true) +public abstract class BaseJdbcRepository { + + protected final JdbcTemplate jdbcTemplate; + + /** + * 테이블명 반환 (하위 클래스에서 구현) + */ + protected abstract String getTableName(); + + /** + * ID 컬럼명 반환 (기본값: "id") + */ + protected String getIdColumnName() { + return "id"; + } + protected String getIdColumnName(String customId) { + return customId; + } + + /** + * RowMapper 반환 (하위 클래스에서 구현) + */ + protected abstract RowMapper getRowMapper(); + + /** + * Entity에서 ID 추출 (하위 클래스에서 구현) + */ + protected abstract ID extractId(T entity); + + /** + * INSERT SQL 생성 (하위 클래스에서 구현) + */ + protected abstract String getInsertSql(); + + /** + * UPDATE SQL 생성 (하위 클래스에서 구현) + */ + protected abstract String getUpdateSql(); + + /** + * INSERT용 PreparedStatement 파라미터 설정 (하위 클래스에서 구현) + */ + protected abstract void setInsertParameters(PreparedStatement ps, T entity) throws Exception; + + /** + * UPDATE용 
PreparedStatement 파라미터 설정 (하위 클래스에서 구현) + */ + protected abstract void setUpdateParameters(PreparedStatement ps, T entity) throws Exception; + + /** + * 엔티티명 반환 (로깅용) + */ + protected abstract String getEntityName(); + + // ==================== CRUD 메서드 ==================== + + /** + * ID로 조회 + */ + public Optional findById(ID id) { + String sql = String.format("SELECT * FROM %s WHERE %s = ?", getTableName(), getIdColumnName()); + log.debug("{} 조회: ID={}", getEntityName(), id); + + List results = jdbcTemplate.query(sql, getRowMapper(), id); + return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0)); + } + + /** + * 전체 조회 + */ + public List findAll() { + String sql = String.format("SELECT * FROM %s ORDER BY %s DESC", getTableName(), getIdColumnName()); + log.debug("{} 전체 조회", getEntityName()); + return jdbcTemplate.query(sql, getRowMapper()); + } + + /** + * 개수 조회 + */ + public long count() { + String sql = String.format("SELECT COUNT(*) FROM %s", getTableName()); + Long count = jdbcTemplate.queryForObject(sql, Long.class); + return count != null ? 
count : 0L; + } + + /** + * 존재 여부 확인 + */ + public boolean existsById(ID id) { + String sql = String.format("SELECT COUNT(*) FROM %s WHERE %s = ?", getTableName(), getIdColumnName()); + Long count = jdbcTemplate.queryForObject(sql, Long.class, id); + return count != null && count > 0; + } + + /** + * 단건 저장 (INSERT 또는 UPDATE) + */ + @Transactional + public T save(T entity) { + ID id = extractId(entity); + + if (id == null || !existsById(id)) { + return insert(entity); + } else { + return update(entity); + } + } + + /** + * 단건 INSERT + */ + @Transactional + protected T insert(T entity) { + log.info("{} 삽입 시작", getEntityName()); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + jdbcTemplate.update(connection -> { + PreparedStatement ps = connection.prepareStatement(getInsertSql(), Statement.RETURN_GENERATED_KEYS); + try { + setInsertParameters(ps, entity); + } catch (Exception e) { + log.error("{} 삽입 파라미터 설정 실패", getEntityName(), e); + throw new RuntimeException("Failed to set insert parameters", e); + } + return ps; + }, keyHolder); + + // 생성된 ID 조회 + if (keyHolder.getKeys() != null && !keyHolder.getKeys().isEmpty()) { + Object idValue = keyHolder.getKeys().get(getIdColumnName()); + if (idValue != null) { + @SuppressWarnings("unchecked") + ID generatedId = (ID) (idValue instanceof Number ? 
((Number) idValue).longValue() : idValue); + log.info("{} 삽입 완료: ID={}", getEntityName(), generatedId); + return findById(generatedId).orElse(entity); + } + } + + log.info("{} 삽입 완료 (ID 미반환)", getEntityName()); + return entity; + } + + /** + * 단건 UPDATE + */ + @Transactional + protected T update(T entity) { + ID id = extractId(entity); + log.info("{} 수정 시작: ID={}", getEntityName(), id); + + int updated = jdbcTemplate.update(connection -> { + PreparedStatement ps = connection.prepareStatement(getUpdateSql()); + try { + setUpdateParameters(ps, entity); + } catch (Exception e) { + log.error("{} 수정 파라미터 설정 실패", getEntityName(), e); + throw new RuntimeException("Failed to set update parameters", e); + } + return ps; + }); + + if (updated == 0) { + throw new IllegalStateException(getEntityName() + " 수정 실패: ID=" + id); + } + + log.info("{} 수정 완료: ID={}", getEntityName(), id); + return findById(id).orElse(entity); + } + + /** + * 배치 INSERT (대량 삽입) + */ + @Transactional + public void batchInsert(List entities) { + if (entities == null || entities.isEmpty()) { + return; + } + + log.debug("{} 배치 삽입 시작: {} 건", getEntityName(), entities.size()); + + jdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(), + (ps, entity) -> { + try { + setInsertParameters(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size()); + } + + /** + * 배치 UPDATE (대량 수정) + */ + @Transactional + public void batchUpdate(List entities) { + if (entities == null || entities.isEmpty()) { + return; + } + + log.info("{} 배치 수정 시작: {} 건", getEntityName(), entities.size()); + + jdbcTemplate.batchUpdate(getUpdateSql(), entities, entities.size(), + (ps, entity) -> { + try { + setUpdateParameters(ps, entity); + } catch (Exception e) { + log.error("배치 수정 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.info("{} 배치 수정 완료: {} 건", getEntityName(), entities.size()); 
+ } + + /** + * 전체 저장 (INSERT 또는 UPDATE) + */ + @Transactional + public void saveAll(List entities) { + if (entities == null || entities.isEmpty()) { + return; + } + + log.info("{} 전체 저장 시작: {} 건", getEntityName(), entities.size()); + + // INSERT와 UPDATE 분리 + List toInsert = entities.stream() + .filter(e -> extractId(e) == null || !existsById(extractId(e))) + .toList(); + + List toUpdate = entities.stream() + .filter(e -> extractId(e) != null && existsById(extractId(e))) + .toList(); + + if (!toInsert.isEmpty()) { + batchInsert(toInsert); + } + + if (!toUpdate.isEmpty()) { + batchUpdate(toUpdate); + } + + log.info("{} 전체 저장 완료: 삽입={} 건, 수정={} 건", getEntityName(), toInsert.size(), toUpdate.size()); + } + + /** + * ID로 삭제 + */ + @Transactional + public void deleteById(ID id) { + String sql = String.format("DELETE FROM %s WHERE %s = ?", getTableName(), getIdColumnName()); + log.info("{} 삭제: ID={}", getEntityName(), id); + + int deleted = jdbcTemplate.update(sql, id); + + if (deleted == 0) { + log.warn("{} 삭제 실패 (존재하지 않음): ID={}", getEntityName(), id); + } else { + log.info("{} 삭제 완료: ID={}", getEntityName(), id); + } + } + + /** + * 전체 삭제 + */ + @Transactional + public void deleteAll() { + String sql = String.format("DELETE FROM %s", getTableName()); + log.warn("{} 전체 삭제", getEntityName()); + + int deleted = jdbcTemplate.update(sql); + log.info("{} 전체 삭제 완료: {} 건", getEntityName(), deleted); + } + + // ==================== 헬퍼 메서드 ==================== + + /** + * 현재 시각 반환 (감사 필드용) + */ + protected LocalDateTime now() { + return LocalDateTime.now(); + } + + /** + * 커스텀 쿼리 실행 (단건 조회) + */ + protected Optional executeQueryForObject(String sql, Object... params) { + log.debug("커스텀 쿼리 실행: {}", sql); + List results = jdbcTemplate.query(sql, getRowMapper(), params); + return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0)); + } + + /** + * 커스텀 쿼리 실행 (다건 조회) + */ + protected List executeQueryForList(String sql, Object... 
params) { + log.debug("커스텀 쿼리 실행: {}", sql); + return jdbcTemplate.query(sql, getRowMapper(), params); + } + + /** + * 커스텀 업데이트 실행 + */ + @Transactional + protected int executeUpdate(String sql, Object... params) { + log.debug("커스텀 업데이트 실행: {}", sql); + return jdbcTemplate.update(sql, params); + } +} diff --git a/src/main/java/com/snp/batch/common/batch/repository/MultiDataSourceJdbcRepository.java b/src/main/java/com/snp/batch/common/batch/repository/MultiDataSourceJdbcRepository.java new file mode 100644 index 0000000..c83e1b8 --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/repository/MultiDataSourceJdbcRepository.java @@ -0,0 +1,125 @@ +package com.snp.batch.common.batch.repository; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.transaction.annotation.Transactional; + +import java.sql.PreparedStatement; +import java.time.LocalDateTime; +import java.util.List; +import java.util.Optional; + +/** + * JdbcTemplate 기반 Repository 추상 클래스 (멀티 데이터 소스 지원) + * 모든 Repository가 상속받아 일관된 CRUD 패턴을 제공하며, + * Batch DB용과 Business DB용 JdbcTemplate을 모두 제공합니다. + * + * @param Entity 타입 + * @param ID 타입 + */ +@Slf4j +@Transactional(readOnly = true) +public abstract class MultiDataSourceJdbcRepository { + + // ⭐ Batch 메타데이터/설정용 DB 템플릿 (1번 DB) + protected final JdbcTemplate batchJdbcTemplate; + + // ⭐ Business 데이터용 DB 템플릿 (2번 DB) + protected final JdbcTemplate businessJdbcTemplate; + + /** + * 생성자: 두 개의 JdbcTemplate을 주입받아 초기화합니다. + * (하위 클래스는 이 생성자를 호출하여 템플릿을 초기화해야 합니다.) + */ + public MultiDataSourceJdbcRepository(JdbcTemplate batchJdbcTemplate, JdbcTemplate businessJdbcTemplate) { + this.batchJdbcTemplate = batchJdbcTemplate; + this.businessJdbcTemplate = businessJdbcTemplate; + } + + // ==================== 추상 메서드 (BaseJdbcRepository와 동일) ==================== + + // 이 부분은 기존 BaseJdbcRepository의 추상 메서드와 동일하게 유지하여 + // 하위 클래스가 공통 CRUD 기능을 구현할 수 있도록 합니다. 
+ + protected abstract String getTableName(); + protected String getIdColumnName() { return "id"; } + protected String getIdColumnName(String customId) { return customId; } + protected abstract RowMapper getRowMapper(); + protected abstract ID extractId(T entity); + protected abstract String getInsertSql(); + protected abstract String getUpdateSql(); + protected abstract void setInsertParameters(PreparedStatement ps, T entity) throws Exception; + protected abstract void setUpdateParameters(PreparedStatement ps, T entity) throws Exception; + protected abstract String getEntityName(); + + // ==================== 공통 CRUD 메서드 (businessJdbcTemplate 사용) ==================== + + // CRUD 로직은 주로 비즈니스 데이터(2번 DB)에 적용된다고 가정하고 businessJdbcTemplate을 사용합니다. + + /** + * ID로 조회 (Business DB) + */ + public Optional findById(ID id) { + String sql = String.format("SELECT * FROM %s WHERE %s = ?", getTableName(), getIdColumnName()); + log.debug("{} 조회: ID={}", getEntityName(), id); + + // ⭐ businessJdbcTemplate 사용 + List results = businessJdbcTemplate.query(sql, getRowMapper(), id); + return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0)); + } + + /** + * 배치 INSERT (Business DB) + */ + @Transactional + public void batchInsert(List entities) { + if (entities == null || entities.isEmpty()) { + return; + } + + log.debug("{} 배치 삽입 시작: {} 건 (Business DB)", getEntityName(), entities.size()); + + // ⭐ businessJdbcTemplate 사용 + businessJdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(), + (ps, entity) -> { + try { + setInsertParameters(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size()); + } + + // ... (나머지 find, save, update, delete 메서드도 businessJdbcTemplate을 사용하여 구현합니다.) 
+ + // ==================== 헬퍼 메서드 ==================== + + /** + * 커스텀 쿼리 실행 (Batch DB용) + */ + protected List executeBatchQueryForList(String sql, Object... params) { + log.debug("Batch DB 커스텀 쿼리 실행: {}", sql); + // ⭐ batchJdbcTemplate 사용 + return batchJdbcTemplate.query(sql, getRowMapper(), params); + } + + /** + * 커스텀 업데이트 실행 (Business DB용) + */ + @Transactional + protected int executeBusinessUpdate(String sql, Object... params) { + log.debug("Business DB 커스텀 업데이트 실행: {}", sql); + // ⭐ businessJdbcTemplate 사용 + return businessJdbcTemplate.update(sql, params); + } + + // ... (나머지 헬퍼 메서드 생략) ... + + protected LocalDateTime now() { + return LocalDateTime.now(); + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/common/batch/writer/BaseChunkedWriter.java b/src/main/java/com/snp/batch/common/batch/writer/BaseChunkedWriter.java new file mode 100644 index 0000000..1239179 --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/writer/BaseChunkedWriter.java @@ -0,0 +1,115 @@ +package com.snp.batch.common.batch.writer; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.Chunk; +import org.springframework.batch.item.ItemWriter; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import java.util.ArrayList; +import java.util.List; + +/** + * Sub-Chunk 분할 Writer + * + * 대량 데이터(60,000건 이상)를 subChunkSize 단위로 분할하여 커밋 + * - 트랜잭션 부담 감소 + * - 메모리 효율 향상 + * - 실패 시 부분 복구 가능 + * + * @param Entity 타입 + */ +@Slf4j +public abstract class BaseChunkedWriter implements ItemWriter { + + private static final int DEFAULT_SUB_CHUNK_SIZE = 5000; + + private final String entityName; + private final int subChunkSize; + private final TransactionTemplate transactionTemplate; + + protected BaseChunkedWriter(String entityName, PlatformTransactionManager transactionManager) { + this(entityName, transactionManager, DEFAULT_SUB_CHUNK_SIZE); + } + + protected 
BaseChunkedWriter(String entityName, PlatformTransactionManager transactionManager, int subChunkSize) { + this.entityName = entityName; + this.subChunkSize = subChunkSize; + this.transactionTemplate = new TransactionTemplate(transactionManager); + } + + /** + * 실제 데이터 저장 로직 (하위 클래스에서 구현) + * + * @param items 저장할 Entity 리스트 + * @throws Exception 저장 중 오류 발생 시 + */ + protected abstract void writeItems(List items) throws Exception; + + /** + * Spring Batch ItemWriter 인터페이스 구현 + * Chunk를 subChunkSize 단위로 분할하여 각각 독립적인 트랜잭션으로 커밋 + * + * @param chunk 저장할 데이터 청크 + * @throws Exception 저장 중 오류 발생 시 + */ + @Override + public void write(Chunk chunk) throws Exception { + List items = new ArrayList<>(chunk.getItems()); + + if (items.isEmpty()) { + log.debug("[{}] 저장할 데이터가 없습니다", entityName); + return; + } + + int totalSize = items.size(); + int totalSubChunks = (int) Math.ceil((double) totalSize / subChunkSize); + + log.info("[{}] 전체 데이터 {}건을 {}건 단위로 분할 처리 시작 (총 {} Sub-Chunk)", + entityName, totalSize, subChunkSize, totalSubChunks); + + int processedCount = 0; + int subChunkIndex = 0; + + for (int i = 0; i < totalSize; i += subChunkSize) { + subChunkIndex++; + int endIndex = Math.min(i + subChunkSize, totalSize); + List subChunk = items.subList(i, endIndex); + + final int currentSubChunkIndex = subChunkIndex; + final int currentSubChunkSize = subChunk.size(); + + try { + // 각 Sub-Chunk를 독립적인 트랜잭션으로 처리 + transactionTemplate.executeWithoutResult(status -> { + try { + log.debug("[{}] Sub-Chunk {}/{} 처리 시작 ({}건)", + entityName, currentSubChunkIndex, totalSubChunks, currentSubChunkSize); + + writeItems(new ArrayList<>(subChunk)); + + log.debug("[{}] Sub-Chunk {}/{} 커밋 완료 ({}건)", + entityName, currentSubChunkIndex, totalSubChunks, currentSubChunkSize); + } catch (Exception e) { + log.error("[{}] Sub-Chunk {}/{} 처리 실패", entityName, currentSubChunkIndex, totalSubChunks, e); + status.setRollbackOnly(); + throw new RuntimeException(e); + } + }); + + processedCount += currentSubChunkSize; + 
log.info("[{}] 진행률: {}/{} ({}%)", + entityName, processedCount, totalSize, (processedCount * 100 / totalSize)); + + } catch (Exception e) { + log.error("[{}] Sub-Chunk {}/{} 실패. 처리 완료: {}건, 미처리: {}건", + entityName, subChunkIndex, totalSubChunks, processedCount, totalSize - processedCount); + throw new RuntimeException( + String.format("[%s] Sub-Chunk %d/%d 처리 중 오류 발생. 성공: %d건, 실패 시작 위치: %d", + entityName, subChunkIndex, totalSubChunks, processedCount, i), e); + } + } + + log.info("[{}] 전체 데이터 {}건 저장 완료 ({} Sub-Chunk)", entityName, totalSize, totalSubChunks); + } +} diff --git a/src/main/java/com/snp/batch/common/batch/writer/BaseWriter.java b/src/main/java/com/snp/batch/common/batch/writer/BaseWriter.java new file mode 100644 index 0000000..6169d5b --- /dev/null +++ b/src/main/java/com/snp/batch/common/batch/writer/BaseWriter.java @@ -0,0 +1,61 @@ +package com.snp.batch.common.batch.writer; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.Chunk; +import org.springframework.batch.item.ItemWriter; + +import java.util.ArrayList; +import java.util.List; + +/** + * ItemWriter 추상 클래스 + * 데이터 저장 로직을 위한 템플릿 제공 + * + * Template Method Pattern: + * - write(): 공통 로직 (로깅, null 체크) + * - writeItems(): 하위 클래스에서 저장 로직 구현 + * + * @param Entity 타입 + */ +@Slf4j +@RequiredArgsConstructor +public abstract class BaseWriter implements ItemWriter { + + private final String entityName; + + /** + * 실제 데이터 저장 로직 (하위 클래스에서 구현) + * Repository의 saveAll() 또는 batchInsert() 호출 등 + * + * @param items 저장할 Entity 리스트 + * @throws Exception 저장 중 오류 발생 시 + */ + protected abstract void writeItems(List items) throws Exception; + + /** + * Spring Batch ItemWriter 인터페이스 구현 + * Chunk 단위로 데이터를 저장 + * + * @param chunk 저장할 데이터 청크 + * @throws Exception 저장 중 오류 발생 시 + */ + @Override + public void write(Chunk chunk) throws Exception { + List items = new ArrayList<>(chunk.getItems()); + + if (items.isEmpty()) { + log.debug("{} 저장할 데이터가 없습니다", 
entityName); + return; + } + + try { + log.info("{} 데이터 {}건 저장 시작", entityName, items.size()); + writeItems(items); + log.info("{} 데이터 {}건 저장 완료", entityName, items.size()); + } catch (Exception e) { + log.error("{} 데이터 저장 실패", entityName, e); + throw e; + } + } +} diff --git a/src/main/java/com/snp/batch/common/util/BatchWriteListener.java b/src/main/java/com/snp/batch/common/util/BatchWriteListener.java new file mode 100644 index 0000000..86daf25 --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/BatchWriteListener.java @@ -0,0 +1,51 @@ +package com.snp.batch.common.util; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.ItemWriteListener; +import org.springframework.batch.item.Chunk; +import org.springframework.jdbc.core.JdbcTemplate; + +@Slf4j +@RequiredArgsConstructor +public class BatchWriteListener implements ItemWriteListener { + + private final JdbcTemplate businessJdbcTemplate; + private final String updateSql; // 실행할 쿼리 (예: "UPDATE ... SET batch_flag = 'S' ...") + + @Override + public void afterWrite(Chunk items) { + // afterWrite는 Writer가 예외 없이 성공했을 때만 실행되는 것이 보장되어야 함 + if (items.isEmpty()) return; + + Long jobExecutionId = items.getItems().get(0).getJobExecutionId(); + + try { + int updatedRows = businessJdbcTemplate.update(updateSql, jobExecutionId); + log.info("[BatchWriteListener] Success update 'S'. jobExecutionId: {}, rows: {}", jobExecutionId, updatedRows); + } catch (Exception e) { + log.error("[BatchWriteListener] Update 'S' failed. jobExecutionId: {}", jobExecutionId, e); + // ❗중요: 리스너의 업데이트가 실패해도 배치를 중단시키려면 예외를 던져야 함 + throw e; + } + } + + @Override + public void onWriteError(Exception exception, Chunk items) { + // ⭐ Writer에서 에러가 발생하면 이 메서드가 호출됨 + if (!items.isEmpty()) { + Long jobExecutionId = items.getItems().get(0).getJobExecutionId(); + log.error("[BatchWriteListener] Write Error Detected! jobExecutionId: {}. Status will NOT be updated to 'S'. 
Error: {}", + jobExecutionId, exception.getMessage()); + } + + // ❗중요: 여기서 예외를 다시 던져야 배치가 중단(FAILED)됨 + // 만약 여기서 예외를 던지지 않으면 배치는 다음 청크를 계속 시도할 수 있음 + if (exception instanceof RuntimeException) { + throw (RuntimeException) exception; + } else { + throw new RuntimeException("Force stop batch due to write error", exception); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/common/util/CommonSql.java b/src/main/java/com/snp/batch/common/util/CommonSql.java new file mode 100644 index 0000000..8e660e4 --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/CommonSql.java @@ -0,0 +1,69 @@ +package com.snp.batch.common.util; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class CommonSql { + private static String SOURCE_SCHEMA; + + public CommonSql(@Value("${app.batch.source-schema.name}") String sourceSchema) { + SOURCE_SCHEMA = sourceSchema; + } + /* + * 동기화 대상 Job Execution ID 조회 + */ + public static String getNextTargetQuery(String targetTable){ + return """ + SELECT MIN(a.job_execution_id) + FROM %s.%s a + INNER JOIN %s.batch_job_execution b + ON a.job_execution_id = b.job_execution_id + AND b.status = 'COMPLETED' + WHERE 1=1 + AND a.batch_flag = 'N' + """.formatted(SOURCE_SCHEMA, targetTable, SOURCE_SCHEMA); + } + /* + * 동기화 대상 데이터 조회 by Job Execution ID + */ + public static String getTargetDataQuery(String targetTable){ + return """ + SELECT a.* + FROM %s.%s a + INNER JOIN %s.batch_job_execution b + ON a.job_execution_id = b.job_execution_id + AND b.status = 'COMPLETED' + WHERE 1=1 + AND a.batch_flag = 'N' + AND a.job_execution_id = ? 
+ ORDER BY a.job_execution_id, a.row_index; + """.formatted(SOURCE_SCHEMA, targetTable, SOURCE_SCHEMA); + } + /* + * 동기화 상대 업데이트 N(대기) -> P(진행) + */ + public static String getProcessBatchQuery(String targetTable) { + return """ + UPDATE %s.%s + SET batch_flag = 'P' + , mdfcn_dt = CURRENT_TIMESTAMP + , mdfr_id = 'SYSTEM' + WHERE batch_flag = 'N' + and job_execution_id = ? + """.formatted(SOURCE_SCHEMA, targetTable); + } + /* + * 동기화 상대 업데이트 P(진행) -> S(완료) + */ + public static String getCompleteBatchQuery(String targetTable) { + return """ + UPDATE %s.%s + SET batch_flag = 'S' + , mdfcn_dt = CURRENT_TIMESTAMP + , mdfr_id = 'SYSTEM' + WHERE batch_flag = 'P' + and job_execution_id = ? + """.formatted(SOURCE_SCHEMA, targetTable); + } +} diff --git a/src/main/java/com/snp/batch/common/util/EntityUtils.java b/src/main/java/com/snp/batch/common/util/EntityUtils.java new file mode 100644 index 0000000..b51a5fc --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/EntityUtils.java @@ -0,0 +1,31 @@ +package com.snp.batch.common.util; // 적절한 유틸리티 패키지로 변경 + +import java.util.Collections; +import java.util.List; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class EntityUtils { + + /** + * 제네릭 리스트(Entity 또는 DTO)에서 원하는 필드(값)만 추출하여 List 형태로 반환하는 공통 함수. + * 추출된 필드가 Long이 아닌 경우에도 Function 람다 내에서 Long으로 변환해야 합니다. 
+ * * @param 리스트의 요소 타입 (예: ShipDto, OwnerHistoryEntity) + * @param list 데이터를 추출할 리스트 + * @param indexExtractor 리스트 요소에서 shipresultindex (Long 타입) 값을 추출하는 Function + * @return 추출된 shipresultindex 값들의 List + */ + public static List getIndexesFromList( + List list, + Function indexExtractor) { // ⭐ 함수명 변경 (getIndexesFromEntityList -> getIndexesFromList) + + if (list == null || list.isEmpty()) { + return Collections.emptyList(); + } + + return list.stream() + // Function 인터페이스를 사용하여 각 요소에서 Long 값을 추출 + .map(indexExtractor) + .collect(Collectors.toList()); + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/common/util/GroupByExecutionIdChunkListener.java b/src/main/java/com/snp/batch/common/util/GroupByExecutionIdChunkListener.java new file mode 100644 index 0000000..57063de --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/GroupByExecutionIdChunkListener.java @@ -0,0 +1,31 @@ +package com.snp.batch.common.util; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.ChunkListener; +import org.springframework.batch.core.scope.context.ChunkContext; + +/** + * 청크 완료 후 ThreadLocal을 정리하는 리스너 + */ +@Slf4j +public class GroupByExecutionIdChunkListener implements ChunkListener { + + @Override + public void beforeChunk(ChunkContext context) { + // 청크 시작 전 - 필요시 구현 + } + + @Override + public void afterChunk(ChunkContext context) { + // 청크 완료 후 ThreadLocal 정리 + GroupByExecutionIdPolicy.clearCurrentItem(); + log.debug("[GroupByExecutionIdChunkListener] 청크 완료 - ThreadLocal 정리됨"); + } + + @Override + public void afterChunkError(ChunkContext context) { + // 청크 에러 시에도 ThreadLocal 정리 + GroupByExecutionIdPolicy.clearCurrentItem(); + log.warn("[GroupByExecutionIdChunkListener] 청크 에러 발생 - ThreadLocal 정리됨"); + } +} diff --git a/src/main/java/com/snp/batch/common/util/GroupByExecutionIdPolicy.java b/src/main/java/com/snp/batch/common/util/GroupByExecutionIdPolicy.java new file mode 100644 index 0000000..ead1790 --- /dev/null +++ 
b/src/main/java/com/snp/batch/common/util/GroupByExecutionIdPolicy.java @@ -0,0 +1,86 @@ +package com.snp.batch.common.util; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.repeat.CompletionPolicy; +import org.springframework.batch.repeat.RepeatContext; +import org.springframework.batch.repeat.RepeatStatus; + +@Slf4j +public class GroupByExecutionIdPolicy implements CompletionPolicy { + + // ThreadLocal을 통해 Reader에서 읽은 아이템을 전달받음 + private static final ThreadLocal CURRENT_ITEM = new ThreadLocal<>(); + + private Long currentId = null; + private boolean isComplete = false; + + /** + * ItemReadListener에서 호출하여 현재 읽은 아이템을 설정 + */ + public static void setCurrentItem(Object item) { + CURRENT_ITEM.set(item); + } + + /** + * 청크 완료 후 ThreadLocal 정리 + */ + public static void clearCurrentItem() { + CURRENT_ITEM.remove(); + } + + @Override + public boolean isComplete(RepeatContext context, RepeatStatus result) { + // Reader가 null을 반환하면 (FINISHED) 청크 종료 + if (result == RepeatStatus.FINISHED) { + log.debug("[GroupByExecutionIdPolicy] isComplete - Reader 종료 (FINISHED)"); + return true; + } + log.debug("[GroupByExecutionIdPolicy] isComplete(context, result) 호출 - result: {}, isComplete: {}", result, isComplete); + return isComplete; + } + + @Override + public boolean isComplete(RepeatContext context) { + log.debug("[GroupByExecutionIdPolicy] isComplete(context) 호출 - isComplete: {}", isComplete); + return isComplete; + } + + @Override + public RepeatContext start(RepeatContext parent) { + log.debug("[GroupByExecutionIdPolicy] start 호출 - 청크 초기화"); + this.currentId = null; + this.isComplete = false; + return parent; + } + + @Override + public void update(RepeatContext context) { + Object item = CURRENT_ITEM.get(); + log.debug("[GroupByExecutionIdPolicy] update 호출 - item: {}, currentId: {}, isComplete: {}", + item != null ? item.getClass().getSimpleName() : "null", currentId, isComplete); + + // 1. 
아이템이 null이면 무시 (첫 번째 호출이거나 Reader 종료 - isComplete는 isComplete(context, result)에서 처리) + if (item == null) { + log.debug("[GroupByExecutionIdPolicy] item이 null - 무시 (아직 아이템을 읽지 않았거나 Reader 종료)"); + return; + } + + // 2. JobExecutionGroupable 구현체인 경우 job_execution_id 기준으로 청크 분리 + if (item instanceof JobExecutionGroupable groupableItem) { + Long rowId = groupableItem.getJobExecutionId(); + log.debug("[GroupByExecutionIdPolicy] jobExecutionId 비교 - currentId: {}, rowId: {}", currentId, rowId); + + if (currentId == null) { + // 첫 번째 아이템 + currentId = rowId; + log.debug("[GroupByExecutionIdPolicy] 청크 시작 - jobExecutionId: {}", currentId); + } else if (!currentId.equals(rowId)) { + // job_execution_id가 바뀌면 청크 완료 + log.info("[GroupByExecutionIdPolicy] 청크 완료 - jobExecutionId: {}, 다음: {}", currentId, rowId); + isComplete = true; + } + } else { + log.warn("[GroupByExecutionIdPolicy] item이 JobExecutionGroupable이 아님: {}", item.getClass().getName()); + } + } +} diff --git a/src/main/java/com/snp/batch/common/util/GroupByExecutionIdReadListener.java b/src/main/java/com/snp/batch/common/util/GroupByExecutionIdReadListener.java new file mode 100644 index 0000000..256f294 --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/GroupByExecutionIdReadListener.java @@ -0,0 +1,30 @@ +package com.snp.batch.common.util; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.ItemReadListener; + +/** + * Reader가 아이템을 읽을 때마다 GroupByExecutionIdPolicy에 전달하는 리스너 + * 이를 통해 job_execution_id 단위로 청크를 분리할 수 있음 + */ +@Slf4j +public class GroupByExecutionIdReadListener implements ItemReadListener { + + @Override + public void beforeRead() { + // Reader 호출 전 - 필요시 구현 + } + + @Override + public void afterRead(T item) { + // Reader가 아이템을 읽은 후 ThreadLocal에 설정 + log.debug("[GroupByExecutionIdReadListener] afterRead 호출 - item: {}", item != null ? 
item.getClass().getSimpleName() : "null"); + GroupByExecutionIdPolicy.setCurrentItem(item); + } + + @Override + public void onReadError(Exception ex) { + log.error("[GroupByExecutionIdReadListener] Read error occurred", ex); + GroupByExecutionIdPolicy.clearCurrentItem(); + } +} diff --git a/src/main/java/com/snp/batch/common/util/JobExecutionGroupable.java b/src/main/java/com/snp/batch/common/util/JobExecutionGroupable.java new file mode 100644 index 0000000..0b94108 --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/JobExecutionGroupable.java @@ -0,0 +1,5 @@ +package com.snp.batch.common.util; + +public interface JobExecutionGroupable { + Long getJobExecutionId(); +} diff --git a/src/main/java/com/snp/batch/common/util/JsonChangeDetector.java b/src/main/java/com/snp/batch/common/util/JsonChangeDetector.java new file mode 100644 index 0000000..164e381 --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/JsonChangeDetector.java @@ -0,0 +1,179 @@ +package com.snp.batch.common.util; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.security.MessageDigest; +import java.util.*; + +public class JsonChangeDetector { + + // Map으로 변환 시 사용할 ObjectMapper (표준 Mapper 사용) + private static final ObjectMapper MAPPER = new ObjectMapper(); + + // 해시 비교에서 제외할 필드 목록 (DataSetVersion 등) + // 이 목록은 모든 JSON 계층에 걸쳐 적용됩니다. + private static final Set EXCLUDE_KEYS = + Set.of("DataSetVersion", "APSStatus", "LastUpdateDate", "LastUpdateDateTime"); + + private static final Map LIST_SORT_KEYS = Map.of( + // List 필드명 // 정렬 기준 키 + "OwnerHistory" ,"Sequence", // OwnerHistory는 Sequence를 기준으로 정렬 + "SurveyDatesHistoryUnique" , "SurveyDate" // SurveyDatesHistoryUnique는 SurveyDate를 기준으로 정렬 + // 추가적인 List/Array 필드가 있다면 여기에 추가 + ); + + // ========================================================================= + // 1. 
JSON 문자열을 정렬 및 필터링된 Map으로 변환하는 핵심 로직 + // ========================================================================= + /** + * JSON 문자열을 Map으로 변환하고, 특정 키를 제거하며, 키 순서가 정렬된 상태로 만듭니다. + * @param jsonString API 응답 또는 DB에서 읽은 JSON 문자열 + * @return 필터링되고 정렬된 Map 객체 + */ + public static Map jsonToSortedFilteredMap(String jsonString) { + if (jsonString == null || jsonString.trim().isEmpty()) { + return Collections.emptyMap(); + } + + try { + // 1. Map으로 1차 변환합니다. (순서 보장 안됨) + Map rawMap = MAPPER.readValue(jsonString, + new com.fasterxml.jackson.core.type.TypeReference>() {}); + + // 2. 재귀 함수를 호출하여 키를 제거하고 TreeMap(키 순서 정렬)으로 깊은 복사합니다. + return deepFilterAndSort(rawMap); + + } catch (Exception e) { + System.err.println("Error converting JSON to filtered Map: " + e.getMessage()); + // 예외 발생 시 빈 Map 반환 + return Collections.emptyMap(); + } + } + + /** + * Map을 재귀적으로 탐색하며 제외 키를 제거하고 TreeMap(알파벳 순서)으로 변환합니다. + */ + private static Map deepFilterAndSort(Map rawMap) { + // Map을 TreeMap으로 생성하여 키 순서를 알파벳 순으로 강제 정렬합니다. + Map sortedMap = new TreeMap<>(); + + for (Map.Entry entry : rawMap.entrySet()) { + String key = entry.getKey(); + Object value = entry.getValue(); + + // 🔑 1. 제외할 키 값인지 확인 + if (EXCLUDE_KEYS.contains(key)) { + continue; // 제외 + } + + // 2. 값의 타입에 따라 재귀 처리 + if (value instanceof Map) { + // 재귀 호출: 하위 Map을 필터링하고 정렬 + @SuppressWarnings("unchecked") + Map subMap = (Map) value; + sortedMap.put(key, deepFilterAndSort(subMap)); + } else if (value instanceof List) { + // List 처리: List 내부의 Map 요소만 재귀 호출 + @SuppressWarnings("unchecked") + List rawList = (List) value; + List filteredList = new ArrayList<>(); + + // 1. List 내부의 Map 요소들을 재귀적으로 필터링/정렬하여 filteredList에 추가 + for (Object item : rawList) { + if (item instanceof Map) { + @SuppressWarnings("unchecked") + Map itemMap = (Map) item; + // List의 요소인 Map도 필터링하고 정렬 (Map의 필드 순서 정렬) + filteredList.add(deepFilterAndSort(itemMap)); + } else { + filteredList.add(item); + } + } + + // 2. 
🔑 List 필드명에 따른 순서 정렬 로직 (추가된 핵심 로직) + String listFieldName = entry.getKey(); + String sortKey = LIST_SORT_KEYS.get(listFieldName); + + if (sortKey != null && !filteredList.isEmpty() && filteredList.get(0) instanceof Map) { + // Map 요소를 가진 리스트인 경우에만 정렬 실행 + try { + // 정렬 기준 키를 사용하여 Comparator를 생성 + Collections.sort(filteredList, new Comparator() { + @Override + @SuppressWarnings("unchecked") + public int compare(Object o1, Object o2) { + Map map1 = (Map) o1; + Map map2 = (Map) o2; + + // 정렬 기준 키(sortKey)의 값을 가져와 비교 + Object key1 = map1.get(sortKey); + Object key2 = map2.get(sortKey); + + if (key1 == null || key2 == null) { + // 키 값이 null인 경우, Map의 전체 문자열로 비교 (안전장치) + return map1.toString().compareTo(map2.toString()); + } + + // String 타입으로 변환하여 비교 (Date, Number 타입도 대부분 String으로 처리 가능) + return key1.toString().compareTo(key2.toString()); + } + }); + } catch (Exception e) { + System.err.println("List sort failed for key " + listFieldName + ": " + e.getMessage()); + // 정렬 실패 시 원래 순서 유지 + } + } + sortedMap.put(key, filteredList); + } else { + // String, Number 등 기본 타입은 그대로 추가 + sortedMap.put(key, value); + } + } + return sortedMap; + } + + + // ========================================================================= + // 2. 해시 생성 로직 + // ========================================================================= + + /** + * 필터링되고 정렬된 Map의 문자열 표현을 기반으로 SHA-256 해시를 생성합니다. + */ + public static String getSha256HashFromMap(Map sortedMap) { + // 1. Map을 String으로 변환: TreeMap 덕분에 toString() 결과가 항상 동일한 순서를 가집니다. 
+ String mapString = sortedMap.toString(); + + try { + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + byte[] hash = digest.digest(mapString.getBytes("UTF-8")); + + // 바이트 배열을 16진수 문자열로 변환 + StringBuilder hexString = new StringBuilder(); + for (byte b : hash) { + String hex = Integer.toHexString(0xff & b); + if (hex.length() == 1) hexString.append('0'); + hexString.append(hex); + } + return hexString.toString(); + } catch (Exception e) { + System.err.println("Error generating hash: " + e.getMessage()); + return "HASH_ERROR"; + } + } + + + // ========================================================================= + // 3. 해시값 비교 로직 + // ========================================================================= + public static boolean isChanged(String previousHash, String currentHash) { + // DB 해시가 null인 경우 (첫 Insert)는 변경된 것으로 간주 + if (previousHash == null || previousHash.isEmpty()) { + return true; + } + + // 해시값이 다르면 변경된 것으로 간주 + return !Objects.equals(previousHash, currentHash); + } + +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/common/util/TableMetaInfo.java b/src/main/java/com/snp/batch/common/util/TableMetaInfo.java new file mode 100644 index 0000000..bdf6863 --- /dev/null +++ b/src/main/java/com/snp/batch/common/util/TableMetaInfo.java @@ -0,0 +1,340 @@ +package com.snp.batch.common.util; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class TableMetaInfo { + + /* + * ============================================ + * Source Schema Tables (std_snp_data) + * ============================================ + */ + + // Ship Tables + @Value("${app.batch.source-schema.tables.ship-001}") + public String sourceShipData; + + @Value("${app.batch.source-schema.tables.ship-002}") + public String sourceShipDetailData; + + @Value("${app.batch.source-schema.tables.ship-003}") + public String sourceAdditionalShipsData; + + 
@Value("${app.batch.source-schema.tables.ship-004}") + public String sourceBareboatCharterHistory; + + @Value("${app.batch.source-schema.tables.ship-005}") + public String sourceCallsignAndMmsiHistory; + + @Value("${app.batch.source-schema.tables.ship-006}") + public String sourceClassHistory; + + @Value("${app.batch.source-schema.tables.ship-007}") + public String sourceCompanyVesselRelationships; + + @Value("${app.batch.source-schema.tables.ship-008}") + public String sourceCrewList; + + @Value("${app.batch.source-schema.tables.ship-009}") + public String sourceDarkActivityConfirmed; + + @Value("${app.batch.source-schema.tables.ship-010}") + public String sourceFlagHistory; + + @Value("${app.batch.source-schema.tables.ship-011}") + public String sourceGroupBeneficialOwnerHistory; + + @Value("${app.batch.source-schema.tables.ship-012}") + public String sourceIceClass; + + @Value("${app.batch.source-schema.tables.ship-013}") + public String sourceNameHistory; + + @Value("${app.batch.source-schema.tables.ship-014}") + public String sourceOperatorHistory; + + @Value("${app.batch.source-schema.tables.ship-015}") + public String sourceOwnerHistory; + + @Value("${app.batch.source-schema.tables.ship-016}") + public String sourcePandiHistory; + + @Value("${app.batch.source-schema.tables.ship-017}") + public String sourceSafetyManagementCertificateHist; + + @Value("${app.batch.source-schema.tables.ship-018}") + public String sourceShipManagerHistory; + + @Value("${app.batch.source-schema.tables.ship-019}") + public String sourceSisterShipLinks; + + @Value("${app.batch.source-schema.tables.ship-020}") + public String sourceSpecialFeature; + + @Value("${app.batch.source-schema.tables.ship-021}") + public String sourceStatusHistory; + + @Value("${app.batch.source-schema.tables.ship-022}") + public String sourceStowageCommodity; + + @Value("${app.batch.source-schema.tables.ship-023}") + public String sourceSurveyDates; + + @Value("${app.batch.source-schema.tables.ship-024}") + 
public String sourceSurveyDatesHistoryUnique; + + @Value("${app.batch.source-schema.tables.ship-025}") + public String sourceTechnicalManagerHistory; + + @Value("${app.batch.source-schema.tables.ship-026}") + public String sourceThrusters; + + // Company Tables + @Value("${app.batch.source-schema.tables.company-001}") + public String sourceTbCompanyDetail; + + // Event Tables + @Value("${app.batch.source-schema.tables.event-001}") + public String sourceEvent; + + @Value("${app.batch.source-schema.tables.event-002}") + public String sourceEventCargo; + + @Value("${app.batch.source-schema.tables.event-003}") + public String sourceEventHumanCasualty; + + @Value("${app.batch.source-schema.tables.event-004}") + public String sourceEventRelationship; + + // Facility Tables + @Value("${app.batch.source-schema.tables.facility-001}") + public String sourceFacilityPort; + + // PSC Tables + @Value("${app.batch.source-schema.tables.psc-001}") + public String sourcePscDetail; + + @Value("${app.batch.source-schema.tables.psc-002}") + public String sourcePscDefect; + + @Value("${app.batch.source-schema.tables.psc-003}") + public String sourcePscAllCertificate; + + // Movements Tables + @Value("${app.batch.source-schema.tables.movements-001}") + public String sourceTAnchorageCall; + + @Value("${app.batch.source-schema.tables.movements-002}") + public String sourceTBerthCall; + + @Value("${app.batch.source-schema.tables.movements-003}") + public String sourceTCurrentlyAt; + + @Value("${app.batch.source-schema.tables.movements-004}") + public String sourceTDestination; + + @Value("${app.batch.source-schema.tables.movements-005}") + public String sourceTShipStpovInfo; + + @Value("${app.batch.source-schema.tables.movements-006}") + public String sourceTStsOperation; + + @Value("${app.batch.source-schema.tables.movements-007}") + public String sourceTTerminalCall; + + @Value("${app.batch.source-schema.tables.movements-008}") + public String sourceTTransit; + + // Code Tables + 
@Value("${app.batch.source-schema.tables.code-001}") + public String sourceStat5Code; + + @Value("${app.batch.source-schema.tables.code-002}") + public String sourceFlagCode; + + // Risk & Compliance Tables + @Value("${app.batch.source-schema.tables.risk-compliance-001}") + public String sourceRisk; + + @Value("${app.batch.source-schema.tables.risk-compliance-002}") + public String sourceCompliance; + + @Value("${app.batch.source-schema.tables.risk-compliance-003}") + public String sourceTbCompanyComplianceInfo; + + + /* + * ============================================ + * Target Schema Tables (std_snp_svc) + * ============================================ + */ + + // Ship Tables + @Value("${app.batch.target-schema.tables.ship-001}") + public String targetTbShipInfoMst; + + @Value("${app.batch.target-schema.tables.ship-002}") + public String targetTbShipMainInfo; + + @Value("${app.batch.target-schema.tables.ship-003}") + public String targetTbShipAddInfo; + + @Value("${app.batch.target-schema.tables.ship-004}") + public String targetTbShipBbctrHstry; + + @Value("${app.batch.target-schema.tables.ship-005}") + public String targetTbShipIdntfInfoHstry; + + @Value("${app.batch.target-schema.tables.ship-006}") + public String targetTbShipClficHstry; + + @Value("${app.batch.target-schema.tables.ship-007}") + public String targetTbShipCompanyRel; + + @Value("${app.batch.target-schema.tables.ship-008}") + public String targetTbShipCrewList; + + @Value("${app.batch.target-schema.tables.ship-009}") + public String targetTbShipDarkActvIdnty; + + @Value("${app.batch.target-schema.tables.ship-010}") + public String targetTbShipCountryHstry; + + @Value("${app.batch.target-schema.tables.ship-011}") + public String targetTbShipGroupRevnOwnrHstry; + + @Value("${app.batch.target-schema.tables.ship-012}") + public String targetTbShipIceGrd; + + @Value("${app.batch.target-schema.tables.ship-013}") + public String targetTbShipNmChgHstry; + + 
@Value("${app.batch.target-schema.tables.ship-014}") + public String targetTbShipOperatorHstry; + + @Value("${app.batch.target-schema.tables.ship-015}") + public String targetTbShipOwnrHstry; + + @Value("${app.batch.target-schema.tables.ship-016}") + public String targetTbShipPrtcRpnHstry; + + @Value("${app.batch.target-schema.tables.ship-017}") + public String targetTbShipSftyMngEvdcHstry; + + @Value("${app.batch.target-schema.tables.ship-018}") + public String targetTbShipMngCompanyHstry; + + @Value("${app.batch.target-schema.tables.ship-019}") + public String targetTbShipSstrvslRel; + + @Value("${app.batch.target-schema.tables.ship-020}") + public String targetTbShipSpcFetr; + + @Value("${app.batch.target-schema.tables.ship-021}") + public String targetTbShipStatusHstry; + + @Value("${app.batch.target-schema.tables.ship-022}") + public String targetTbShipCargoCapacity; + + @Value("${app.batch.target-schema.tables.ship-023}") + public String targetTbShipInspectionYmd; + + @Value("${app.batch.target-schema.tables.ship-024}") + public String targetTbShipInspectionYmdHstry; + + @Value("${app.batch.target-schema.tables.ship-025}") + public String targetTbShipTechMngCompanyHstry; + + @Value("${app.batch.target-schema.tables.ship-026}") + public String targetTbThrstrInfo; + + // Company Tables + @Value("${app.batch.target-schema.tables.company-001}") + public String targetTbCompanyDtlInfo; + + // Event Tables + @Value("${app.batch.target-schema.tables.event-001}") + public String targetTbEventDtl; + + @Value("${app.batch.target-schema.tables.event-002}") + public String targetTbEventCargo; + + @Value("${app.batch.target-schema.tables.event-003}") + public String targetTbEventHumnAcdnt; + + @Value("${app.batch.target-schema.tables.event-004}") + public String targetTbEventRel; + + // Facility Tables + @Value("${app.batch.target-schema.tables.facility-001}") + public String targetTbPortFacilityInfo; + + // PSC Tables + @Value("${app.batch.target-schema.tables.psc-001}") 
+ public String targetTbPscDtl; + + @Value("${app.batch.target-schema.tables.psc-002}") + public String targetTbPscDefect; + + @Value("${app.batch.target-schema.tables.psc-003}") + public String targetTbPscOaCertf; + + // Movements Tables + @Value("${app.batch.target-schema.tables.movements-001}") + public String targetTbShipAnchrgcallHstry; + + @Value("${app.batch.target-schema.tables.movements-002}") + public String targetTbShipBerthcallHstry; + + @Value("${app.batch.target-schema.tables.movements-003}") + public String targetTbShipNowStatusHstry; + + @Value("${app.batch.target-schema.tables.movements-004}") + public String targetTbShipDestHstry; + + @Value("${app.batch.target-schema.tables.movements-005}") + public String targetTbShipPrtcllHstry; + + @Value("${app.batch.target-schema.tables.movements-006}") + public String targetTbShipStsOpertHstry; + + @Value("${app.batch.target-schema.tables.movements-007}") + public String targetTbShipTeminalcallHstry; + + @Value("${app.batch.target-schema.tables.movements-008}") + public String targetTbShipTrnstHstry; + + // Code Tables + @Value("${app.batch.target-schema.tables.code-001}") + public String targetTbShipTypeCd; + + @Value("${app.batch.target-schema.tables.code-002}") + public String targetTbShipCountryCd; + + // Risk & Compliance Tables + @Value("${app.batch.target-schema.tables.risk-compliance-001}") + public String targetTbShipRiskInfo; + + @Value("${app.batch.target-schema.tables.risk-compliance-002}") + public String targetTbShipRiskHstry; + + @Value("${app.batch.target-schema.tables.risk-compliance-003}") + public String targetTbShipComplianceInfo; + + @Value("${app.batch.target-schema.tables.risk-compliance-004}") + public String targetTbShipComplianceHstry; + + @Value("${app.batch.target-schema.tables.risk-compliance-005}") + public String targetTbShipComplianceInfoHstry; + + @Value("${app.batch.target-schema.tables.risk-compliance-006}") + public String targetTbCompanyComplianceInfo; + + 
@Value("${app.batch.target-schema.tables.risk-compliance-007}") + public String targetTbCompanyComplianceHstry; + + @Value("${app.batch.target-schema.tables.risk-compliance-008}") + public String targetTbCompanyComplianceInfoHstry; +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/common/web/ApiResponse.java b/src/main/java/com/snp/batch/common/web/ApiResponse.java new file mode 100644 index 0000000..68d6edb --- /dev/null +++ b/src/main/java/com/snp/batch/common/web/ApiResponse.java @@ -0,0 +1,81 @@ +package com.snp.batch.common.web; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 통일된 API 응답 형식 + * + * @param 응답 데이터 타입 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ApiResponse { + + /** + * 성공 여부 + */ + private boolean success; + + /** + * 메시지 + */ + private String message; + + /** + * 응답 데이터 + */ + private T data; + + /** + * 에러 코드 (실패 시) + */ + private String errorCode; + + /** + * 성공 응답 생성 + */ + public static ApiResponse success(T data) { + return ApiResponse.builder() + .success(true) + .message("Success") + .data(data) + .build(); + } + + /** + * 성공 응답 생성 (메시지 포함) + */ + public static ApiResponse success(String message, T data) { + return ApiResponse.builder() + .success(true) + .message(message) + .data(data) + .build(); + } + + /** + * 실패 응답 생성 + */ + public static ApiResponse error(String message) { + return ApiResponse.builder() + .success(false) + .message(message) + .build(); + } + + /** + * 실패 응답 생성 (에러 코드 포함) + */ + public static ApiResponse error(String message, String errorCode) { + return ApiResponse.builder() + .success(false) + .message(message) + .errorCode(errorCode) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/common/web/controller/BaseController.java b/src/main/java/com/snp/batch/common/web/controller/BaseController.java new file mode 100644 index 0000000..445a0c6 --- /dev/null +++ 
b/src/main/java/com/snp/batch/common/web/controller/BaseController.java @@ -0,0 +1,300 @@ +package com.snp.batch.common.web.controller; + +import com.snp.batch.common.web.ApiResponse; +import com.snp.batch.common.web.service.BaseService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; + +import java.util.List; + +/** + * 모든 REST Controller의 공통 베이스 클래스 + * CRUD API의 일관된 구조 제공 + * + * 이 클래스는 추상 클래스이므로 @Tag를 붙이지 않습니다. + * 하위 클래스에서 @Tag를 정의하면 모든 엔드포인트가 해당 태그로 그룹화됩니다. + * + * @param DTO 타입 + * @param ID 타입 + */ +@Slf4j +public abstract class BaseController { + + /** + * Service 반환 (하위 클래스에서 구현) + */ + protected abstract BaseService getService(); + + /** + * 리소스 이름 반환 (로깅용) + */ + protected abstract String getResourceName(); + + /** + * 단건 생성 + */ + @Operation( + summary = "리소스 생성", + description = "새로운 리소스를 생성합니다", + responses = { + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "200", + description = "생성 성공" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "500", + description = "서버 오류" + ) + } + ) + @PostMapping + public ResponseEntity> create( + @Parameter(description = "생성할 리소스 데이터", required = true) + @RequestBody D dto) { + log.info("{} 생성 요청", getResourceName()); + try { + D created = getService().create(dto); + return ResponseEntity.ok( + ApiResponse.success(getResourceName() + " created successfully", created) + ); + } catch (Exception e) { + log.error("{} 생성 실패", getResourceName(), e); + return ResponseEntity.internalServerError().body( + ApiResponse.error("Failed to create " + getResourceName() + ": " + e.getMessage()) + ); + } + } + + /** + * 단건 조회 + */ + @Operation( + summary = "리소스 조회", + description = "ID로 특정 리소스를 조회합니다", + responses = { + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "200", + 
description = "조회 성공" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "404", + description = "리소스 없음" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "500", + description = "서버 오류" + ) + } + ) + @GetMapping("/{id}") + public ResponseEntity> getById( + @Parameter(description = "리소스 ID", required = true) + @PathVariable ID id) { + log.info("{} 조회 요청: ID={}", getResourceName(), id); + try { + return getService().findById(id) + .map(dto -> ResponseEntity.ok(ApiResponse.success(dto))) + .orElse(ResponseEntity.notFound().build()); + } catch (Exception e) { + log.error("{} 조회 실패: ID={}", getResourceName(), id, e); + return ResponseEntity.internalServerError().body( + ApiResponse.error("Failed to get " + getResourceName() + ": " + e.getMessage()) + ); + } + } + + /** + * 전체 조회 + */ + @Operation( + summary = "전체 리소스 조회", + description = "모든 리소스를 조회합니다", + responses = { + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "200", + description = "조회 성공" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "500", + description = "서버 오류" + ) + } + ) + @GetMapping + public ResponseEntity>> getAll() { + log.info("{} 전체 조회 요청", getResourceName()); + try { + List list = getService().findAll(); + return ResponseEntity.ok(ApiResponse.success(list)); + } catch (Exception e) { + log.error("{} 전체 조회 실패", getResourceName(), e); + return ResponseEntity.internalServerError().body( + ApiResponse.error("Failed to get all " + getResourceName() + ": " + e.getMessage()) + ); + } + } + + /** + * 페이징 조회 (JDBC 기반) + * + * @param offset 시작 위치 (기본값: 0) + * @param limit 조회 개수 (기본값: 20) + */ + @Operation( + summary = "페이징 조회", + description = "페이지 단위로 리소스를 조회합니다", + responses = { + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "200", + description = "조회 성공" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "500", + description = "서버 오류" + ) + } + 
) + @GetMapping("/page") + public ResponseEntity>> getPage( + @Parameter(description = "시작 위치 (0부터 시작)", example = "0") + @RequestParam(defaultValue = "0") int offset, + @Parameter(description = "조회 개수", example = "20") + @RequestParam(defaultValue = "20") int limit) { + log.info("{} 페이징 조회 요청: offset={}, limit={}", + getResourceName(), offset, limit); + try { + List list = getService().findAll(offset, limit); + long total = getService().count(); + + // 페이징 정보를 포함한 응답 + return ResponseEntity.ok( + ApiResponse.success("Retrieved " + list.size() + " items (total: " + total + ")", list) + ); + } catch (Exception e) { + log.error("{} 페이징 조회 실패", getResourceName(), e); + return ResponseEntity.internalServerError().body( + ApiResponse.error("Failed to get page of " + getResourceName() + ": " + e.getMessage()) + ); + } + } + + /** + * 단건 수정 + */ + @Operation( + summary = "리소스 수정", + description = "기존 리소스를 수정합니다", + responses = { + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "200", + description = "수정 성공" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "404", + description = "리소스 없음" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "500", + description = "서버 오류" + ) + } + ) + @PutMapping("/{id}") + public ResponseEntity> update( + @Parameter(description = "리소스 ID", required = true) + @PathVariable ID id, + @Parameter(description = "수정할 리소스 데이터", required = true) + @RequestBody D dto) { + log.info("{} 수정 요청: ID={}", getResourceName(), id); + try { + D updated = getService().update(id, dto); + return ResponseEntity.ok( + ApiResponse.success(getResourceName() + " updated successfully", updated) + ); + } catch (IllegalArgumentException e) { + return ResponseEntity.notFound().build(); + } catch (Exception e) { + log.error("{} 수정 실패: ID={}", getResourceName(), id, e); + return ResponseEntity.internalServerError().body( + ApiResponse.error("Failed to update " + getResourceName() + ": " + 
e.getMessage()) + ); + } + } + + /** + * 단건 삭제 + */ + @Operation( + summary = "리소스 삭제", + description = "기존 리소스를 삭제합니다", + responses = { + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "200", + description = "삭제 성공" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "404", + description = "리소스 없음" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "500", + description = "서버 오류" + ) + } + ) + @DeleteMapping("/{id}") + public ResponseEntity> delete( + @Parameter(description = "리소스 ID", required = true) + @PathVariable ID id) { + log.info("{} 삭제 요청: ID={}", getResourceName(), id); + try { + getService().deleteById(id); + return ResponseEntity.ok( + ApiResponse.success(getResourceName() + " deleted successfully", null) + ); + } catch (IllegalArgumentException e) { + return ResponseEntity.notFound().build(); + } catch (Exception e) { + log.error("{} 삭제 실패: ID={}", getResourceName(), id, e); + return ResponseEntity.internalServerError().body( + ApiResponse.error("Failed to delete " + getResourceName() + ": " + e.getMessage()) + ); + } + } + + /** + * 존재 여부 확인 + */ + @Operation( + summary = "리소스 존재 확인", + description = "특정 ID의 리소스가 존재하는지 확인합니다", + responses = { + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "200", + description = "확인 성공" + ), + @io.swagger.v3.oas.annotations.responses.ApiResponse( + responseCode = "500", + description = "서버 오류" + ) + } + ) + @GetMapping("/{id}/exists") + public ResponseEntity> exists( + @Parameter(description = "리소스 ID", required = true) + @PathVariable ID id) { + log.debug("{} 존재 여부 확인: ID={}", getResourceName(), id); + try { + boolean exists = getService().existsById(id); + return ResponseEntity.ok(ApiResponse.success(exists)); + } catch (Exception e) { + log.error("{} 존재 여부 확인 실패: ID={}", getResourceName(), id, e); + return ResponseEntity.internalServerError().body( + ApiResponse.error("Failed to check existence: " + e.getMessage()) 
+ ); + } + } +} diff --git a/src/main/java/com/snp/batch/common/web/dto/BaseDto.java b/src/main/java/com/snp/batch/common/web/dto/BaseDto.java new file mode 100644 index 0000000..46230bf --- /dev/null +++ b/src/main/java/com/snp/batch/common/web/dto/BaseDto.java @@ -0,0 +1,33 @@ +package com.snp.batch.common.web.dto; + +import lombok.Data; + +import java.time.LocalDateTime; + +/** + * 모든 DTO의 공통 베이스 클래스 + * 생성/수정 정보 등 공통 필드 + */ +@Data +public abstract class BaseDto { + + /** + * 생성 일시 + */ + private LocalDateTime createdAt; + + /** + * 수정 일시 + */ + private LocalDateTime updatedAt; + + /** + * 생성자 + */ + private String createdBy; + + /** + * 수정자 + */ + private String updatedBy; +} diff --git a/src/main/java/com/snp/batch/common/web/service/BaseHybridService.java b/src/main/java/com/snp/batch/common/web/service/BaseHybridService.java new file mode 100644 index 0000000..7499dcb --- /dev/null +++ b/src/main/java/com/snp/batch/common/web/service/BaseHybridService.java @@ -0,0 +1,202 @@ +package com.snp.batch.common.web.service; + +import com.snp.batch.common.batch.repository.BaseJdbcRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.Duration; +import java.time.LocalDateTime; +import java.util.Map; +import java.util.Optional; + +/** + * 하이브리드 서비스 Base 클래스 (DB 캐시 + 외부 API 프록시) + * + * 사용 시나리오: + * 1. 클라이언트 요청 → DB 조회 (캐시 Hit) + * - 캐시 데이터 유효 시 즉시 반환 + * 2. 
캐시 Miss 또는 만료 시 + * - 외부 서비스 API 호출 + * - DB에 저장 (캐시 갱신) + * - 클라이언트에게 반환 + * + * 장점: + * - 빠른 응답 (DB 캐시) + * - 외부 서비스 장애 시에도 캐시 데이터 제공 가능 + * - 외부 API 호출 횟수 감소 (비용 절감) + * + * @param Entity 타입 + * @param DTO 타입 + * @param ID 타입 + */ +@Slf4j +public abstract class BaseHybridService extends BaseServiceImpl { + + /** + * WebClient 반환 (하위 클래스에서 구현) + */ + protected abstract WebClient getWebClient(); + + /** + * 외부 서비스 이름 반환 + */ + protected abstract String getExternalServiceName(); + + /** + * 캐시 유효 시간 (초) + * 기본값: 300초 (5분) + */ + protected long getCacheTtlSeconds() { + return 300; + } + + /** + * 요청 타임아웃 + */ + protected Duration getTimeout() { + return Duration.ofSeconds(30); + } + + /** + * 하이브리드 조회: DB 캐시 우선, 없으면 외부 API 호출 + * + * @param id 조회 키 + * @return DTO + */ + @Transactional + public D findByIdHybrid(ID id) { + log.info("[하이브리드] ID로 조회: {}", id); + + // 1. DB 캐시 조회 + Optional cached = findById(id); + + if (cached.isPresent()) { + // 캐시 유효성 검증 + if (isCacheValid(cached.get())) { + log.info("[하이브리드] 캐시 Hit - DB에서 반환"); + return cached.get(); + } else { + log.info("[하이브리드] 캐시 만료 - 외부 API 호출"); + } + } else { + log.info("[하이브리드] 캐시 Miss - 외부 API 호출"); + } + + // 2. 외부 API 호출 + try { + D externalData = fetchFromExternalApi(id); + + // 3. DB 저장 (캐시 갱신) + T entity = toEntity(externalData); + T saved = getRepository().save(entity); + + log.info("[하이브리드] 외부 데이터 DB 저장 완료"); + return toDto(saved); + + } catch (Exception e) { + log.error("[하이브리드] 외부 API 호출 실패: {}", e.getMessage()); + + // 4. 
외부 API 실패 시 만료된 캐시라도 반환 (Fallback) + if (cached.isPresent()) { + log.warn("[하이브리드] Fallback - 만료된 캐시 반환"); + return cached.get(); + } + + throw new RuntimeException("데이터 조회 실패: " + e.getMessage(), e); + } + } + + /** + * 외부 API에서 데이터 조회 (하위 클래스에서 구현) + * + * @param id 조회 키 + * @return DTO + */ + protected abstract D fetchFromExternalApi(ID id) throws Exception; + + /** + * 캐시 유효성 검증 + * 기본 구현: updated_at 기준으로 TTL 체크 + * + * @param dto 캐시 데이터 + * @return 유효 여부 + */ + protected boolean isCacheValid(D dto) { + // BaseDto를 상속한 경우 updatedAt 체크 + try { + LocalDateTime updatedAt = extractUpdatedAt(dto); + if (updatedAt == null) { + return false; + } + + LocalDateTime now = LocalDateTime.now(); + long elapsedSeconds = Duration.between(updatedAt, now).getSeconds(); + + return elapsedSeconds < getCacheTtlSeconds(); + + } catch (Exception e) { + log.warn("캐시 유효성 검증 실패 - 항상 최신 데이터 조회: {}", e.getMessage()); + return false; + } + } + + /** + * DTO에서 updatedAt 추출 (하위 클래스에서 오버라이드 가능) + */ + protected LocalDateTime extractUpdatedAt(D dto) { + // 기본 구현: 항상 캐시 무효 (외부 API 호출) + return null; + } + + /** + * 강제 캐시 갱신 (외부 API 호출 강제) + */ + @Transactional + public D refreshCache(ID id) throws Exception { + log.info("[하이브리드] 캐시 강제 갱신: {}", id); + + D externalData = fetchFromExternalApi(id); + T entity = toEntity(externalData); + T saved = getRepository().save(entity); + + return toDto(saved); + } + + /** + * 외부 API GET 요청 + */ + protected RES callExternalGet(String endpoint, Map params, Class responseType) { + log.info("[{}] GET 요청: endpoint={}", getExternalServiceName(), endpoint); + + return getWebClient() + .get() + .uri(uriBuilder -> { + uriBuilder.path(endpoint); + if (params != null) { + params.forEach(uriBuilder::queryParam); + } + return uriBuilder.build(); + }) + .retrieve() + .bodyToMono(responseType) + .timeout(getTimeout()) + .block(); + } + + /** + * 외부 API POST 요청 + */ + protected RES callExternalPost(String endpoint, REQ requestBody, Class responseType) { + log.info("[{}] 
POST 요청: endpoint={}", getExternalServiceName(), endpoint); + + return getWebClient() + .post() + .uri(endpoint) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(responseType) + .timeout(getTimeout()) + .block(); + } +} diff --git a/src/main/java/com/snp/batch/common/web/service/BaseProxyService.java b/src/main/java/com/snp/batch/common/web/service/BaseProxyService.java new file mode 100644 index 0000000..05c0032 --- /dev/null +++ b/src/main/java/com/snp/batch/common/web/service/BaseProxyService.java @@ -0,0 +1,176 @@ +package com.snp.batch.common.web.service; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.web.reactive.function.client.WebClient; +import reactor.core.publisher.Mono; + +import java.time.Duration; +import java.util.Map; + +/** + * 외부 API 프록시 서비스 Base 클래스 + * + * 목적: 해외 외부 서비스를 국내에서 우회 접근할 수 있도록 프록시 역할 수행 + * + * 사용 시나리오: + * - 외부 서비스가 해외에 있고 국내 IP에서만 접근 가능 + * - 클라이언트 A → 우리 서버 (국내) → 외부 서비스 (해외) → 응답 전달 + * + * 장점: + * - 실시간 데이터 제공 (DB 캐시 없이) + * - 외부 서비스의 최신 데이터 보장 + * - DB 저장 부담 없음 + * + * @param 요청 DTO 타입 + * @param 응답 DTO 타입 + */ +@Slf4j +public abstract class BaseProxyService { + + /** + * WebClient 반환 (하위 클래스에서 구현) + * 외부 서비스별로 인증, Base URL 등 설정 + */ + protected abstract WebClient getWebClient(); + + /** + * 외부 서비스 이름 반환 (로깅용) + */ + protected abstract String getServiceName(); + + /** + * 요청 타임아웃 (밀리초) + * 기본값: 30초 + */ + protected Duration getTimeout() { + return Duration.ofSeconds(30); + } + + /** + * GET 요청 프록시 + * + * @param endpoint 엔드포인트 경로 (예: "/api/ships") + * @param params 쿼리 파라미터 + * @param responseType 응답 클래스 타입 + * @return 외부 서비스 응답 + */ + public RES proxyGet(String endpoint, Map params, Class responseType) { + log.info("[{}] GET 요청 프록시: endpoint={}, params={}", getServiceName(), endpoint, params); + + try { + WebClient.RequestHeadersSpec spec = getWebClient() + .get() + .uri(uriBuilder -> { + uriBuilder.path(endpoint); + if (params != null) { + params.forEach(uriBuilder::queryParam); + } + return 
uriBuilder.build(); + }); + + RES response = spec.retrieve() + .bodyToMono(responseType) + .timeout(getTimeout()) + .block(); + + log.info("[{}] 응답 성공", getServiceName()); + return response; + + } catch (Exception e) { + log.error("[{}] 프록시 요청 실패: {}", getServiceName(), e.getMessage(), e); + throw new RuntimeException("외부 서비스 호출 실패: " + e.getMessage(), e); + } + } + + /** + * POST 요청 프록시 + * + * @param endpoint 엔드포인트 경로 + * @param requestBody 요청 본문 + * @param responseType 응답 클래스 타입 + * @return 외부 서비스 응답 + */ + public RES proxyPost(String endpoint, REQ requestBody, Class responseType) { + log.info("[{}] POST 요청 프록시: endpoint={}", getServiceName(), endpoint); + + try { + RES response = getWebClient() + .post() + .uri(endpoint) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(responseType) + .timeout(getTimeout()) + .block(); + + log.info("[{}] 응답 성공", getServiceName()); + return response; + + } catch (Exception e) { + log.error("[{}] 프록시 요청 실패: {}", getServiceName(), e.getMessage(), e); + throw new RuntimeException("외부 서비스 호출 실패: " + e.getMessage(), e); + } + } + + /** + * PUT 요청 프록시 + */ + public RES proxyPut(String endpoint, REQ requestBody, Class responseType) { + log.info("[{}] PUT 요청 프록시: endpoint={}", getServiceName(), endpoint); + + try { + RES response = getWebClient() + .put() + .uri(endpoint) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(responseType) + .timeout(getTimeout()) + .block(); + + log.info("[{}] 응답 성공", getServiceName()); + return response; + + } catch (Exception e) { + log.error("[{}] 프록시 요청 실패: {}", getServiceName(), e.getMessage(), e); + throw new RuntimeException("외부 서비스 호출 실패: " + e.getMessage(), e); + } + } + + /** + * DELETE 요청 프록시 + */ + public void proxyDelete(String endpoint, Map params) { + log.info("[{}] DELETE 요청 프록시: endpoint={}, params={}", getServiceName(), endpoint, params); + + try { + getWebClient() + .delete() + .uri(uriBuilder -> { + uriBuilder.path(endpoint); + if (params != null) { + 
params.forEach(uriBuilder::queryParam); + } + return uriBuilder.build(); + }) + .retrieve() + .bodyToMono(Void.class) + .timeout(getTimeout()) + .block(); + + log.info("[{}] DELETE 성공", getServiceName()); + + } catch (Exception e) { + log.error("[{}] 프록시 DELETE 실패: {}", getServiceName(), e.getMessage(), e); + throw new RuntimeException("외부 서비스 호출 실패: " + e.getMessage(), e); + } + } + + /** + * 커스텀 요청 처리 (하위 클래스에서 오버라이드) + * 복잡한 로직이 필요한 경우 사용 + */ + protected RES customRequest(REQ request) { + throw new UnsupportedOperationException("커스텀 요청이 구현되지 않았습니다"); + } +} diff --git a/src/main/java/com/snp/batch/common/web/service/BaseService.java b/src/main/java/com/snp/batch/common/web/service/BaseService.java new file mode 100644 index 0000000..663870b --- /dev/null +++ b/src/main/java/com/snp/batch/common/web/service/BaseService.java @@ -0,0 +1,94 @@ +package com.snp.batch.common.web.service; + +import java.util.List; +import java.util.Optional; + +/** + * 모든 서비스의 공통 인터페이스 (JDBC 기반) + * CRUD 기본 메서드 정의 + * + * @param Entity 타입 + * @param DTO 타입 + * @param ID 타입 + */ +public interface BaseService { + + /** + * 단건 생성 + * + * @param dto 생성할 데이터 DTO + * @return 생성된 데이터 DTO + */ + D create(D dto); + + /** + * 단건 조회 + * + * @param id 조회할 ID + * @return 조회된 데이터 DTO (Optional) + */ + Optional findById(ID id); + + /** + * 전체 조회 + * + * @return 전체 데이터 DTO 리스트 + */ + List findAll(); + + /** + * 페이징 조회 + * + * @param offset 시작 위치 (0부터 시작) + * @param limit 조회 개수 + * @return 페이징된 데이터 리스트 + */ + List findAll(int offset, int limit); + + /** + * 전체 개수 조회 + * + * @return 전체 데이터 개수 + */ + long count(); + + /** + * 단건 수정 + * + * @param id 수정할 ID + * @param dto 수정할 데이터 DTO + * @return 수정된 데이터 DTO + */ + D update(ID id, D dto); + + /** + * 단건 삭제 + * + * @param id 삭제할 ID + */ + void deleteById(ID id); + + /** + * 존재 여부 확인 + * + * @param id 확인할 ID + * @return 존재 여부 + */ + boolean existsById(ID id); + + /** + * Entity를 DTO로 변환 + * + * @param entity 엔티티 + * @return DTO + */ + D toDto(T entity); + + 
/** + * DTO를 Entity로 변환 + * + * @param dto DTO + * @return 엔티티 + */ + T toEntity(D dto); +} diff --git a/src/main/java/com/snp/batch/common/web/service/BaseServiceImpl.java b/src/main/java/com/snp/batch/common/web/service/BaseServiceImpl.java new file mode 100644 index 0000000..3308a8f --- /dev/null +++ b/src/main/java/com/snp/batch/common/web/service/BaseServiceImpl.java @@ -0,0 +1,131 @@ +package com.snp.batch.common.web.service; + +import com.snp.batch.common.batch.repository.BaseJdbcRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.annotation.Transactional; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + * BaseService의 기본 구현 (JDBC 기반) + * 공통 CRUD 로직 구현 + * + * @param Entity 타입 + * @param DTO 타입 + * @param ID 타입 + */ +@Slf4j +@Transactional(readOnly = true) +public abstract class BaseServiceImpl implements BaseService { + + /** + * Repository 반환 (하위 클래스에서 구현) + */ + protected abstract BaseJdbcRepository getRepository(); + + /** + * 엔티티 이름 반환 (로깅용) + */ + protected abstract String getEntityName(); + + @Override + @Transactional + public D create(D dto) { + log.info("{} 생성 시작", getEntityName()); + T entity = toEntity(dto); + T saved = getRepository().save(entity); + log.info("{} 생성 완료: ID={}", getEntityName(), extractId(saved)); + return toDto(saved); + } + + @Override + public Optional findById(ID id) { + log.debug("{} 조회: ID={}", getEntityName(), id); + return getRepository().findById(id).map(this::toDto); + } + + @Override + public List findAll() { + log.debug("{} 전체 조회", getEntityName()); + return getRepository().findAll().stream() + .map(this::toDto) + .collect(Collectors.toList()); + } + + @Override + public List findAll(int offset, int limit) { + log.debug("{} 페이징 조회: offset={}, limit={}", getEntityName(), offset, limit); + + // 하위 클래스에서 제공하는 페이징 쿼리 실행 + List entities = executePagingQuery(offset, limit); + + return entities.stream() + .map(this::toDto) + 
.collect(Collectors.toList()); + } + + /** + * 페이징 쿼리 실행 (하위 클래스에서 구현) + * + * @param offset 시작 위치 + * @param limit 조회 개수 + * @return Entity 리스트 + */ + protected abstract List executePagingQuery(int offset, int limit); + + @Override + public long count() { + log.debug("{} 개수 조회", getEntityName()); + return getRepository().count(); + } + + @Override + @Transactional + public D update(ID id, D dto) { + log.info("{} 수정 시작: ID={}", getEntityName(), id); + + T entity = getRepository().findById(id) + .orElseThrow(() -> new IllegalArgumentException( + getEntityName() + " not found with id: " + id)); + + updateEntity(entity, dto); + T updated = getRepository().save(entity); + + log.info("{} 수정 완료: ID={}", getEntityName(), id); + return toDto(updated); + } + + @Override + @Transactional + public void deleteById(ID id) { + log.info("{} 삭제: ID={}", getEntityName(), id); + + if (!getRepository().existsById(id)) { + throw new IllegalArgumentException( + getEntityName() + " not found with id: " + id); + } + + getRepository().deleteById(id); + log.info("{} 삭제 완료: ID={}", getEntityName(), id); + } + + @Override + public boolean existsById(ID id) { + return getRepository().existsById(id); + } + + /** + * Entity 업데이트 (하위 클래스에서 구현) + * + * @param entity 업데이트할 엔티티 + * @param dto 업데이트 데이터 + */ + protected abstract void updateEntity(T entity, D dto); + + /** + * Entity에서 ID 추출 (로깅용, 하위 클래스에서 구현) + */ + protected abstract ID extractId(T entity); +} diff --git a/src/main/java/com/snp/batch/global/config/DataSourceConfig.java b/src/main/java/com/snp/batch/global/config/DataSourceConfig.java new file mode 100644 index 0000000..40240e8 --- /dev/null +++ b/src/main/java/com/snp/batch/global/config/DataSourceConfig.java @@ -0,0 +1,52 @@ +package com.snp.batch.global.config; + +import javax.sql.DataSource; + +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.boot.context.properties.ConfigurationProperties; +import 
org.springframework.boot.jdbc.DataSourceBuilder; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; +import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.transaction.PlatformTransactionManager; +import jakarta.persistence.EntityManagerFactory; + +@Configuration +public class DataSourceConfig { + + // ============================================================== + // 1. 배치 메타 및 Quartz DataSource (1번 DB) + // ============================================================== + @Bean + @Primary // Spring Batch/Boot가 기본적으로 이 DataSource를 메타데이터 저장용으로 사용하도록 지정 + @ConfigurationProperties(prefix = "spring.batch-meta-datasource") + public DataSource batchDataSource() { + return DataSourceBuilder.create().build(); + } + + // 1-1. 배치 메타 데이터용 트랜잭션 매니저 (JPA + JDBC 모두 지원) + // Spring Data JPA가 기본으로 'transactionManager' 이름을 탐색하므로 빈 이름을 맞춤 + @Bean(name = "transactionManager") + @Primary + public PlatformTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) { + return new JpaTransactionManager(entityManagerFactory); + } + // ============================================================== + // 2. 비즈니스 데이터 DataSource (2번 DB) + // ============================================================== + @Bean + @ConfigurationProperties(prefix = "spring.business-datasource") + public DataSource businessDataSource() { + return DataSourceBuilder.create().build(); + } + + // 2-1. 
비즈니스 데이터용 트랜잭션 매니저 (Step/Chunk에 사용) + @Bean + public PlatformTransactionManager businessTransactionManager( + @Qualifier("businessDataSource") DataSource businessDataSource) { + return new DataSourceTransactionManager(businessDataSource); + } + +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/global/config/MaritimeApiWebClientConfig.java b/src/main/java/com/snp/batch/global/config/MaritimeApiWebClientConfig.java new file mode 100644 index 0000000..2a3a6c3 --- /dev/null +++ b/src/main/java/com/snp/batch/global/config/MaritimeApiWebClientConfig.java @@ -0,0 +1,103 @@ +package com.snp.batch.global.config; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * Maritime API WebClient 설정 + * + * 목적: + * - Maritime API 서버에 대한 WebClient Bean 등록 + * - 동일한 API 서버를 사용하는 여러 Job에서 재사용 + * - 설정 변경 시 한 곳에서만 수정 + * + * 사용 Job: + * - shipDataImportJob: IMO 번호 조회 + * - shipDetailImportJob: 선박 상세 정보 조회 + * + * 다른 API 서버 추가 시: + * - 새로운 Config 클래스 생성 (예: OtherApiWebClientConfig) + * - Bean 이름을 다르게 지정 (예: @Bean(name = "otherApiWebClient")) + */ +@Slf4j +@Configuration +public class MaritimeApiWebClientConfig { + + @Value("${app.batch.ship-api.url}") + private String maritimeApiUrl; + + @Value("${app.batch.ship-api.username}") + private String maritimeApiUsername; + + @Value("${app.batch.ship-api.password}") + private String maritimeApiPassword; + + /** + * Maritime API용 WebClient Bean + * + * 설정: + * - Base URL: Maritime API 서버 주소 + * - 인증: Basic Authentication + * - 버퍼: 20MB (대용량 응답 처리) + * + * @return Maritime API WebClient + */ + @Bean(name = "maritimeApiWebClient") + public WebClient maritimeApiWebClient() { + log.info("========================================"); + log.info("Maritime API WebClient 생성"); + log.info("Base URL: 
{}", maritimeApiUrl); + log.info("========================================"); + + return WebClient.builder() + .baseUrl(maritimeApiUrl) + .defaultHeaders(headers -> headers.setBasicAuth(maritimeApiUsername, maritimeApiPassword)) + .codecs(configurer -> configurer + .defaultCodecs() + .maxInMemorySize(20 * 1024 * 1024)) // 20MB 버퍼 + .build(); + } +} + + +/** + * ======================================== + * 다른 API 서버 추가 예시 + * ======================================== + * + * 1. 새로운 Config 클래스 생성: + * + * @Configuration + * public class ExternalApiWebClientConfig { + * + * @Bean(name = "externalApiWebClient") + * public WebClient externalApiWebClient( + * @Value("${app.batch.external-api.url}") String url, + * @Value("${app.batch.external-api.token}") String token) { + * + * return WebClient.builder() + * .baseUrl(url) + * .defaultHeader("Authorization", "Bearer " + token) + * .build(); + * } + * } + * + * 2. JobConfig에서 사용: + * + * public ExternalJobConfig( + * ..., + * @Qualifier("externalApiWebClient") WebClient externalApiWebClient) { + * this.webClient = externalApiWebClient; + * } + * + * 3. 
application.yml에 설정 추가: + * + * app: + * batch: + * external-api: + * url: https://external-api.example.com + * token: ${EXTERNAL_API_TOKEN} + */ diff --git a/src/main/java/com/snp/batch/global/config/QuartzConfig.java b/src/main/java/com/snp/batch/global/config/QuartzConfig.java new file mode 100644 index 0000000..572d853 --- /dev/null +++ b/src/main/java/com/snp/batch/global/config/QuartzConfig.java @@ -0,0 +1,82 @@ +package com.snp.batch.global.config; + +import org.quartz.spi.TriggerFiredBundle; +import org.springframework.beans.factory.annotation.Qualifier; // ⚠️ 추가 필요 +import org.springframework.beans.factory.config.AutowireCapableBeanFactory; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.scheduling.quartz.SchedulerFactoryBean; +import org.springframework.scheduling.quartz.SpringBeanJobFactory; + +import javax.sql.DataSource; +import java.util.Properties; + +/** + * Quartz 설정 + * Batch Meta DataSource (1번 DB)를 Quartz 메타데이터 저장용으로 재활용합니다. + */ +@Configuration +public class QuartzConfig { + + private final DataSource batchDataSource; // 1번 DB를 주입받을 필드 + + // 1. 생성자를 통해 @Qualifier를 사용하여 'batchDataSource' Bean을 주입받습니다. + // 'batchDataSource'는 Spring Batch 메타데이터와 Quartz 메타데이터를 함께 저장할 DB입니다. + public QuartzConfig(@Qualifier("batchDataSource") DataSource batchDataSource) { + this.batchDataSource = batchDataSource; + } + + /** + * Quartz Scheduler Factory Bean 설정 + * Spring Boot Auto-configuration 대신, batchDataSource를 명시적으로 설정합니다. + */ + @Bean + public SchedulerFactoryBean schedulerFactoryBean(ApplicationContext applicationContext) { + SchedulerFactoryBean factory = new SchedulerFactoryBean(); + + // 2. 주입받은 batchDataSource를 SchedulerFactoryBean에 명시적으로 설정합니다. + // Quartz 메타데이터(Job, Trigger 정보)가 이 DB에 저장됩니다. 
+ factory.setDataSource(batchDataSource); + + factory.setJobFactory(springBeanJobFactory(applicationContext)); + factory.setOverwriteExistingJobs(true); + factory.setAutoStartup(true); + + Properties quartzProps = new Properties(); + quartzProps.put("org.quartz.jobStore.driverDelegateClass", "org.quartz.impl.jdbcjobstore.PostgreSQLDelegate"); + factory.setQuartzProperties(quartzProps); + return factory; + } + + /** + * Spring Bean 자동 주입을 지원하는 JobFactory (기존 코드 유지) + */ + @Bean + public SpringBeanJobFactory springBeanJobFactory(ApplicationContext applicationContext) { + AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory(); + jobFactory.setApplicationContext(applicationContext); + return jobFactory; + } + + /** + * Quartz Job에서 Spring Bean 자동 주입을 가능하게 하는 Factory (기존 코드 유지) + */ + public static class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory implements ApplicationContextAware { + + private AutowireCapableBeanFactory beanFactory; + + @Override + public void setApplicationContext(ApplicationContext applicationContext) { + beanFactory = applicationContext.getAutowireCapableBeanFactory(); + } + + @Override + protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception { + Object jobInstance = super.createJobInstance(bundle); + beanFactory.autowireBean(jobInstance); + return jobInstance; + } + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/global/config/SwaggerConfig.java b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java new file mode 100644 index 0000000..3e0892c --- /dev/null +++ b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java @@ -0,0 +1,86 @@ +package com.snp.batch.global.config; + +import io.swagger.v3.oas.models.OpenAPI; +import io.swagger.v3.oas.models.info.Contact; +import io.swagger.v3.oas.models.info.Info; +import io.swagger.v3.oas.models.info.License; +import io.swagger.v3.oas.models.servers.Server; +import 
org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import java.util.List; + +/** + * Swagger/OpenAPI 3.0 설정 + * + * Swagger UI 접속 URL: + * - Swagger UI: http://localhost:8051/snp-sync/swagger-ui/index.html + * - API 문서 (JSON): http://localhost:8051/snp-sync/v3/api-docs + * - API 문서 (YAML): http://localhost:8051/snp-sync/v3/api-docs.yaml + * + * 주요 기능: + * - REST API 자동 문서화 + * - API 테스트 UI 제공 + * - OpenAPI 3.0 스펙 준수 + */ +@Configuration +public class SwaggerConfig { + + @Value("${server.port:8051}") + private int serverPort; + + @Value("${server.servlet.context-path:}") + private String contextPath; + + @Bean + public OpenAPI openAPI() { + return new OpenAPI() + .info(apiInfo()) + .servers(List.of( + new Server() + .url("http://localhost:" + serverPort + contextPath) + .description("로컬 개발 서버"), + new Server() + .url("http://10.26.252.39:" + serverPort + contextPath) + .description("개발 서버"), + new Server() + .url("http://10.187.58.58:" + serverPort + contextPath) + .description("운영 서버") + )); + } + + private Info apiInfo() { + return new Info() + .title("SNP Batch REST API") + .description(""" + ## SNP Sync Batch 시스템 REST API 문서 + + 해양 데이터 API 동기화 시스템의 REST API 문서입니다. 
+ + ### 제공 API + - **Batch Management API**: 배치 Job 실행, 중지, 이력 조회 + - **Schedule API**: Quartz 기반 스케줄 CRUD 및 활성화/비활성화 + - **Dashboard API**: 대시보드 데이터 및 타임라인 조회 + + ### 주요 기능 + - 배치 Job 수동 실행 및 중지 + - Job/Step 실행 이력 상세 조회 + - Cron 기반 스케줄 관리 (Quartz JDBC Store) + - 대시보드 현황 및 타임라인 시각화 + + ### 버전 정보 + - API Version: v1.0.0 + - Spring Boot: 3.2.1 + - Spring Batch: 5.1.0 + """) + .version("v1.0.0") + .contact(new Contact() + .name("SNP Batch Team") + .email("support@snp-batch.com") + .url("https://github.com/snp-batch")) + .license(new License() + .name("Apache 2.0") + .url("https://www.apache.org/licenses/LICENSE-2.0")); + } +} diff --git a/src/main/java/com/snp/batch/global/controller/BatchController.java b/src/main/java/com/snp/batch/global/controller/BatchController.java new file mode 100644 index 0000000..5ddd26e --- /dev/null +++ b/src/main/java/com/snp/batch/global/controller/BatchController.java @@ -0,0 +1,327 @@ +package com.snp.batch.global.controller; + +import com.snp.batch.global.dto.JobExecutionDto; +import com.snp.batch.global.dto.ScheduleRequest; +import com.snp.batch.global.dto.ScheduleResponse; +import com.snp.batch.service.BatchService; +import com.snp.batch.service.ScheduleService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import io.swagger.v3.oas.annotations.tags.Tag; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; + +import java.util.List; +import java.util.Map; + +@Slf4j +@RestController +@RequestMapping("/api/batch") +@RequiredArgsConstructor +@Tag(name = "Batch Management API", description = "배치 작업 실행 및 스케줄 관리 API") +public class BatchController { + + private final BatchService batchService; + private final ScheduleService scheduleService; + + 
@Operation(summary = "배치 작업 실행", description = "지정된 배치 작업을 즉시 실행합니다. 쿼리 파라미터로 Job Parameters 전달 가능") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "작업 실행 성공"), + @ApiResponse(responseCode = "500", description = "작업 실행 실패") + }) + @PostMapping("/jobs/{jobName}/execute") + public ResponseEntity> executeJob( + @Parameter(description = "실행할 배치 작업 이름", required = true, example = "shipDetailSyncJob") + @PathVariable String jobName, + @Parameter(description = "Job Parameters (동적 파라미터)", required = false, example = "?param1=value1¶m2=value2") + @RequestParam(required = false) Map params) { + log.info("Received request to execute job: {} with params: {}", jobName, params); + try { + Long executionId = batchService.executeJob(jobName, params); + return ResponseEntity.ok(Map.of( + "success", true, + "message", "Job started successfully", + "executionId", executionId + )); + } catch (Exception e) { + log.error("Error executing job: {}", jobName, e); + return ResponseEntity.internalServerError().body(Map.of( + "success", false, + "message", "Failed to start job: " + e.getMessage() + )); + } + } + + @Operation(summary = "배치 작업 목록 조회", description = "등록된 모든 배치 작업 목록을 조회합니다") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "조회 성공") + }) + @GetMapping("/jobs") + public ResponseEntity> listJobs() { + log.info("Received request to list all jobs"); + List jobs = batchService.listAllJobs(); + return ResponseEntity.ok(jobs); + } + + @Operation(summary = "Job 상세 목록 조회", description = "모든 Job의 최근 실행 상태 및 스케줄 정보를 조회합니다") + @GetMapping("/jobs/detail") + public ResponseEntity> getJobsDetail() { + List jobs = batchService.getJobsWithDetail(); + return ResponseEntity.ok(jobs); + } + + @Operation(summary = "배치 작업 실행 이력 조회", description = "특정 배치 작업의 실행 이력을 조회합니다") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "조회 성공") + }) + @GetMapping("/jobs/{jobName}/executions") + public ResponseEntity> 
getJobExecutions( + @Parameter(description = "배치 작업 이름", required = true, example = "shipDetailSyncJob") + @PathVariable String jobName) { + log.info("Received request to get executions for job: {}", jobName); + List executions = batchService.getJobExecutions(jobName); + return ResponseEntity.ok(executions); + } + + @Operation(summary = "최근 실행 이력 조회", description = "전체 작업의 최근 실행 이력을 조회합니다") + @GetMapping("/executions/recent") + public ResponseEntity> getRecentExecutions( + @RequestParam(defaultValue = "50") int limit) { + List executions = batchService.getRecentExecutions(limit); + return ResponseEntity.ok(executions); + } + + @GetMapping("/executions/{executionId}") + public ResponseEntity getExecutionDetails(@PathVariable Long executionId) { + log.info("Received request to get execution details for: {}", executionId); + try { + JobExecutionDto execution = batchService.getExecutionDetails(executionId); + return ResponseEntity.ok(execution); + } catch (Exception e) { + log.error("Error getting execution details: {}", executionId, e); + return ResponseEntity.notFound().build(); + } + } + + @GetMapping("/executions/{executionId}/detail") + public ResponseEntity getExecutionDetailWithSteps(@PathVariable Long executionId) { + log.info("Received request to get detailed execution for: {}", executionId); + try { + com.snp.batch.global.dto.JobExecutionDetailDto detail = batchService.getExecutionDetailWithSteps(executionId); + return ResponseEntity.ok(detail); + } catch (Exception e) { + log.error("Error getting detailed execution: {}", executionId, e); + return ResponseEntity.notFound().build(); + } + } + + @PostMapping("/executions/{executionId}/stop") + public ResponseEntity> stopExecution(@PathVariable Long executionId) { + log.info("Received request to stop execution: {}", executionId); + try { + batchService.stopExecution(executionId); + return ResponseEntity.ok(Map.of( + "success", true, + "message", "Execution stop requested" + )); + } catch (Exception e) { + 
log.error("Error stopping execution: {}", executionId, e); + return ResponseEntity.internalServerError().body(Map.of( + "success", false, + "message", "Failed to stop execution: " + e.getMessage() + )); + } + } + + @Operation(summary = "스케줄 목록 조회", description = "등록된 모든 스케줄을 조회합니다") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "조회 성공") + }) + @GetMapping("/schedules") + public ResponseEntity> getSchedules() { + log.info("Received request to get all schedules"); + List schedules = scheduleService.getAllSchedules(); + return ResponseEntity.ok(Map.of( + "schedules", schedules, + "count", schedules.size() + )); + } + + @GetMapping("/schedules/{jobName}") + public ResponseEntity getSchedule(@PathVariable String jobName) { + log.debug("Received request to get schedule for job: {}", jobName); + try { + ScheduleResponse schedule = scheduleService.getScheduleByJobName(jobName); + return ResponseEntity.ok(schedule); + } catch (IllegalArgumentException e) { + // 스케줄이 없는 경우 - 정상적인 시나리오 (UI에서 존재 여부 확인용) + log.debug("Schedule not found for job: {} (정상 - 존재 확인)", jobName); + return ResponseEntity.notFound().build(); + } catch (Exception e) { + log.error("Error getting schedule for job: {}", jobName, e); + return ResponseEntity.notFound().build(); + } + } + + @Operation(summary = "스케줄 생성", description = "새로운 배치 작업 스케줄을 등록합니다") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "생성 성공"), + @ApiResponse(responseCode = "500", description = "생성 실패") + }) + @PostMapping("/schedules") + public ResponseEntity> createSchedule( + @Parameter(description = "스케줄 생성 요청 데이터", required = true) + @RequestBody ScheduleRequest request) { + log.info("Received request to create schedule for job: {}", request.getJobName()); + try { + ScheduleResponse schedule = scheduleService.createSchedule(request); + return ResponseEntity.ok(Map.of( + "success", true, + "message", "Schedule created successfully", + "data", schedule + )); + } catch (Exception 
e) { + log.error("Error creating schedule for job: {}", request.getJobName(), e); + return ResponseEntity.internalServerError().body(Map.of( + "success", false, + "message", "Failed to create schedule: " + e.getMessage() + )); + } + } + + @PutMapping("/schedules/{jobName}") + public ResponseEntity> updateSchedule( + @PathVariable String jobName, + @RequestBody Map request) { + log.info("Received request to update schedule for job: {}", jobName); + try { + String cronExpression = request.get("cronExpression"); + String description = request.get("description"); + ScheduleResponse schedule = scheduleService.updateSchedule(jobName, cronExpression, description); + return ResponseEntity.ok(Map.of( + "success", true, + "message", "Schedule updated successfully", + "data", schedule + )); + } catch (Exception e) { + log.error("Error updating schedule for job: {}", jobName, e); + return ResponseEntity.internalServerError().body(Map.of( + "success", false, + "message", "Failed to update schedule: " + e.getMessage() + )); + } + } + + @Operation(summary = "스케줄 삭제", description = "배치 작업 스케줄을 삭제합니다") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "삭제 성공"), + @ApiResponse(responseCode = "500", description = "삭제 실패") + }) + @DeleteMapping("/schedules/{jobName}") + public ResponseEntity> deleteSchedule( + @Parameter(description = "배치 작업 이름", required = true) + @PathVariable String jobName) { + log.info("Received request to delete schedule for job: {}", jobName); + try { + scheduleService.deleteSchedule(jobName); + return ResponseEntity.ok(Map.of( + "success", true, + "message", "Schedule deleted successfully" + )); + } catch (Exception e) { + log.error("Error deleting schedule for job: {}", jobName, e); + return ResponseEntity.internalServerError().body(Map.of( + "success", false, + "message", "Failed to delete schedule: " + e.getMessage() + )); + } + } + + @PatchMapping("/schedules/{jobName}/toggle") + public ResponseEntity> toggleSchedule( + 
@PathVariable String jobName, + @RequestBody Map request) { + log.info("Received request to toggle schedule for job: {}", jobName); + try { + Boolean active = request.get("active"); + ScheduleResponse schedule = scheduleService.toggleScheduleActive(jobName, active); + return ResponseEntity.ok(Map.of( + "success", true, + "message", "Schedule toggled successfully", + "data", schedule + )); + } catch (Exception e) { + log.error("Error toggling schedule for job: {}", jobName, e); + return ResponseEntity.internalServerError().body(Map.of( + "success", false, + "message", "Failed to toggle schedule: " + e.getMessage() + )); + } + } + + @GetMapping("/timeline") + public ResponseEntity getTimeline( + @RequestParam String view, + @RequestParam String date) { + log.info("Received request to get timeline: view={}, date={}", view, date); + try { + com.snp.batch.global.dto.TimelineResponse timeline = batchService.getTimeline(view, date); + return ResponseEntity.ok(timeline); + } catch (Exception e) { + log.error("Error getting timeline", e); + return ResponseEntity.internalServerError().build(); + } + } + + @GetMapping("/dashboard") + public ResponseEntity getDashboard() { + log.info("Received request to get dashboard data"); + try { + com.snp.batch.global.dto.DashboardResponse dashboard = batchService.getDashboardData(); + return ResponseEntity.ok(dashboard); + } catch (Exception e) { + log.error("Error getting dashboard data", e); + return ResponseEntity.internalServerError().build(); + } + } + + @GetMapping("/timeline/period-executions") + public ResponseEntity> getPeriodExecutions( + @RequestParam String jobName, + @RequestParam String view, + @RequestParam String periodKey) { + log.info("Received request to get period executions: jobName={}, view={}, periodKey={}", jobName, view, periodKey); + try { + List executions = batchService.getPeriodExecutions(jobName, view, periodKey); + return ResponseEntity.ok(executions); + } catch (Exception e) { + log.error("Error getting 
period executions", e); + return ResponseEntity.internalServerError().build(); + } + } + + // ── F8: 실행 통계 API ────────────────────────────────────── + + @Operation(summary = "전체 실행 통계", description = "전체 배치 작업의 일별 실행 통계를 조회합니다") + @GetMapping("/statistics") + public ResponseEntity getStatistics( + @Parameter(description = "조회 기간(일)", example = "30") + @RequestParam(defaultValue = "30") int days) { + com.snp.batch.global.dto.ExecutionStatisticsDto stats = batchService.getStatistics(days); + return ResponseEntity.ok(stats); + } + + @Operation(summary = "Job별 실행 통계", description = "특정 배치 작업의 일별 실행 통계를 조회합니다") + @GetMapping("/statistics/{jobName}") + public ResponseEntity getJobStatistics( + @Parameter(description = "Job 이름", required = true) @PathVariable String jobName, + @Parameter(description = "조회 기간(일)", example = "30") + @RequestParam(defaultValue = "30") int days) { + com.snp.batch.global.dto.ExecutionStatisticsDto stats = batchService.getJobStatistics(jobName, days); + return ResponseEntity.ok(stats); + } +} diff --git a/src/main/java/com/snp/batch/global/controller/WebViewController.java b/src/main/java/com/snp/batch/global/controller/WebViewController.java new file mode 100644 index 0000000..a30ac55 --- /dev/null +++ b/src/main/java/com/snp/batch/global/controller/WebViewController.java @@ -0,0 +1,22 @@ +package com.snp.batch.global.controller; + +import org.springframework.stereotype.Controller; +import org.springframework.web.bind.annotation.GetMapping; + +/** + * SPA(React) fallback 라우터 + * + * React Router가 클라이언트 사이드 라우팅을 처리하므로, + * 모든 프론트 경로를 index.html로 포워딩한다. 
+ */ +@Controller +public class WebViewController { + + @GetMapping({"/", "/jobs", "/executions", "/executions/{id:\\d+}", + "/schedules", "/schedule-timeline", + "/jobs/**", "/executions/**", + "/schedules/**", "/schedule-timeline/**"}) + public String forward() { + return "forward:/index.html"; + } +} diff --git a/src/main/java/com/snp/batch/global/dto/DashboardResponse.java b/src/main/java/com/snp/batch/global/dto/DashboardResponse.java new file mode 100644 index 0000000..feab188 --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/DashboardResponse.java @@ -0,0 +1,53 @@ +package com.snp.batch.global.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; +import java.util.List; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class DashboardResponse { + private Stats stats; + private List runningJobs; + private List recentExecutions; + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class Stats { + private int totalSchedules; + private int activeSchedules; + private int inactiveSchedules; + private int totalJobs; + } + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class RunningJob { + private String jobName; + private Long executionId; + private String status; + private LocalDateTime startTime; + } + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class RecentExecution { + private Long executionId; + private String jobName; + private String status; + private LocalDateTime startTime; + private LocalDateTime endTime; + } +} diff --git a/src/main/java/com/snp/batch/global/dto/ExecutionStatisticsDto.java b/src/main/java/com/snp/batch/global/dto/ExecutionStatisticsDto.java new file mode 100644 index 0000000..6dc7b5f --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/ExecutionStatisticsDto.java @@ -0,0 +1,33 @@ +package com.snp.batch.global.dto; 
+ +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.List; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ExecutionStatisticsDto { + + private List dailyStats; + private int totalExecutions; + private int totalSuccess; + private int totalFailed; + private double avgDurationMs; + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class DailyStat { + private String date; + private int successCount; + private int failedCount; + private int otherCount; + private double avgDurationMs; + } +} diff --git a/src/main/java/com/snp/batch/global/dto/JobDetailDto.java b/src/main/java/com/snp/batch/global/dto/JobDetailDto.java new file mode 100644 index 0000000..9883b2a --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/JobDetailDto.java @@ -0,0 +1,30 @@ +package com.snp.batch.global.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class JobDetailDto { + + private String jobName; + private LastExecution lastExecution; + private String scheduleCron; + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class LastExecution { + private Long executionId; + private String status; + private LocalDateTime startTime; + private LocalDateTime endTime; + } +} diff --git a/src/main/java/com/snp/batch/global/dto/JobExecutionDetailDto.java b/src/main/java/com/snp/batch/global/dto/JobExecutionDetailDto.java new file mode 100644 index 0000000..79e4365 --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/JobExecutionDetailDto.java @@ -0,0 +1,89 @@ +package com.snp.batch.global.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; +import 
java.util.List; +import java.util.Map; + +/** + * Job 실행 상세 정보 DTO + * JobExecution + StepExecution 정보 포함 + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class JobExecutionDetailDto { + + // Job Execution 기본 정보 + private Long executionId; + private String jobName; + private String status; + private LocalDateTime startTime; + private LocalDateTime endTime; + private String exitCode; + private String exitMessage; + + // Job Parameters + private Map jobParameters; + + // Job Instance 정보 + private Long jobInstanceId; + + // 실행 통계 + private Long duration; // 실행 시간 (ms) + private Integer readCount; + private Integer writeCount; + private Integer skipCount; + private Integer filterCount; + + // Step 실행 정보 + private List stepExecutions; + + /** + * Step 실행 정보 DTO + */ + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class StepExecutionDto { + private Long stepExecutionId; + private String stepName; + private String status; + private LocalDateTime startTime; + private LocalDateTime endTime; + private Integer readCount; + private Integer writeCount; + private Integer commitCount; + private Integer rollbackCount; + private Integer readSkipCount; + private Integer processSkipCount; + private Integer writeSkipCount; + private Integer filterCount; + private String exitCode; + private String exitMessage; + private Long duration; // 실행 시간 (ms) + private ApiCallInfo apiCallInfo; // API 호출 정보 (옵셔널) + } + + /** + * API 호출 정보 DTO + */ + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class ApiCallInfo { + private String apiUrl; // API URL + private String method; // HTTP Method (GET, POST, etc.) 
+ private Map parameters; // API 파라미터 + private Integer totalCalls; // 전체 API 호출 횟수 + private Integer completedCalls; // 완료된 API 호출 횟수 + private String lastCallTime; // 마지막 호출 시간 + } +} diff --git a/src/main/java/com/snp/batch/global/dto/JobExecutionDto.java b/src/main/java/com/snp/batch/global/dto/JobExecutionDto.java new file mode 100644 index 0000000..4f5d0c9 --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/JobExecutionDto.java @@ -0,0 +1,23 @@ +package com.snp.batch.global.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class JobExecutionDto { + + private Long executionId; + private String jobName; + private String status; + private LocalDateTime startTime; + private LocalDateTime endTime; + private String exitCode; + private String exitMessage; +} diff --git a/src/main/java/com/snp/batch/global/dto/ScheduleRequest.java b/src/main/java/com/snp/batch/global/dto/ScheduleRequest.java new file mode 100644 index 0000000..aee9a5f --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/ScheduleRequest.java @@ -0,0 +1,46 @@ +package com.snp.batch.global.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 스케줄 등록/수정 요청 DTO + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ScheduleRequest { + + /** + * 배치 작업 이름 + * 예: "jsonToPostgresJob", "shipDataImportJob" + */ + private String jobName; + + /** + * Cron 표현식 + * 예: "0 0 2 * * ?" (매일 새벽 2시) + * "0 0 * * * ?" (매 시간) + * "0 0/30 * * * ?" 
(30분마다) + */ + private String cronExpression; + + /** + * 스케줄 설명 (선택) + */ + private String description; + + /** + * 활성화 여부 (선택, 기본값 true) + */ + @Builder.Default + private Boolean active = true; + + /** + * 생성자/수정자 정보 (선택) + */ + private String updatedBy; +} diff --git a/src/main/java/com/snp/batch/global/dto/ScheduleResponse.java b/src/main/java/com/snp/batch/global/dto/ScheduleResponse.java new file mode 100644 index 0000000..5436b05 --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/ScheduleResponse.java @@ -0,0 +1,80 @@ +package com.snp.batch.global.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; +import java.util.Date; + +/** + * 스케줄 조회 응답 DTO + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ScheduleResponse { + + /** + * 스케줄 ID + */ + private Long id; + + /** + * 배치 작업 이름 + */ + private String jobName; + + /** + * Cron 표현식 + */ + private String cronExpression; + + /** + * 스케줄 설명 + */ + private String description; + + /** + * 활성화 여부 + */ + private Boolean active; + + /** + * 다음 실행 예정 시간 (Quartz에서 계산) + */ + private Date nextFireTime; + + /** + * 이전 실행 시간 (Quartz에서 조회) + */ + private Date previousFireTime; + + /** + * Quartz Trigger 상태 + * NORMAL, PAUSED, COMPLETE, ERROR, BLOCKED, NONE + */ + private String triggerState; + + /** + * 생성 일시 + */ + private LocalDateTime createdAt; + + /** + * 수정 일시 + */ + private LocalDateTime updatedAt; + + /** + * 생성자 + */ + private String createdBy; + + /** + * 수정자 + */ + private String updatedBy; +} diff --git a/src/main/java/com/snp/batch/global/dto/TimelineResponse.java b/src/main/java/com/snp/batch/global/dto/TimelineResponse.java new file mode 100644 index 0000000..157e860 --- /dev/null +++ b/src/main/java/com/snp/batch/global/dto/TimelineResponse.java @@ -0,0 +1,48 @@ +package com.snp.batch.global.dto; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import 
lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.List; +import java.util.Map; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class TimelineResponse { + private String periodLabel; + private List periods; + private List schedules; + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class PeriodInfo { + private String key; + private String label; + } + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class ScheduleTimeline { + private String jobName; + private Map executions; + } + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class ExecutionInfo { + private Long executionId; + private String status; + private String startTime; + private String endTime; + } +} diff --git a/src/main/java/com/snp/batch/global/model/JobScheduleEntity.java b/src/main/java/com/snp/batch/global/model/JobScheduleEntity.java new file mode 100644 index 0000000..ac653fc --- /dev/null +++ b/src/main/java/com/snp/batch/global/model/JobScheduleEntity.java @@ -0,0 +1,110 @@ +package com.snp.batch.global.model; + +import jakarta.persistence.*; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; + +/** + * 배치 작업 스케줄 정보를 저장하는 엔티티 + * Quartz 스케줄러와 연동하여 DB에 영속화 + * + * JPA를 사용하므로 @PrePersist, @PreUpdate로 감사 필드 자동 설정 + */ +@Entity +@Table(name = "job_schedule", schema = "snp_batch", indexes = { + @Index(name = "idx_job_name", columnList = "job_name", unique = true), + @Index(name = "idx_active", columnList = "active") +}) +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class JobScheduleEntity { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + /** + * 배치 작업 이름 (BatchConfig에 등록된 Job Bean 이름) + * 예: "jsonToPostgresJob", "shipDataImportJob" + */ + @Column(name = "job_name", unique = true, nullable = false, length = 100) + private 
String jobName; + + /** + * Cron 표현식 + * 예: "0 0 2 * * ?" (매일 새벽 2시) + */ + @Column(name = "cron_expression", nullable = false, length = 100) + private String cronExpression; + + /** + * 스케줄 설명 + */ + @Column(name = "description", length = 500) + private String description; + + /** + * 활성화 여부 + * true: 스케줄 활성, false: 일시 중지 + */ + @Column(name = "active", nullable = false) + @Builder.Default + private Boolean active = true; + + /** + * 생성 일시 (감사 필드) + */ + @Column(name = "created_at", nullable = false, updatable = false) + private LocalDateTime createdAt; + + /** + * 수정 일시 (감사 필드) + */ + @Column(name = "updated_at", nullable = false) + private LocalDateTime updatedAt; + + /** + * 생성자 (감사 필드) + */ + @Column(name = "created_by", length = 100) + private String createdBy; + + /** + * 수정자 (감사 필드) + */ + @Column(name = "updated_by", length = 100) + private String updatedBy; + + /** + * 엔티티 저장 전 자동 호출 (INSERT 시) + */ + @PrePersist + protected void onCreate() { + LocalDateTime now = LocalDateTime.now(); + this.createdAt = now; + this.updatedAt = now; + if (this.createdBy == null) { + this.createdBy = "SYSTEM"; + } + if (this.updatedBy == null) { + this.updatedBy = "SYSTEM"; + } + } + + /** + * 엔티티 업데이트 전 자동 호출 (UPDATE 시) + */ + @PreUpdate + protected void onUpdate() { + this.updatedAt = LocalDateTime.now(); + if (this.updatedBy == null) { + this.updatedBy = "SYSTEM"; + } + } +} diff --git a/src/main/java/com/snp/batch/global/repository/JobScheduleRepository.java b/src/main/java/com/snp/batch/global/repository/JobScheduleRepository.java new file mode 100644 index 0000000..c51fad2 --- /dev/null +++ b/src/main/java/com/snp/batch/global/repository/JobScheduleRepository.java @@ -0,0 +1,43 @@ +package com.snp.batch.global.repository; + +import com.snp.batch.global.model.JobScheduleEntity; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.stereotype.Repository; + +import java.util.List; +import java.util.Optional; + +/** + * 
JobScheduleEntity Repository + * JPA Repository 방식으로 자동 구현 + */ +@Repository +public interface JobScheduleRepository extends JpaRepository { + + /** + * Job 이름으로 스케줄 조회 + */ + Optional findByJobName(String jobName); + + /** + * Job 이름 존재 여부 확인 + */ + boolean existsByJobName(String jobName); + + /** + * 활성화된 스케줄 목록 조회 + */ + List findByActive(Boolean active); + + /** + * 활성화된 모든 스케줄 조회 + */ + default List findAllActive() { + return findByActive(true); + } + + /** + * Job 이름으로 스케줄 삭제 + */ + void deleteByJobName(String jobName); +} diff --git a/src/main/java/com/snp/batch/global/repository/TimelineRepository.java b/src/main/java/com/snp/batch/global/repository/TimelineRepository.java new file mode 100644 index 0000000..b6d72a3 --- /dev/null +++ b/src/main/java/com/snp/batch/global/repository/TimelineRepository.java @@ -0,0 +1,194 @@ +package com.snp.batch.global.repository; + +import lombok.RequiredArgsConstructor; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.stereotype.Repository; + +import java.time.LocalDateTime; +import java.util.List; +import java.util.Map; + +/** + * 타임라인 조회를 위한 경량 Repository + * Step Context 등 불필요한 데이터를 조회하지 않고 필요한 정보만 가져옴 + */ +@Repository +@RequiredArgsConstructor +public class TimelineRepository { + + private final JdbcTemplate jdbcTemplate; + + /** + * 특정 Job의 특정 범위 내 실행 이력 조회 (경량) + * Step Context를 조회하지 않아 성능이 매우 빠름 + */ + public List> findExecutionsByJobNameAndDateRange( + String jobName, + LocalDateTime startTime, + LocalDateTime endTime) { + + String sql = """ + SELECT + je.JOB_EXECUTION_ID as executionId, + je.STATUS as status, + je.START_TIME as startTime, + je.END_TIME as endTime + FROM BATCH_JOB_EXECUTION je + INNER JOIN BATCH_JOB_INSTANCE ji ON je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID + WHERE ji.JOB_NAME = ? + AND je.START_TIME >= ? + AND je.START_TIME < ? 
+ ORDER BY je.START_TIME DESC + """; + + return jdbcTemplate.queryForList(sql, jobName, startTime, endTime); + } + + /** + * 모든 Job의 특정 범위 내 실행 이력 조회 (한 번의 쿼리) + */ + public List> findAllExecutionsByDateRange( + LocalDateTime startTime, + LocalDateTime endTime) { + + String sql = """ + SELECT + ji.JOB_NAME as jobName, + je.JOB_EXECUTION_ID as executionId, + je.STATUS as status, + je.START_TIME as startTime, + je.END_TIME as endTime + FROM BATCH_JOB_EXECUTION je + INNER JOIN BATCH_JOB_INSTANCE ji ON je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID + WHERE je.START_TIME >= ? + AND je.START_TIME < ? + ORDER BY ji.JOB_NAME, je.START_TIME DESC + """; + + return jdbcTemplate.queryForList(sql, startTime, endTime); + } + + /** + * 현재 실행 중인 Job 조회 (STARTED, STARTING 상태) + */ + public List> findRunningExecutions() { + String sql = """ + SELECT + ji.JOB_NAME as jobName, + je.JOB_EXECUTION_ID as executionId, + je.STATUS as status, + je.START_TIME as startTime + FROM BATCH_JOB_EXECUTION je + INNER JOIN BATCH_JOB_INSTANCE ji ON je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID + WHERE je.STATUS IN ('STARTED', 'STARTING') + ORDER BY je.START_TIME DESC + """; + + return jdbcTemplate.queryForList(sql); + } + + /** + * 최근 실행 이력 조회 (상위 N개) + */ + /** + * Job별 가장 최근 실행 정보 조회 + */ + public List> findLastExecutionPerJob() { + String sql = """ + SELECT DISTINCT ON (ji.JOB_NAME) + ji.JOB_NAME as jobName, + je.JOB_EXECUTION_ID as executionId, + je.STATUS as status, + je.START_TIME as startTime, + je.END_TIME as endTime + FROM BATCH_JOB_EXECUTION je + INNER JOIN BATCH_JOB_INSTANCE ji ON je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID + ORDER BY ji.JOB_NAME, je.START_TIME DESC + """; + + return jdbcTemplate.queryForList(sql); + } + + /** + * 일별 실행 통계 (전체) + */ + public List> findDailyStatistics(int days) { + String sql = String.format(""" + SELECT + CAST(je.START_TIME AS DATE) as execDate, + SUM(CASE WHEN je.STATUS = 'COMPLETED' THEN 1 ELSE 0 END) as successCount, + SUM(CASE WHEN je.STATUS = 'FAILED' THEN 1 ELSE 0 
END) as failedCount, + SUM(CASE WHEN je.STATUS NOT IN ('COMPLETED', 'FAILED') THEN 1 ELSE 0 END) as otherCount, + AVG(EXTRACT(EPOCH FROM (je.END_TIME - je.START_TIME)) * 1000) as avgDurationMs + FROM BATCH_JOB_EXECUTION je + WHERE je.START_TIME >= NOW() - INTERVAL '%d days' + AND je.START_TIME IS NOT NULL + GROUP BY CAST(je.START_TIME AS DATE) + ORDER BY execDate + """, days); + + return jdbcTemplate.queryForList(sql); + } + + /** + * 일별 실행 통계 (특정 Job) + */ + public List> findDailyStatisticsForJob(String jobName, int days) { + String sql = String.format(""" + SELECT + CAST(je.START_TIME AS DATE) as execDate, + SUM(CASE WHEN je.STATUS = 'COMPLETED' THEN 1 ELSE 0 END) as successCount, + SUM(CASE WHEN je.STATUS = 'FAILED' THEN 1 ELSE 0 END) as failedCount, + SUM(CASE WHEN je.STATUS NOT IN ('COMPLETED', 'FAILED') THEN 1 ELSE 0 END) as otherCount, + AVG(EXTRACT(EPOCH FROM (je.END_TIME - je.START_TIME)) * 1000) as avgDurationMs + FROM BATCH_JOB_EXECUTION je + INNER JOIN BATCH_JOB_INSTANCE ji ON je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID + WHERE ji.JOB_NAME = ? + AND je.START_TIME >= NOW() - INTERVAL '%d days' + AND je.START_TIME IS NOT NULL + GROUP BY CAST(je.START_TIME AS DATE) + ORDER BY execDate + """, days); + + return jdbcTemplate.queryForList(sql, jobName); + } + + /** + * 최근 실행 이력 조회 (exitCode, exitMessage 포함) + */ + public List> findRecentExecutionsWithDetail(int limit) { + String sql = """ + SELECT + ji.JOB_NAME as jobName, + je.JOB_EXECUTION_ID as executionId, + je.STATUS as status, + je.START_TIME as startTime, + je.END_TIME as endTime, + je.EXIT_CODE as exitCode, + je.EXIT_MESSAGE as exitMessage + FROM BATCH_JOB_EXECUTION je + INNER JOIN BATCH_JOB_INSTANCE ji ON je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID + ORDER BY je.START_TIME DESC + LIMIT ? 
+ """; + + return jdbcTemplate.queryForList(sql, limit); + } + + public List> findRecentExecutions(int limit) { + String sql = """ + SELECT + ji.JOB_NAME as jobName, + je.JOB_EXECUTION_ID as executionId, + je.STATUS as status, + je.START_TIME as startTime, + je.END_TIME as endTime + FROM BATCH_JOB_EXECUTION je + INNER JOIN BATCH_JOB_INSTANCE ji ON je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID + ORDER BY je.START_TIME DESC + LIMIT ? + """; + + return jdbcTemplate.queryForList(sql, limit); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/config/CodeSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/config/CodeSyncJobConfig.java new file mode 100644 index 0000000..9d61e74 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/config/CodeSyncJobConfig.java @@ -0,0 +1,168 @@ +package com.snp.batch.jobs.datasync.batch.code.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto; +import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto; +import com.snp.batch.jobs.datasync.batch.code.entity.FlagCodeEntity; +import com.snp.batch.jobs.datasync.batch.code.entity.Stat5CodeEntity; +import com.snp.batch.jobs.datasync.batch.code.processor.FlagCodeProcessor; +import com.snp.batch.jobs.datasync.batch.code.processor.Stat5CodeProcessor; +import com.snp.batch.jobs.datasync.batch.code.reader.FlagCodeReader; +import com.snp.batch.jobs.datasync.batch.code.reader.Stat5CodeReader; +import com.snp.batch.jobs.datasync.batch.code.repository.CodeRepository; +import 
com.snp.batch.jobs.datasync.batch.code.writer.FlagCodeWriter; +import com.snp.batch.jobs.datasync.batch.code.writer.Stat5CodeWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class CodeSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final CodeRepository codeRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + + /** + * 생성자 주입 + */ + public CodeSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + CodeRepository codeRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource + ) { + super(jobRepository, transactionManager); + this.codeRepository = codeRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + } + + @Override + protected String getJobName() { + return 
"codeDataSyncJob"; + } + + @Override + protected String getStepName() { + return "flagCodeSyncStep"; + } + + @Override + protected ItemReader createReader() { + return flagCodeReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new FlagCodeProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new FlagCodeWriter(codeRepository); + } + + // --- FlagCode Reader --- + + @Bean + @StepScope + public ItemReader flagCodeReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new FlagCodeReader(businessDataSource, tableMetaInfo); + } + + // --- Stat5Code Reader --- + + @Bean + @StepScope + public ItemReader stat5CodeReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new Stat5CodeReader(businessDataSource, tableMetaInfo); + } + + // --- Listeners --- + + @Bean + public BatchWriteListener flagCodeWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFlagCode); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener stat5CodeWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStat5Code); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + // --- Steps --- + + @Bean(name = "flagCodeSyncStep") + public Step flagCodeSyncStep() { + log.info("Step 생성: flagCodeSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(flagCodeWriteListener()) + .build(); + } + + @Bean(name = "stat5CodeSyncStep") + public Step stat5CodeSyncStep() { + log.info("Step 생성: stat5CodeSyncStep"); + return 
new StepBuilder("stat5CodeSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(stat5CodeReader(businessDataSource, tableMetaInfo)) + .processor(new Stat5CodeProcessor()) + .writer(new Stat5CodeWriter(codeRepository)) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(stat5CodeWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(flagCodeSyncStep()) + .next(stat5CodeSyncStep()) + .build(); + } + + @Bean(name = "codeDataSyncJob") + public Job codeDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/dto/FlagCodeDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/dto/FlagCodeDto.java new file mode 100644 index 0000000..4352470 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/dto/FlagCodeDto.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.code.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class FlagCodeDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String shipCountryCd; + private String cdNm; + private String isoTwoCd; + private String isoThrCd; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/dto/Stat5CodeDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/dto/Stat5CodeDto.java new file mode 100644 index 0000000..3910d7e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/dto/Stat5CodeDto.java @@ -0,0 +1,30 @@ +package com.snp.batch.jobs.datasync.batch.code.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor 
+@AllArgsConstructor +@Builder +public class Stat5CodeDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String lvOne; + private String lvOneDesc; + private String lvTwo; + private String lvTwoDesc; + private String lvThr; + private String lvThrDesc; + private String lvFour; + private String lvFourDesc; + private String lvFive; + private String lvFiveDesc; + private String dtlDesc; + private String rlsIem; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/entity/FlagCodeEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/entity/FlagCodeEntity.java new file mode 100644 index 0000000..8a2e818 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/entity/FlagCodeEntity.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.code.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class FlagCodeEntity implements JobExecutionGroupable { + private String datasetVer; + private String shipCountryCd; + private String cdNm; + private String isoTwoCd; + private String isoThrCd; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/entity/Stat5CodeEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/entity/Stat5CodeEntity.java new file mode 100644 index 0000000..a3d9207 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/entity/Stat5CodeEntity.java @@ -0,0 +1,30 @@ +package com.snp.batch.jobs.datasync.batch.code.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class Stat5CodeEntity 
implements JobExecutionGroupable { + private String lvOne; + private String lvOneDesc; + private String lvTwo; + private String lvTwoDesc; + private String lvThr; + private String lvThrDesc; + private String lvFour; + private String lvFourDesc; + private String lvFive; + private String lvFiveDesc; + private String dtlDesc; + private String rlsIem; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/processor/FlagCodeProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/processor/FlagCodeProcessor.java new file mode 100644 index 0000000..db8c9ec --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/processor/FlagCodeProcessor.java @@ -0,0 +1,21 @@ +package com.snp.batch.jobs.datasync.batch.code.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto; +import com.snp.batch.jobs.datasync.batch.code.entity.FlagCodeEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class FlagCodeProcessor extends BaseProcessor { + @Override + protected FlagCodeEntity processItem(FlagCodeDto dto) throws Exception { + return FlagCodeEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .shipCountryCd(dto.getShipCountryCd()) + .cdNm(dto.getCdNm()) + .isoTwoCd(dto.getIsoTwoCd()) + .isoThrCd(dto.getIsoThrCd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/processor/Stat5CodeProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/processor/Stat5CodeProcessor.java new file mode 100644 index 0000000..80b8eb0 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/processor/Stat5CodeProcessor.java @@ -0,0 +1,28 @@ +package com.snp.batch.jobs.datasync.batch.code.processor; + +import 
com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto; +import com.snp.batch.jobs.datasync.batch.code.entity.Stat5CodeEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class Stat5CodeProcessor extends BaseProcessor { + @Override + protected Stat5CodeEntity processItem(Stat5CodeDto dto) throws Exception { + return Stat5CodeEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .lvOne(dto.getLvOne()) + .lvOneDesc(dto.getLvOneDesc()) + .lvTwo(dto.getLvTwo()) + .lvTwoDesc(dto.getLvTwoDesc()) + .lvThr(dto.getLvThr()) + .lvThrDesc(dto.getLvThrDesc()) + .lvFour(dto.getLvFour()) + .lvFourDesc(dto.getLvFourDesc()) + .lvFive(dto.getLvFive()) + .lvFiveDesc(dto.getLvFiveDesc()) + .dtlDesc(dto.getDtlDesc()) + .rlsIem(dto.getRlsIem()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/reader/FlagCodeReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/reader/FlagCodeReader.java new file mode 100644 index 0000000..0062535 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/reader/FlagCodeReader.java @@ -0,0 +1,68 @@ +package com.snp.batch.jobs.datasync.batch.code.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class FlagCodeReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public FlagCodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { 
+ this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public FlagCodeDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceFlagCode), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[FlagCodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFlagCode); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return FlagCodeDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .shipCountryCd(rs.getString("ship_country_cd")) + .cdNm(rs.getString("cd_nm")) + .isoTwoCd(rs.getString("iso_two_cd")) + .isoThrCd(rs.getString("iso_thr_cd")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFlagCode); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/reader/Stat5CodeReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/reader/Stat5CodeReader.java new file mode 100644 index 0000000..29020cd --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/reader/Stat5CodeReader.java @@ -0,0 +1,75 @@ +package com.snp.batch.jobs.datasync.batch.code.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class Stat5CodeReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public Stat5CodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public Stat5CodeDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceStat5Code), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[Stat5CodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStat5Code); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return Stat5CodeDto.builder() + .jobExecutionId(targetId) + .lvOne(rs.getString("lv_one")) + .lvOneDesc(rs.getString("lv_one_desc")) + .lvTwo(rs.getString("lv_two")) + .lvTwoDesc(rs.getString("lv_two_desc")) + .lvThr(rs.getString("lv_thr")) + .lvThrDesc(rs.getString("lv_thr_desc")) + .lvFour(rs.getString("lv_four")) + .lvFourDesc(rs.getString("lv_four_desc")) + .lvFive(rs.getString("lv_five")) + .lvFiveDesc(rs.getString("lv_five_desc")) + .dtlDesc(rs.getString("dtl_desc")) + .rlsIem(rs.getString("rls_iem")) + .build(); + }, nextTargetId); + 
updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStat5Code); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeRepository.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeRepository.java new file mode 100644 index 0000000..179a085 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeRepository.java @@ -0,0 +1,15 @@ +package com.snp.batch.jobs.datasync.batch.code.repository; + +import com.snp.batch.jobs.datasync.batch.code.entity.FlagCodeEntity; +import com.snp.batch.jobs.datasync.batch.code.entity.Stat5CodeEntity; + +import java.util.List; + +/** + * CodeEntity Repository 인터페이스 + * 구현체: CodeRepositoryImpl (JdbcTemplate 기반) + */ +public interface CodeRepository { + void saveFlagCode(List flagCodeEntityList); + void saveStat5Code(List stat5CodeEntityList); +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeRepositoryImpl.java new file mode 100644 index 0000000..3890621 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeRepositoryImpl.java @@ -0,0 +1,145 @@ +package com.snp.batch.jobs.datasync.batch.code.repository; + +import com.snp.batch.common.batch.repository.MultiDataSourceJdbcRepository; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.code.entity.FlagCodeEntity; +import com.snp.batch.jobs.datasync.batch.code.entity.Stat5CodeEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +import 
javax.sql.DataSource; +import java.sql.PreparedStatement; +import java.util.List; + +/** + * CodeEntity Repository (JdbcTemplate 기반) + */ +@Slf4j +@Repository("codeRepository") +public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository implements CodeRepository { + + private DataSource batchDataSource; + private DataSource businessDataSource; + private final TableMetaInfo tableMetaInfo; + + public CodeRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + + super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource)); + + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.tableMetaInfo = tableMetaInfo; + } + + @Override + protected String getTableName() { + return null; + } + + @Override + protected RowMapper getRowMapper() { + return null; + } + + @Override + protected Long extractId(FlagCodeEntity entity) { + return null; + } + + @Override + protected String getInsertSql() { + return null; + } + + @Override + protected String getUpdateSql() { + return null; + } + + @Override + protected void setInsertParameters(PreparedStatement ps, FlagCodeEntity entity) throws Exception { + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, FlagCodeEntity entity) throws Exception { + } + + @Override + protected String getEntityName() { + return null; + } + + @Override + public void saveFlagCode(List flagCodeEntityList) { + String sql = CodeSql.getFlagCodeUpsertSql(tableMetaInfo.targetTbShipCountryCd); + if (flagCodeEntityList == null || flagCodeEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "FlagCodeEntity", flagCodeEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, flagCodeEntityList, flagCodeEntityList.size(), + (ps, entity) -> { + try { + bindFlagCode(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + 
throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "FlagCodeEntity", flagCodeEntityList.size()); + } + + public void bindFlagCode(PreparedStatement pstmt, FlagCodeEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getShipCountryCd()); // 3. ship_country_cd + pstmt.setString(idx++, entity.getCdNm()); // 4. cd_nm + pstmt.setString(idx++, entity.getIsoTwoCd()); // 5. iso_two_cd + pstmt.setString(idx++, entity.getIsoThrCd()); // 6. iso_thr_cd + } + + @Override + public void saveStat5Code(List stat5CodeEntityList) { + String sql = CodeSql.getStat5CodeUpsertSql(tableMetaInfo.targetTbShipTypeCd); + if (stat5CodeEntityList == null || stat5CodeEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, stat5CodeEntityList, stat5CodeEntityList.size(), + (ps, entity) -> { + try { + bindStat5Code(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size()); + } + + public void bindStat5Code(PreparedStatement pstmt, Stat5CodeEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getLvOne()); // 2. lv_one + pstmt.setString(idx++, entity.getLvOneDesc()); // 3. lv_one_desc + pstmt.setString(idx++, entity.getLvTwo()); // 4. lv_two + pstmt.setString(idx++, entity.getLvTwoDesc()); // 5. lv_two_desc + pstmt.setString(idx++, entity.getLvThr()); // 6. lv_thr + pstmt.setString(idx++, entity.getLvThrDesc()); // 7. lv_thr_desc + pstmt.setString(idx++, entity.getLvFour()); // 8. lv_four + pstmt.setString(idx++, entity.getLvFourDesc()); // 9. lv_four_desc + pstmt.setString(idx++, entity.getLvFive()); // 10. 
lv_five + pstmt.setString(idx++, entity.getLvFiveDesc()); // 11. lv_five_desc + pstmt.setString(idx++, entity.getDtlDesc()); // 12. dtl_desc + pstmt.setString(idx++, entity.getRlsIem()); // 13. rls_iem + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeSql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeSql.java new file mode 100644 index 0000000..06658d7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/repository/CodeSql.java @@ -0,0 +1,60 @@ +package com.snp.batch.jobs.datasync.batch.code.repository; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class CodeSql { + private static String TARGET_SCHEMA; + public CodeSql(@Value("${app.batch.target-schema.name}") String targetSchema) { + TARGET_SCHEMA = targetSchema; + } + + public static String getFlagCodeUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + dataset_ver, ship_country_cd, cd_nm, iso_two_cd, iso_thr_cd + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ? + ) + ON CONFLICT (ship_country_cd) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + dataset_ver = EXCLUDED.dataset_ver, + cd_nm = EXCLUDED.cd_nm, + iso_two_cd = EXCLUDED.iso_two_cd, + iso_thr_cd = EXCLUDED.iso_thr_cd; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getStat5CodeUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + lv_one, lv_one_desc, lv_two, lv_two_desc, lv_thr, lv_thr_desc, + lv_four, lv_four_desc, lv_five, lv_five_desc, dtl_desc, rls_iem + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ? 
+ ) + ON CONFLICT (lv_one, lv_two, lv_thr, lv_four, lv_five) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + lv_one_desc = EXCLUDED.lv_one_desc, + lv_two_desc = EXCLUDED.lv_two_desc, + lv_thr_desc = EXCLUDED.lv_thr_desc, + lv_four_desc = EXCLUDED.lv_four_desc, + lv_five_desc = EXCLUDED.lv_five_desc, + dtl_desc = EXCLUDED.dtl_desc, + rls_iem = EXCLUDED.rls_iem; + """.formatted(TARGET_SCHEMA, targetTable); + } + +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/writer/FlagCodeWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/writer/FlagCodeWriter.java new file mode 100644 index 0000000..ba0339f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/writer/FlagCodeWriter.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.code.writer; + +import com.snp.batch.common.batch.writer.BaseWriter; +import com.snp.batch.jobs.datasync.batch.code.entity.FlagCodeEntity; +import com.snp.batch.jobs.datasync.batch.code.repository.CodeRepository; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; + +@Slf4j +public class FlagCodeWriter extends BaseWriter { + private final CodeRepository codeRepository; + + public FlagCodeWriter(CodeRepository codeRepository) { + super("FlagCodeEntity"); + this.codeRepository = codeRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + codeRepository.saveFlagCode(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/code/writer/Stat5CodeWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/code/writer/Stat5CodeWriter.java new file mode 100644 index 0000000..8f24928 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/code/writer/Stat5CodeWriter.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.code.writer; + +import com.snp.batch.common.batch.writer.BaseWriter; +import 
com.snp.batch.jobs.datasync.batch.code.entity.Stat5CodeEntity; +import com.snp.batch.jobs.datasync.batch.code.repository.CodeRepository; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; + +@Slf4j +public class Stat5CodeWriter extends BaseWriter { + private final CodeRepository codeRepository; + + public Stat5CodeWriter(CodeRepository codeRepository) { + super("Stat5CodeEntity"); + this.codeRepository = codeRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + codeRepository.saveStat5Code(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/config/CompanyComplianceChangeSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/config/CompanyComplianceChangeSyncJobConfig.java new file mode 100644 index 0000000..42be2f5 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/config/CompanyComplianceChangeSyncJobConfig.java @@ -0,0 +1,110 @@ +package com.snp.batch.jobs.datasync.batch.compliance.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceChangeDto; +import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceChangeEntity; +import com.snp.batch.jobs.datasync.batch.compliance.processor.CompanyComplianceChangeProcessor; +import com.snp.batch.jobs.datasync.batch.compliance.reader.CompanyComplianceChangeReader; +import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository; +import com.snp.batch.jobs.datasync.batch.compliance.writer.CompanyComplianceChangeWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import 
// ===== src/main/java/com/snp/batch/jobs/datasync/batch/compliance/config/CompanyComplianceChangeSyncJobConfig.java =====
package com.snp.batch.jobs.datasync.batch.compliance.config;

import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceChangeDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceChangeEntity;
import com.snp.batch.jobs.datasync.batch.compliance.processor.CompanyComplianceChangeProcessor;
import com.snp.batch.jobs.datasync.batch.compliance.reader.CompanyComplianceChangeReader;
import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository;
import com.snp.batch.jobs.datasync.batch.compliance.writer.CompanyComplianceChangeWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.PlatformTransactionManager;

import javax.sql.DataSource;

/**
 * Job configuration for syncing company-compliance CHANGE records
 * (batch DB -&gt; target schema) in fixed-size chunks.
 *
 * NOTE(review): generic type parameters below were restored by hand — the source
 * dump stripped all angle-bracket content. Confirm against the BaseJobConfig
 * declaration before merging.
 */
@Slf4j
@Configuration
public class CompanyComplianceChangeSyncJobConfig
        extends BaseJobConfig<CompanyComplianceChangeDto, CompanyComplianceChangeEntity> {

    private final TableMetaInfo tableMetaInfo;
    private final ComplianceRepository complianceRepository;
    private final DataSource batchDataSource;
    private final String targetSchema;

    /** Rows per chunk transaction. */
    private static final int CHUNK_SIZE = 1000;

    public CompanyComplianceChangeSyncJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            ComplianceRepository complianceRepository,
            TableMetaInfo tableMetaInfo,
            @Qualifier("batchDataSource") DataSource batchDataSource,
            @Value("${app.batch.target-schema.name}") String targetSchema) {
        super(jobRepository, transactionManager);
        this.complianceRepository = complianceRepository;
        this.tableMetaInfo = tableMetaInfo;
        this.batchDataSource = batchDataSource;
        this.targetSchema = targetSchema;
    }

    @Override
    protected String getJobName() {
        return "companyComplianceChangeDataSyncJob";
    }

    @Override
    protected String getStepName() {
        return "companyComplianceChangeSyncStep";
    }

    /** Resolves the @StepScope reader bean via the @Configuration proxy. */
    @Override
    protected ItemReader<CompanyComplianceChangeDto> createReader() {
        return companyComplianceChangeReader(batchDataSource, tableMetaInfo, targetSchema);
    }

    @Override
    protected ItemProcessor<CompanyComplianceChangeDto, CompanyComplianceChangeEntity> createProcessor() {
        return new CompanyComplianceChangeProcessor();
    }

    @Override
    protected ItemWriter<CompanyComplianceChangeEntity> createWriter() {
        return new CompanyComplianceChangeWriter(complianceRepository);
    }

    @Bean
    @StepScope
    public ItemReader<CompanyComplianceChangeDto> companyComplianceChangeReader(
            @Qualifier("batchDataSource") DataSource batchDataSource,
            TableMetaInfo tableMetaInfo,
            @Value("${app.batch.target-schema.name}") String targetSchema) {
        return new CompanyComplianceChangeReader(batchDataSource, tableMetaInfo, targetSchema);
    }

    @Bean(name = "companyComplianceChangeSyncStep")
    public Step companyComplianceChangeSyncStep() {
        log.info("Step 생성: companyComplianceChangeSyncStep");
        return new StepBuilder(getStepName(), jobRepository)
                .<CompanyComplianceChangeDto, CompanyComplianceChangeEntity>chunk(CHUNK_SIZE, transactionManager)
                .reader(createReader())
                .processor(createProcessor())
                .writer(createWriter())
                .build();
    }

    /** Single-step flow: just the sync step. */
    @Override
    protected Job createJobFlow(JobBuilder jobBuilder) {
        return jobBuilder
                .start(companyComplianceChangeSyncStep())
                .build();
    }

    @Bean(name = "companyComplianceChangeDataSyncJob")
    public Job companyComplianceChangeDataSyncJob() {
        return job();
    }
}
// ===== src/main/java/com/snp/batch/jobs/datasync/batch/compliance/config/CompanyComplianceSyncJobConfig.java =====
package com.snp.batch.jobs.datasync.batch.compliance.config;

import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity;
import com.snp.batch.jobs.datasync.batch.compliance.processor.CompanyComplianceProcessor;
import com.snp.batch.jobs.datasync.batch.compliance.reader.CompanyComplianceReader;
import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository;
import com.snp.batch.jobs.datasync.batch.compliance.writer.CompanyComplianceWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;

import javax.sql.DataSource;

/**
 * Job configuration for syncing company-compliance records from the business DB
 * to the batch target, chunked by source job-execution id (GroupByExecutionIdPolicy)
 * rather than by a fixed count, and marking source batches complete afterwards
 * via {@link BatchWriteListener}.
 *
 * NOTE(review): generic type parameters were restored by hand — the source dump
 * stripped all angle-bracket content. Confirm against BaseJobConfig.
 */
@Slf4j
@Configuration
public class CompanyComplianceSyncJobConfig
        extends BaseJobConfig<CompanyComplianceDto, CompanyComplianceEntity> {

    private final TableMetaInfo tableMetaInfo;
    private final ComplianceRepository complianceRepository;
    // NOTE(review): batchDataSource is injected and stored but not referenced in
    // this class (the reader uses businessDataSource) — confirm before removing.
    private final DataSource batchDataSource;
    private final DataSource businessDataSource;
    private final JdbcTemplate businessJdbcTemplate;
    private final int subChunkSize;

    public CompanyComplianceSyncJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            ComplianceRepository complianceRepository,
            TableMetaInfo tableMetaInfo,
            @Qualifier("batchDataSource") DataSource batchDataSource,
            @Qualifier("businessDataSource") DataSource businessDataSource,
            @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize) {
        super(jobRepository, transactionManager);
        this.complianceRepository = complianceRepository;
        this.tableMetaInfo = tableMetaInfo;
        this.batchDataSource = batchDataSource;
        this.businessDataSource = businessDataSource;
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.subChunkSize = subChunkSize;
    }

    @Override
    protected String getJobName() {
        return "companyComplianceDataSyncJob";
    }

    @Override
    protected String getStepName() {
        return "companyComplianceSyncStep";
    }

    /** Resolves the @StepScope reader bean via the @Configuration proxy. */
    @Override
    protected ItemReader<CompanyComplianceDto> createReader() {
        return companyComplianceReader(businessDataSource, tableMetaInfo);
    }

    @Override
    protected ItemProcessor<CompanyComplianceDto, CompanyComplianceEntity> createProcessor() {
        return new CompanyComplianceProcessor();
    }

    /** Writer commits in sub-chunks of {@code subChunkSize} within each execution-id group. */
    @Override
    protected ItemWriter<CompanyComplianceEntity> createWriter() {
        return new CompanyComplianceWriter(complianceRepository, transactionManager, subChunkSize);
    }

    // --- CompanyCompliance Reader ---

    @Bean
    @StepScope
    public ItemReader<CompanyComplianceDto> companyComplianceReader(
            @Qualifier("businessDataSource") DataSource businessDataSource,
            TableMetaInfo tableMetaInfo) {
        return new CompanyComplianceReader(businessDataSource, tableMetaInfo);
    }

    // --- Listeners ---

    /** Marks the source batch rows complete on the business DB after a successful write. */
    @Bean
    public BatchWriteListener<CompanyComplianceEntity> companyComplianceWriteListener() {
        String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
        return new BatchWriteListener<>(businessJdbcTemplate, sql);
    }

    // --- Steps ---

    @Bean(name = "companyComplianceSyncStep")
    public Step companyComplianceSyncStep() {
        log.info("Step 생성: companyComplianceSyncStep");
        return new StepBuilder(getStepName(), jobRepository)
                // Chunk boundary = change of source job-execution id, not a fixed count.
                .<CompanyComplianceDto, CompanyComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
                .reader(createReader())
                .processor(createProcessor())
                .writer(createWriter())
                .listener(new GroupByExecutionIdReadListener())
                .listener(new GroupByExecutionIdChunkListener())
                .listener(companyComplianceWriteListener())
                .build();
    }

    /** Single-step flow: just the sync step. */
    @Override
    protected Job createJobFlow(JobBuilder jobBuilder) {
        return jobBuilder
                .start(companyComplianceSyncStep())
                .build();
    }

    @Bean(name = "companyComplianceDataSyncJob")
    public Job companyComplianceDataSyncJob() {
        return job();
    }
}
// ===== src/main/java/com/snp/batch/jobs/datasync/batch/compliance/config/ShipComplianceChangeSyncJobConfig.java =====
package com.snp.batch.jobs.datasync.batch.compliance.config;

import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceChangeDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceChangeEntity;
import com.snp.batch.jobs.datasync.batch.compliance.processor.ShipComplianceChangeProcessor;
import com.snp.batch.jobs.datasync.batch.compliance.reader.ShipComplianceChangeReader;
import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository;
import com.snp.batch.jobs.datasync.batch.compliance.writer.ShipComplianceChangeWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.PlatformTransactionManager;

import javax.sql.DataSource;

/**
 * Job configuration for syncing ship-compliance CHANGE records
 * (batch DB -&gt; target schema) in fixed-size chunks. Structural twin of
 * {@code CompanyComplianceChangeSyncJobConfig} for ship data.
 *
 * NOTE(review): generic type parameters were restored by hand — the source dump
 * stripped all angle-bracket content. Confirm against BaseJobConfig.
 */
@Slf4j
@Configuration
public class ShipComplianceChangeSyncJobConfig
        extends BaseJobConfig<ShipComplianceChangeDto, ShipComplianceChangeEntity> {

    private final TableMetaInfo tableMetaInfo;
    private final ComplianceRepository complianceRepository;
    private final DataSource batchDataSource;
    private final String targetSchema;

    /** Rows per chunk transaction. */
    private static final int CHUNK_SIZE = 1000;

    public ShipComplianceChangeSyncJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            ComplianceRepository complianceRepository,
            TableMetaInfo tableMetaInfo,
            @Qualifier("batchDataSource") DataSource batchDataSource,
            @Value("${app.batch.target-schema.name}") String targetSchema) {
        super(jobRepository, transactionManager);
        this.complianceRepository = complianceRepository;
        this.tableMetaInfo = tableMetaInfo;
        this.batchDataSource = batchDataSource;
        this.targetSchema = targetSchema;
    }

    @Override
    protected String getJobName() {
        return "shipComplianceChangeDataSyncJob";
    }

    @Override
    protected String getStepName() {
        return "shipComplianceChangeSyncStep";
    }

    /** Resolves the @StepScope reader bean via the @Configuration proxy. */
    @Override
    protected ItemReader<ShipComplianceChangeDto> createReader() {
        return shipComplianceChangeReader(batchDataSource, tableMetaInfo, targetSchema);
    }

    @Override
    protected ItemProcessor<ShipComplianceChangeDto, ShipComplianceChangeEntity> createProcessor() {
        return new ShipComplianceChangeProcessor();
    }

    @Override
    protected ItemWriter<ShipComplianceChangeEntity> createWriter() {
        return new ShipComplianceChangeWriter(complianceRepository);
    }

    @Bean
    @StepScope
    public ItemReader<ShipComplianceChangeDto> shipComplianceChangeReader(
            @Qualifier("batchDataSource") DataSource batchDataSource,
            TableMetaInfo tableMetaInfo,
            @Value("${app.batch.target-schema.name}") String targetSchema) {
        return new ShipComplianceChangeReader(batchDataSource, tableMetaInfo, targetSchema);
    }

    @Bean(name = "shipComplianceChangeSyncStep")
    public Step shipComplianceChangeSyncStep() {
        log.info("Step 생성: shipComplianceChangeSyncStep");
        return new StepBuilder(getStepName(), jobRepository)
                .<ShipComplianceChangeDto, ShipComplianceChangeEntity>chunk(CHUNK_SIZE, transactionManager)
                .reader(createReader())
                .processor(createProcessor())
                .writer(createWriter())
                .build();
    }

    /** Single-step flow: just the sync step. */
    @Override
    protected Job createJobFlow(JobBuilder jobBuilder) {
        return jobBuilder
                .start(shipComplianceChangeSyncStep())
                .build();
    }

    @Bean(name = "shipComplianceChangeDataSyncJob")
    public Job shipComplianceChangeDataSyncJob() {
        return job();
    }
}
// ===== src/main/java/com/snp/batch/jobs/datasync/batch/compliance/config/ShipComplianceSyncJobConfig.java =====
package com.snp.batch.jobs.datasync.batch.compliance.config;

import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity;
import com.snp.batch.jobs.datasync.batch.compliance.processor.ShipComplianceProcessor;
import com.snp.batch.jobs.datasync.batch.compliance.reader.ShipComplianceReader;
import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository;
import com.snp.batch.jobs.datasync.batch.compliance.writer.ShipComplianceWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;

import javax.sql.DataSource;

/**
 * Job configuration for syncing ship-compliance records from the business DB to
 * the batch target, chunked by source job-execution id and marking source batches
 * complete afterwards. Structural twin of {@code CompanyComplianceSyncJobConfig}.
 *
 * NOTE(review): generic type parameters were restored by hand — the source dump
 * stripped all angle-bracket content. Confirm against BaseJobConfig.
 */
@Slf4j
@Configuration
public class ShipComplianceSyncJobConfig
        extends BaseJobConfig<ShipComplianceDto, ShipComplianceEntity> {

    private final TableMetaInfo tableMetaInfo;
    private final ComplianceRepository complianceRepository;
    // NOTE(review): batchDataSource is injected and stored but not referenced in
    // this class (the reader uses businessDataSource) — confirm before removing.
    private final DataSource batchDataSource;
    private final DataSource businessDataSource;
    private final JdbcTemplate businessJdbcTemplate;
    private final int subChunkSize;

    public ShipComplianceSyncJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            ComplianceRepository complianceRepository,
            TableMetaInfo tableMetaInfo,
            @Qualifier("batchDataSource") DataSource batchDataSource,
            @Qualifier("businessDataSource") DataSource businessDataSource,
            @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize) {
        super(jobRepository, transactionManager);
        this.complianceRepository = complianceRepository;
        this.tableMetaInfo = tableMetaInfo;
        this.batchDataSource = batchDataSource;
        this.businessDataSource = businessDataSource;
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.subChunkSize = subChunkSize;
    }

    @Override
    protected String getJobName() {
        return "shipComplianceDataSyncJob";
    }

    @Override
    protected String getStepName() {
        return "shipComplianceSyncStep";
    }

    /** Resolves the @StepScope reader bean via the @Configuration proxy. */
    @Override
    protected ItemReader<ShipComplianceDto> createReader() {
        return shipComplianceReader(businessDataSource, tableMetaInfo);
    }

    @Override
    protected ItemProcessor<ShipComplianceDto, ShipComplianceEntity> createProcessor() {
        return new ShipComplianceProcessor();
    }

    /** Writer commits in sub-chunks of {@code subChunkSize} within each execution-id group. */
    @Override
    protected ItemWriter<ShipComplianceEntity> createWriter() {
        return new ShipComplianceWriter(complianceRepository, transactionManager, subChunkSize);
    }

    // --- ShipCompliance Reader ---

    @Bean
    @StepScope
    public ItemReader<ShipComplianceDto> shipComplianceReader(
            @Qualifier("businessDataSource") DataSource businessDataSource,
            TableMetaInfo tableMetaInfo) {
        return new ShipComplianceReader(businessDataSource, tableMetaInfo);
    }

    // --- Listeners ---

    /** Marks the source batch rows complete on the business DB after a successful write. */
    @Bean
    public BatchWriteListener<ShipComplianceEntity> shipComplianceWriteListener() {
        String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCompliance);
        return new BatchWriteListener<>(businessJdbcTemplate, sql);
    }

    // --- Steps ---

    @Bean(name = "shipComplianceSyncStep")
    public Step shipComplianceSyncStep() {
        log.info("Step 생성: shipComplianceSyncStep");
        return new StepBuilder(getStepName(), jobRepository)
                // Chunk boundary = change of source job-execution id, not a fixed count.
                .<ShipComplianceDto, ShipComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
                .reader(createReader())
                .processor(createProcessor())
                .writer(createWriter())
                .listener(new GroupByExecutionIdReadListener())
                .listener(new GroupByExecutionIdChunkListener())
                .listener(shipComplianceWriteListener())
                .build();
    }

    /** Single-step flow: just the sync step. */
    @Override
    protected Job createJobFlow(JobBuilder jobBuilder) {
        return jobBuilder
                .start(shipComplianceSyncStep())
                .build();
    }

    @Bean(name = "shipComplianceDataSyncJob")
    public Job shipComplianceDataSyncJob() {
        return job();
    }
}

// ===== src/main/java/com/snp/batch/jobs/datasync/batch/compliance/dto/CompanyComplianceChangeDto.java =====
package com.snp.batch.jobs.datasync.batch.compliance.dto;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;
import java.time.LocalDateTime;

/**
 * Row DTO for a single company-compliance field change (column name plus
 * before/after values), grouped by source job-execution id for chunking.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class CompanyComplianceChangeDto implements JobExecutionGroupable {
    private Long jobExecutionId;       // source batch execution this row belongs to
    private String companyCd;          // company identifier
    private LocalDateTime lastMdfcnDt; // last modification timestamp of the change
    private String flctnColNm;         // name of the changed column
    private String bfrVal;             // value before the change
    private String aftrVal;            // value after the change

    /** Explicit override so the grouping contract is visible at a glance. */
    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}
Long companyFatfCmptncCountry; + private Long companyEuSanctionList; + private Long companyOfacSanctionList; + private Long companyOfacNonSdnSanctionList; + private Long companyOfacssiSanctionList; + private Long companySwissSanctionList; + private Long companyUaeSanctionList; + private Long companyUnSanctionList; + private Long prntCompanyComplianceRisk; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/dto/ShipComplianceChangeDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/dto/ShipComplianceChangeDto.java new file mode 100644 index 0000000..7c05ef6 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/dto/ShipComplianceChangeDto.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.compliance.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class ShipComplianceChangeDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private LocalDateTime lastMdfcnDt; + private String flctnColNm; + private String bfrVal; + private String aftrVal; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/dto/ShipComplianceDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/dto/ShipComplianceDto.java new file mode 100644 index 0000000..dae86eb --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/dto/ShipComplianceDto.java @@ -0,0 +1,54 @@ +package com.snp.batch.jobs.datasync.batch.compliance.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class 
// Reconstructed from a flattened git diff. This region holds four entity files under
// com.snp.batch.jobs.datasync.batch.compliance.entity — one top-level class per file;
// the shared package/import header is shown once here.
package com.snp.batch.jobs.datasync.batch.compliance.entity;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;
import lombok.experimental.SuperBuilder;

import java.time.LocalDateTime;

/**
 * One detected field-level change for a company compliance history row.
 * Produced by the change-detection reader and persisted to the
 * company compliance change-history table.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class CompanyComplianceChangeEntity implements JobExecutionGroupable {
    private String companyCd;          // company code (grouping key of the history rows)
    private LocalDateTime lastMdfcnDt; // timestamp of the record that introduced the change
    private String flctnColNm;         // DB column name whose value changed
    private String bfrVal;             // stringified value before the change ("null" when SQL NULL)
    private String aftrVal;            // stringified value after the change ("null" when SQL NULL)

    private Long jobExecutionId;       // batch execution id; 0L for change-detection jobs

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}

/**
 * Snapshot of per-company compliance/sanction indicators.
 * NOTE(review): the timestamp field is lstMdfcnDt (mirrors column lst_mdfcn_dt),
 * unlike the ship-side entities which use lastMdfcnDt — kept as-is to match the schema.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class CompanyComplianceEntity implements JobExecutionGroupable {
    private String companyCd;
    private LocalDateTime lstMdfcnDt;
    private Long companySnthsComplianceStatus;
    private Long companyAusSanctionList;
    private Long companyBesSanctionList;
    private Long companyCanSanctionList;
    private Long companyOfacSanctionCountry;
    private Long companyFatfCmptncCountry;
    private Long companyEuSanctionList;
    private Long companyOfacSanctionList;
    private Long companyOfacNonSdnSanctionList;
    private Long companyOfacssiSanctionList;
    private Long companySwissSanctionList;
    private Long companyUaeSanctionList;
    private Long companyUnSanctionList;
    private Long prntCompanyComplianceRisk;

    private Long jobExecutionId;

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}

/**
 * One detected field-level change for a ship compliance history row
 * (keyed by IMO number), persisted to the ship change-history table.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class ShipComplianceChangeEntity implements JobExecutionGroupable {
    private String imoNo;              // ship IMO number (grouping key)
    private LocalDateTime lastMdfcnDt; // timestamp of the record that introduced the change
    private String flctnColNm;         // DB column name whose value changed
    private String bfrVal;             // stringified value before the change ("null" when SQL NULL)
    private String aftrVal;            // stringified value after the change ("null" when SQL NULL)

    private Long jobExecutionId;       // batch execution id; 0L for change-detection jobs

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}

/**
 * Snapshot of per-ship compliance/sanction indicators, keyed by IMO number.
 * All indicator fields are kept as String to mirror the source table columns.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class ShipComplianceEntity implements JobExecutionGroupable {
    private String imoNo;
    private LocalDateTime lastMdfcnDt;
    private String lglSnthsSanction;
    private String shipBesSanctionList;
    private String shipDarkActvInd;
    private String shipDtldInfoNtmntd;
    private String shipEuSanctionList;
    private String shipFlgDspt;
    private String shipFlgSanctionCountry;
    private String shipFlgSanctionCountryHstry;
    private String shipOfacNonSdnSanctionList;
    private String shipOfacSanctionList;
    private String shipOfacCutnList;
    private String shipOwnrOfcsSanctionList;
    private String shipOwnrAusSanctionList;
    private String shipOwnrBesSanctionList;
    private String shipOwnrCanSanctionList;
    private String shipOwnrEuSanctionList;
    private String shipOwnrFatfRglZone;
    private String shipOwnrOfacSanctionHstry;
    private String shipOwnrOfacSanctionList;
    private String shipOwnrOfacSanctionCountry;
    private String shipOwnrPrntCompanyNcmplnc;
    private String shipOwnrPrntCompanyFatfRglZone;
    private String shipOwnrPrntCompanyOfacSanctionCountry;
    private String shipOwnrSwiSanctionList;
    private String shipOwnrUaeSanctionList;
    private String shipOwnrUnSanctionList;
    private String shipSanctionCountryPrtcllLastTwelveM;
    private String shipSanctionCountryPrtcllLastThrM;
    private String shipSanctionCountryPrtcllLastSixM;
    private String shipScrtyLglDsptEvent;
    private String shipStsPrtnrNonComplianceTwelveM;
    private String shipSwiSanctionList;
    private String shipUnSanctionList;

    private Long jobExecutionId;

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}
com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceChangeDto; +import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceChangeEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class CompanyComplianceChangeProcessor implements ItemProcessor { + + @Override + public CompanyComplianceChangeEntity process(CompanyComplianceChangeDto item) throws Exception { + return CompanyComplianceChangeEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .companyCd(item.getCompanyCd()) + .lastMdfcnDt(item.getLastMdfcnDt()) + .flctnColNm(item.getFlctnColNm()) + .bfrVal(item.getBfrVal()) + .aftrVal(item.getAftrVal()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/CompanyComplianceProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/CompanyComplianceProcessor.java new file mode 100644 index 0000000..97ac007 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/CompanyComplianceProcessor.java @@ -0,0 +1,33 @@ +package com.snp.batch.jobs.datasync.batch.compliance.processor; + +import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto; +import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class CompanyComplianceProcessor implements ItemProcessor { + + @Override + public CompanyComplianceEntity process(CompanyComplianceDto item) throws Exception { + return CompanyComplianceEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .companyCd(item.getCompanyCd()) + .lstMdfcnDt(item.getLstMdfcnDt()) + .companySnthsComplianceStatus(item.getCompanySnthsComplianceStatus()) + .companyAusSanctionList(item.getCompanyAusSanctionList()) + .companyBesSanctionList(item.getCompanyBesSanctionList()) + 
.companyCanSanctionList(item.getCompanyCanSanctionList()) + .companyOfacSanctionCountry(item.getCompanyOfacSanctionCountry()) + .companyFatfCmptncCountry(item.getCompanyFatfCmptncCountry()) + .companyEuSanctionList(item.getCompanyEuSanctionList()) + .companyOfacSanctionList(item.getCompanyOfacSanctionList()) + .companyOfacNonSdnSanctionList(item.getCompanyOfacNonSdnSanctionList()) + .companyOfacssiSanctionList(item.getCompanyOfacssiSanctionList()) + .companySwissSanctionList(item.getCompanySwissSanctionList()) + .companyUaeSanctionList(item.getCompanyUaeSanctionList()) + .companyUnSanctionList(item.getCompanyUnSanctionList()) + .prntCompanyComplianceRisk(item.getPrntCompanyComplianceRisk()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/ShipComplianceChangeProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/ShipComplianceChangeProcessor.java new file mode 100644 index 0000000..3d39722 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/ShipComplianceChangeProcessor.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.compliance.processor; + +import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceChangeDto; +import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceChangeEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class ShipComplianceChangeProcessor implements ItemProcessor { + + @Override + public ShipComplianceChangeEntity process(ShipComplianceChangeDto item) throws Exception { + return ShipComplianceChangeEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .lastMdfcnDt(item.getLastMdfcnDt()) + .flctnColNm(item.getFlctnColNm()) + .bfrVal(item.getBfrVal()) + .aftrVal(item.getAftrVal()) + .build(); + } +} diff --git 
a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/ShipComplianceProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/ShipComplianceProcessor.java new file mode 100644 index 0000000..302d9bd --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/processor/ShipComplianceProcessor.java @@ -0,0 +1,52 @@ +package com.snp.batch.jobs.datasync.batch.compliance.processor; + +import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto; +import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class ShipComplianceProcessor implements ItemProcessor { + + @Override + public ShipComplianceEntity process(ShipComplianceDto item) throws Exception { + return ShipComplianceEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .lastMdfcnDt(item.getLastMdfcnDt()) + .lglSnthsSanction(item.getLglSnthsSanction()) + .shipBesSanctionList(item.getShipBesSanctionList()) + .shipDarkActvInd(item.getShipDarkActvInd()) + .shipDtldInfoNtmntd(item.getShipDtldInfoNtmntd()) + .shipEuSanctionList(item.getShipEuSanctionList()) + .shipFlgDspt(item.getShipFlgDspt()) + .shipFlgSanctionCountry(item.getShipFlgSanctionCountry()) + .shipFlgSanctionCountryHstry(item.getShipFlgSanctionCountryHstry()) + .shipOfacNonSdnSanctionList(item.getShipOfacNonSdnSanctionList()) + .shipOfacSanctionList(item.getShipOfacSanctionList()) + .shipOfacCutnList(item.getShipOfacCutnList()) + .shipOwnrOfcsSanctionList(item.getShipOwnrOfcsSanctionList()) + .shipOwnrAusSanctionList(item.getShipOwnrAusSanctionList()) + .shipOwnrBesSanctionList(item.getShipOwnrBesSanctionList()) + .shipOwnrCanSanctionList(item.getShipOwnrCanSanctionList()) + .shipOwnrEuSanctionList(item.getShipOwnrEuSanctionList()) + .shipOwnrFatfRglZone(item.getShipOwnrFatfRglZone()) + 
.shipOwnrOfacSanctionHstry(item.getShipOwnrOfacSanctionHstry()) + .shipOwnrOfacSanctionList(item.getShipOwnrOfacSanctionList()) + .shipOwnrOfacSanctionCountry(item.getShipOwnrOfacSanctionCountry()) + .shipOwnrPrntCompanyNcmplnc(item.getShipOwnrPrntCompanyNcmplnc()) + .shipOwnrPrntCompanyFatfRglZone(item.getShipOwnrPrntCompanyFatfRglZone()) + .shipOwnrPrntCompanyOfacSanctionCountry(item.getShipOwnrPrntCompanyOfacSanctionCountry()) + .shipOwnrSwiSanctionList(item.getShipOwnrSwiSanctionList()) + .shipOwnrUaeSanctionList(item.getShipOwnrUaeSanctionList()) + .shipOwnrUnSanctionList(item.getShipOwnrUnSanctionList()) + .shipSanctionCountryPrtcllLastTwelveM(item.getShipSanctionCountryPrtcllLastTwelveM()) + .shipSanctionCountryPrtcllLastThrM(item.getShipSanctionCountryPrtcllLastThrM()) + .shipSanctionCountryPrtcllLastSixM(item.getShipSanctionCountryPrtcllLastSixM()) + .shipScrtyLglDsptEvent(item.getShipScrtyLglDsptEvent()) + .shipStsPrtnrNonComplianceTwelveM(item.getShipStsPrtnrNonComplianceTwelveM()) + .shipSwiSanctionList(item.getShipSwiSanctionList()) + .shipUnSanctionList(item.getShipUnSanctionList()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/reader/CompanyComplianceChangeReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/reader/CompanyComplianceChangeReader.java new file mode 100644 index 0000000..e2c6419 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/reader/CompanyComplianceChangeReader.java @@ -0,0 +1,185 @@ +package com.snp.batch.jobs.datasync.batch.compliance.reader; + +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceChangeDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.jdbc.core.JdbcTemplate; + 
package com.snp.batch.jobs.datasync.batch.compliance.reader;

import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceChangeDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.*;

/**
 * Detects field-level changes between consecutive history snapshots of the same
 * company (ordered by lst_mdfcn_dt) and emits one DTO per changed column.
 * Rows already present in the change-history table are skipped via an in-memory
 * key set (company_cd|timestamp|column).
 *
 * FIX: the buffer is consumed from the head on every read(); the original
 * ArrayList.remove(0) was O(n) per item (O(n^2) over a run). An ArrayDeque makes
 * each poll O(1) with identical ordering.
 */
@Slf4j
public class CompanyComplianceChangeReader implements ItemReader<CompanyComplianceChangeDto> {
    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate batchJdbcTemplate;
    private final String targetSchema;
    private final Deque<CompanyComplianceChangeDto> changeBuffer = new ArrayDeque<>();
    private boolean initialized = false;

    // Columns compared between consecutive snapshots of the same company.
    private static final List<String> CHANGE_DETECT_COLUMNS = Arrays.asList(
            "company_snths_compliance_status",
            "company_aus_sanction_list",
            "company_bes_sanction_list",
            "company_can_sanction_list",
            "company_ofac_sanction_country",
            "company_fatf_cmptnc_country",
            "company_eu_sanction_list",
            "company_ofac_sanction_list",
            "company_ofac_non_sdn_sanction_list",
            "company_ofacssi_sanction_list",
            "company_swiss_sanction_list",
            "company_uae_sanction_list",
            "company_un_sanction_list",
            "prnt_company_compliance_risk"
    );

    public CompanyComplianceChangeReader(@Qualifier("batchDataSource") DataSource batchDataSource,
                                         TableMetaInfo tableMetaInfo,
                                         @Value("${app.batch.target-schema.name}") String targetSchema) {
        this.batchJdbcTemplate = new JdbcTemplate(batchDataSource);
        this.tableMetaInfo = tableMetaInfo;
        this.targetSchema = targetSchema;
    }

    /**
     * Lazily builds the full change buffer on first call, then drains it one item
     * at a time. pollFirst() returns null when exhausted, which signals
     * end-of-input to Spring Batch.
     */
    @Override
    public CompanyComplianceChangeDto read() throws Exception {
        if (!initialized) {
            initializeChangeBuffer();
            initialized = true;
        }
        return changeBuffer.pollFirst();
    }

    /**
     * Loads the entire company compliance history plus the already-persisted change
     * history, then diffs consecutive snapshots per company.
     * NOTE(review): both tables are loaded fully into memory — acceptable for current
     * volumes, revisit if the history table grows large. Schema/table names come from
     * trusted configuration, not user input.
     */
    private void initializeChangeBuffer() {
        log.info("[CompanyComplianceChangeReader] 변경 이력 감지 시작");

        // All snapshots, ordered so consecutive rows of a company are adjacent in time.
        String sql = String.format("""
            SELECT company_cd, lst_mdfcn_dt,
                   company_snths_compliance_status, company_aus_sanction_list,
                   company_bes_sanction_list, company_can_sanction_list,
                   company_ofac_sanction_country, company_fatf_cmptnc_country,
                   company_eu_sanction_list, company_ofac_sanction_list,
                   company_ofac_non_sdn_sanction_list, company_ofacssi_sanction_list,
                   company_swiss_sanction_list, company_uae_sanction_list,
                   company_un_sanction_list, prnt_company_compliance_risk
            FROM %s.%s
            ORDER BY company_cd, lst_mdfcn_dt ASC
            """, targetSchema, tableMetaInfo.targetTbCompanyComplianceHstry);

        List<Map<String, Object>> allData = batchJdbcTemplate.queryForList(sql);
        log.info("[CompanyComplianceChangeReader] 전체 데이터 조회 완료: {} 건", allData.size());

        // Keys of changes already persisted, used to avoid re-inserting duplicates.
        String existingSql = String.format("""
            SELECT company_cd, last_mdfcn_dt, flctn_col_nm
            FROM %s.%s
            """, targetSchema, tableMetaInfo.targetTbCompanyComplianceInfoHstry);

        Set<String> existingKeys = new HashSet<>();
        try {
            List<Map<String, Object>> existingData = batchJdbcTemplate.queryForList(existingSql);
            for (Map<String, Object> row : existingData) {
                existingKeys.add(buildExistingKey(row));
            }
            log.info("[CompanyComplianceChangeReader] 기존 변경 이력 데이터: {} 건", existingKeys.size());
        } catch (Exception e) {
            // Best-effort: first run may have no target table yet; proceed with no dedup keys.
            log.warn("[CompanyComplianceChangeReader] 기존 데이터 조회 실패 (테이블이 비어있을 수 있음): {}", e.getMessage());
        }

        // Group snapshots per company, preserving the time order from the query.
        Map<String, List<Map<String, Object>>> groupedByCompanyCd = new LinkedHashMap<>();
        for (Map<String, Object> row : allData) {
            String companyCd = (String) row.get("company_cd");
            groupedByCompanyCd.computeIfAbsent(companyCd, k -> new ArrayList<>()).add(row);
        }

        // Diff each consecutive pair of snapshots within a company.
        long changeCount = 0;
        for (Map.Entry<String, List<Map<String, Object>>> entry : groupedByCompanyCd.entrySet()) {
            String companyCd = entry.getKey();
            Map<String, Object> previousRecord = null;
            for (Map<String, Object> currentRecord : entry.getValue()) {
                if (previousRecord != null) {
                    List<CompanyComplianceChangeDto> changes =
                            detectChanges(companyCd, previousRecord, currentRecord, existingKeys);
                    changeBuffer.addAll(changes);
                    changeCount += changes.size();
                }
                previousRecord = currentRecord;
            }
        }

        log.info("[CompanyComplianceChangeReader] 변경 이력 감지 완료: {} 건 (company_cd 그룹: {} 개)", changeCount,
                groupedByCompanyCd.size());
    }

    /**
     * Compares one pair of consecutive snapshots and returns a DTO per monitored
     * column whose value differs, skipping changes already persisted.
     */
    private List<CompanyComplianceChangeDto> detectChanges(String companyCd,
                                                           Map<String, Object> previousRecord,
                                                           Map<String, Object> currentRecord,
                                                           Set<String> existingKeys) {
        List<CompanyComplianceChangeDto> changes = new ArrayList<>();
        Timestamp currentTs = (Timestamp) currentRecord.get("lst_mdfcn_dt");
        LocalDateTime lastMdfcnDt = currentTs != null ? currentTs.toLocalDateTime() : null;

        for (String column : CHANGE_DETECT_COLUMNS) {
            Object prevValue = previousRecord.get(column);
            Object currValue = currentRecord.get(column);

            if (!Objects.equals(prevValue, currValue)) {
                // Dedup key matches buildExistingKey() format: cd|timestamp|column.
                String existingKey = companyCd + "|" + lastMdfcnDt + "|" + column;
                if (existingKeys.contains(existingKey)) {
                    continue; // already persisted on a previous run
                }

                changes.add(CompanyComplianceChangeDto.builder()
                        .jobExecutionId(0L) // this job is not keyed by a job_execution_id
                        .companyCd(companyCd)
                        .lastMdfcnDt(lastMdfcnDt)
                        .flctnColNm(column)
                        .bfrVal(convertToString(prevValue))
                        .aftrVal(convertToString(currValue))
                        .build());
            }
        }

        return changes;
    }

    /** Builds the dedup key (company_cd|last_mdfcn_dt|flctn_col_nm) for a persisted change row. */
    private String buildExistingKey(Map<String, Object> row) {
        String companyCd = (String) row.get("company_cd");
        Object lastMdfcnDtObj = row.get("last_mdfcn_dt");
        LocalDateTime lastMdfcnDt = null;
        if (lastMdfcnDtObj instanceof Timestamp) {
            lastMdfcnDt = ((Timestamp) lastMdfcnDtObj).toLocalDateTime();
        }
        String flctnColNm = (String) row.get("flctn_col_nm");
        return companyCd + "|" + lastMdfcnDt + "|" + flctnColNm;
    }

    /**
     * Stringifies a column value for the bfr/aftr fields. NOTE(review): SQL NULL maps
     * to the literal string "null", which is indistinguishable from a real "null"
     * string value — preexisting behavior kept for data compatibility.
     */
    private String convertToString(Object value) {
        if (value == null) {
            return "null";
        }
        return String.valueOf(value);
    }
}
package com.snp.batch.jobs.datasync.batch.compliance.reader;

import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Deque;

/**
 * Reads company compliance snapshots group-at-a-time from the business source:
 * finds the next pending target id, loads all of its rows into a buffer, marks the
 * target as processed, then drains the buffer one DTO per read().
 *
 * FIX: the buffer is consumed from the head; the original ArrayList.remove(0) was
 * O(n) per item — replaced with ArrayDeque.pollFirst() (O(1), same order). The 14
 * copy-pasted nullable-Long ternaries are folded into readLong().
 */
@Slf4j
public class CompanyComplianceReader implements ItemReader<CompanyComplianceDto> {
    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate businessJdbcTemplate;
    private final Deque<CompanyComplianceDto> allDataBuffer = new ArrayDeque<>();

    public CompanyComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.tableMetaInfo = tableMetaInfo;
    }

    /**
     * Returns the next buffered DTO, fetching the next group when the buffer is empty.
     * NOTE(review): if a pending group yields zero rows the step ends even when later
     * groups exist — preexisting behavior, kept as-is (TODO confirm intended).
     */
    @Override
    public CompanyComplianceDto read() throws Exception {
        if (allDataBuffer.isEmpty()) {
            fetchNextGroup();
        }
        return allDataBuffer.pollFirst(); // null signals end-of-input to Spring Batch
    }

    /** Loads all rows of the next pending target id and marks that id as processed. */
    private void fetchNextGroup() {
        final Long nextTargetId;
        try {
            nextTargetId = businessJdbcTemplate.queryForObject(
                    CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyComplianceInfo), Long.class);
        } catch (Exception e) {
            // queryForObject throws when no pending target remains; treated as end of input.
            return;
        }

        if (nextTargetId != null) {
            log.info("[CompanyComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
            String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
            allDataBuffer.addAll(businessJdbcTemplate.query(sql, (rs, rowNum) -> {
                Timestamp lstMdfcnDtTs = rs.getTimestamp("lst_mdfcn_dt");

                return CompanyComplianceDto.builder()
                        .jobExecutionId(nextTargetId)
                        .companyCd(rs.getString("company_cd"))
                        .lstMdfcnDt(lstMdfcnDtTs != null ? lstMdfcnDtTs.toLocalDateTime() : null)
                        .companySnthsComplianceStatus(readLong(rs, "company_snths_compliance_status"))
                        .companyAusSanctionList(readLong(rs, "company_aus_sanction_list"))
                        .companyBesSanctionList(readLong(rs, "company_bes_sanction_list"))
                        .companyCanSanctionList(readLong(rs, "company_can_sanction_list"))
                        .companyOfacSanctionCountry(readLong(rs, "company_ofac_sanction_country"))
                        .companyFatfCmptncCountry(readLong(rs, "company_fatf_cmptnc_country"))
                        .companyEuSanctionList(readLong(rs, "company_eu_sanction_list"))
                        .companyOfacSanctionList(readLong(rs, "company_ofac_sanction_list"))
                        .companyOfacNonSdnSanctionList(readLong(rs, "company_ofac_non_sdn_sanction_list"))
                        .companyOfacssiSanctionList(readLong(rs, "company_ofacssi_sanction_list"))
                        .companySwissSanctionList(readLong(rs, "company_swiss_sanction_list"))
                        .companyUaeSanctionList(readLong(rs, "company_uae_sanction_list"))
                        .companyUnSanctionList(readLong(rs, "company_un_sanction_list"))
                        .prntCompanyComplianceRisk(readLong(rs, "prnt_company_compliance_risk"))
                        .build();
            }, nextTargetId));
            updateBatchProcessing(nextTargetId);
        }
    }

    /** Returns the column as Long, or null when the column is SQL NULL. */
    private static Long readLong(ResultSet rs, String column) throws SQLException {
        return rs.getObject(column) != null ? rs.getLong(column) : null;
    }

    /** Marks the given target execution id as processed in the source table. */
    private void updateBatchProcessing(Long targetExecutionId) {
        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
        businessJdbcTemplate.update(sql, targetExecutionId);
    }
}
package com.snp.batch.jobs.datasync.batch.compliance.reader;

import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceChangeDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.*;

/**
 * Detects field-level changes between consecutive history snapshots of the same
 * ship (keyed by IMO number, ordered by last_mdfcn_dt) and emits one DTO per
 * changed column. Already-persisted changes are skipped via an in-memory key set.
 * Mirrors CompanyComplianceChangeReader.
 *
 * FIX: the buffer is consumed from the head on every read(); the original
 * ArrayList.remove(0) was O(n) per item (O(n^2) over a run). An ArrayDeque makes
 * each poll O(1) with identical ordering.
 */
@Slf4j
public class ShipComplianceChangeReader implements ItemReader<ShipComplianceChangeDto> {
    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate batchJdbcTemplate;
    private final String targetSchema;
    private final Deque<ShipComplianceChangeDto> changeBuffer = new ArrayDeque<>();
    private boolean initialized = false;

    // Columns compared between consecutive snapshots of the same ship.
    private static final List<String> CHANGE_DETECT_COLUMNS = Arrays.asList(
            "lgl_snths_sanction",
            "ship_bes_sanction_list",
            "ship_dark_actv_ind",
            "ship_dtld_info_ntmntd",
            "ship_eu_sanction_list",
            "ship_flg_dspt",
            "ship_flg_sanction_country",
            "ship_flg_sanction_country_hstry",
            "ship_ofac_non_sdn_sanction_list",
            "ship_ofac_sanction_list",
            "ship_ofac_cutn_list",
            "ship_ownr_ofcs_sanction_list",
            "ship_ownr_aus_sanction_list",
            "ship_ownr_bes_sanction_list",
            "ship_ownr_can_sanction_list",
            "ship_ownr_eu_sanction_list",
            "ship_ownr_fatf_rgl_zone",
            "ship_ownr_ofac_sanction_hstry",
            "ship_ownr_ofac_sanction_list",
            "ship_ownr_ofac_sanction_country",
            "ship_ownr_prnt_company_ncmplnc",
            "ship_ownr_prnt_company_fatf_rgl_zone",
            "ship_ownr_prnt_company_ofac_sanction_country",
            "ship_ownr_swi_sanction_list",
            "ship_ownr_uae_sanction_list",
            "ship_ownr_un_sanction_list",
            "ship_sanction_country_prtcll_last_twelve_m",
            "ship_sanction_country_prtcll_last_thr_m",
            "ship_sanction_country_prtcll_last_six_m",
            "ship_scrty_lgl_dspt_event",
            "ship_sts_prtnr_non_compliance_twelve_m",
            "ship_swi_sanction_list",
            "ship_un_sanction_list"
    );

    public ShipComplianceChangeReader(@Qualifier("batchDataSource") DataSource batchDataSource,
                                      TableMetaInfo tableMetaInfo,
                                      @Value("${app.batch.target-schema.name}") String targetSchema) {
        this.batchJdbcTemplate = new JdbcTemplate(batchDataSource);
        this.tableMetaInfo = tableMetaInfo;
        this.targetSchema = targetSchema;
    }

    /**
     * Lazily builds the full change buffer on first call, then drains it one item
     * at a time. pollFirst() returns null when exhausted (end-of-input for Spring Batch).
     */
    @Override
    public ShipComplianceChangeDto read() throws Exception {
        if (!initialized) {
            initializeChangeBuffer();
            initialized = true;
        }
        return changeBuffer.pollFirst();
    }

    /**
     * Loads the entire ship compliance history plus the already-persisted change
     * history, then diffs consecutive snapshots per ship.
     * NOTE(review): both tables are loaded fully into memory — acceptable for current
     * volumes, revisit if the history table grows large. Schema/table names come from
     * trusted configuration, not user input.
     */
    private void initializeChangeBuffer() {
        log.info("[ShipComplianceChangeReader] 변경 이력 감지 시작");

        // All snapshots, ordered so consecutive rows of a ship are adjacent in time.
        String sql = String.format("""
            SELECT imo_no, last_mdfcn_dt,
                   lgl_snths_sanction, ship_bes_sanction_list, ship_dark_actv_ind,
                   ship_dtld_info_ntmntd, ship_eu_sanction_list, ship_flg_dspt,
                   ship_flg_sanction_country, ship_flg_sanction_country_hstry,
                   ship_ofac_non_sdn_sanction_list, ship_ofac_sanction_list, ship_ofac_cutn_list,
                   ship_ownr_ofcs_sanction_list, ship_ownr_aus_sanction_list, ship_ownr_bes_sanction_list,
                   ship_ownr_can_sanction_list, ship_ownr_eu_sanction_list, ship_ownr_fatf_rgl_zone,
                   ship_ownr_ofac_sanction_hstry, ship_ownr_ofac_sanction_list,
                   ship_ownr_ofac_sanction_country, ship_ownr_prnt_company_ncmplnc,
                   ship_ownr_prnt_company_fatf_rgl_zone, ship_ownr_prnt_company_ofac_sanction_country,
                   ship_ownr_swi_sanction_list, ship_ownr_uae_sanction_list, ship_ownr_un_sanction_list,
                   ship_sanction_country_prtcll_last_twelve_m, ship_sanction_country_prtcll_last_thr_m,
                   ship_sanction_country_prtcll_last_six_m, ship_scrty_lgl_dspt_event,
                   ship_sts_prtnr_non_compliance_twelve_m, ship_swi_sanction_list, ship_un_sanction_list
            FROM %s.%s
            ORDER BY imo_no, last_mdfcn_dt ASC
            """, targetSchema, tableMetaInfo.targetTbShipComplianceHstry);

        List<Map<String, Object>> allData = batchJdbcTemplate.queryForList(sql);
        log.info("[ShipComplianceChangeReader] 전체 데이터 조회 완료: {} 건", allData.size());

        // Keys of changes already persisted, used to avoid re-inserting duplicates.
        String existingSql = String.format("""
            SELECT imo_no, last_mdfcn_dt, flctn_col_nm
            FROM %s.%s
            """, targetSchema, tableMetaInfo.targetTbShipComplianceInfoHstry);

        Set<String> existingKeys = new HashSet<>();
        try {
            List<Map<String, Object>> existingData = batchJdbcTemplate.queryForList(existingSql);
            for (Map<String, Object> row : existingData) {
                existingKeys.add(buildExistingKey(row));
            }
            log.info("[ShipComplianceChangeReader] 기존 변경 이력 데이터: {} 건", existingKeys.size());
        } catch (Exception e) {
            // Best-effort: first run may have no target table yet; proceed with no dedup keys.
            log.warn("[ShipComplianceChangeReader] 기존 데이터 조회 실패 (테이블이 비어있을 수 있음): {}", e.getMessage());
        }

        // Group snapshots per ship, preserving the time order from the query.
        Map<String, List<Map<String, Object>>> groupedByImoNo = new LinkedHashMap<>();
        for (Map<String, Object> row : allData) {
            String imoNo = (String) row.get("imo_no");
            groupedByImoNo.computeIfAbsent(imoNo, k -> new ArrayList<>()).add(row);
        }

        // Diff each consecutive pair of snapshots within a ship.
        long changeCount = 0;
        for (Map.Entry<String, List<Map<String, Object>>> entry : groupedByImoNo.entrySet()) {
            String imoNo = entry.getKey();
            Map<String, Object> previousRecord = null;
            for (Map<String, Object> currentRecord : entry.getValue()) {
                if (previousRecord != null) {
                    List<ShipComplianceChangeDto> changes =
                            detectChanges(imoNo, previousRecord, currentRecord, existingKeys);
                    changeBuffer.addAll(changes);
                    changeCount += changes.size();
                }
                previousRecord = currentRecord;
            }
        }

        log.info("[ShipComplianceChangeReader] 변경 이력 감지 완료: {} 건 (imo_no 그룹: {} 개)", changeCount, groupedByImoNo.size());
    }

    /**
     * Compares one pair of consecutive snapshots and returns a DTO per monitored
     * column whose value differs, skipping changes already persisted.
     */
    private List<ShipComplianceChangeDto> detectChanges(String imoNo,
                                                        Map<String, Object> previousRecord,
                                                        Map<String, Object> currentRecord,
                                                        Set<String> existingKeys) {
        List<ShipComplianceChangeDto> changes = new ArrayList<>();
        Timestamp currentTs = (Timestamp) currentRecord.get("last_mdfcn_dt");
        LocalDateTime lastMdfcnDt = currentTs != null ? currentTs.toLocalDateTime() : null;

        for (String column : CHANGE_DETECT_COLUMNS) {
            Object prevValue = previousRecord.get(column);
            Object currValue = currentRecord.get(column);

            if (!Objects.equals(prevValue, currValue)) {
                // Dedup key matches buildExistingKey() format: imo|timestamp|column.
                String existingKey = imoNo + "|" + lastMdfcnDt + "|" + column;
                if (existingKeys.contains(existingKey)) {
                    continue; // already persisted on a previous run
                }

                changes.add(ShipComplianceChangeDto.builder()
                        .jobExecutionId(0L) // this job is not keyed by a job_execution_id
                        .imoNo(imoNo)
                        .lastMdfcnDt(lastMdfcnDt)
                        .flctnColNm(column)
                        .bfrVal(convertToString(prevValue))
                        .aftrVal(convertToString(currValue))
                        .build());
            }
        }

        return changes;
    }

    /** Builds the dedup key (imo_no|last_mdfcn_dt|flctn_col_nm) for a persisted change row. */
    private String buildExistingKey(Map<String, Object> row) {
        String imoNo = (String) row.get("imo_no");
        Object lastMdfcnDtObj = row.get("last_mdfcn_dt");
        LocalDateTime lastMdfcnDt = null;
        if (lastMdfcnDtObj instanceof Timestamp) {
            lastMdfcnDt = ((Timestamp) lastMdfcnDtObj).toLocalDateTime();
        }
        String flctnColNm = (String) row.get("flctn_col_nm");
        return imoNo + "|" + lastMdfcnDt + "|" + flctnColNm;
    }

    /**
     * Stringifies a column value for the bfr/aftr fields. NOTE(review): SQL NULL maps
     * to the literal string "null", indistinguishable from a real "null" string —
     * preexisting behavior kept for data compatibility.
     */
    private String convertToString(Object value) {
        if (value == null) {
            return "null";
        }
        return String.valueOf(value);
    }
}
package com.snp.batch.jobs.datasync.batch.compliance.reader;

import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Deque;

/**
 * Reads ship compliance snapshots group-at-a-time from the business source:
 * finds the next pending target id, loads all of its rows into a buffer, marks the
 * target as processed, then drains the buffer one DTO per read().
 * Mirrors CompanyComplianceReader.
 *
 * FIX: the buffer is consumed from the head; the original ArrayList.remove(0) was
 * O(n) per item — replaced with ArrayDeque.pollFirst() (O(1), same order).
 */
@Slf4j
public class ShipComplianceReader implements ItemReader<ShipComplianceDto> {
    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate businessJdbcTemplate;
    private final Deque<ShipComplianceDto> allDataBuffer = new ArrayDeque<>();

    public ShipComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.tableMetaInfo = tableMetaInfo;
    }

    /**
     * Returns the next buffered DTO, fetching the next group when the buffer is empty.
     * NOTE(review): if a pending group yields zero rows the step ends even when later
     * groups exist — preexisting behavior, kept as-is (TODO confirm intended).
     */
    @Override
    public ShipComplianceDto read() throws Exception {
        if (allDataBuffer.isEmpty()) {
            fetchNextGroup();
        }
        return allDataBuffer.pollFirst(); // null signals end-of-input to Spring Batch
    }

    /** Loads all rows of the next pending target id and marks that id as processed. */
    private void fetchNextGroup() {
        final Long nextTargetId;
        try {
            nextTargetId = businessJdbcTemplate.queryForObject(
                    CommonSql.getNextTargetQuery(tableMetaInfo.sourceCompliance), Long.class);
        } catch (Exception e) {
            // queryForObject throws when no pending target remains; treated as end of input.
            return;
        }

        if (nextTargetId != null) {
            log.info("[ShipComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
            String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCompliance);
            allDataBuffer.addAll(businessJdbcTemplate.query(sql, (rs, rowNum) -> {
                Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");

                return ShipComplianceDto.builder()
                        .jobExecutionId(nextTargetId)
                        .imoNo(rs.getString("imo_no"))
                        .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null)
                        .lglSnthsSanction(rs.getString("lgl_snths_sanction"))
                        .shipBesSanctionList(rs.getString("ship_bes_sanction_list"))
                        .shipDarkActvInd(rs.getString("ship_dark_actv_ind"))
                        .shipDtldInfoNtmntd(rs.getString("ship_dtld_info_ntmntd"))
                        .shipEuSanctionList(rs.getString("ship_eu_sanction_list"))
                        .shipFlgDspt(rs.getString("ship_flg_dspt"))
                        .shipFlgSanctionCountry(rs.getString("ship_flg_sanction_country"))
                        .shipFlgSanctionCountryHstry(rs.getString("ship_flg_sanction_country_hstry"))
                        .shipOfacNonSdnSanctionList(rs.getString("ship_ofac_non_sdn_sanction_list"))
                        .shipOfacSanctionList(rs.getString("ship_ofac_sanction_list"))
                        .shipOfacCutnList(rs.getString("ship_ofac_cutn_list"))
                        .shipOwnrOfcsSanctionList(rs.getString("ship_ownr_ofcs_sanction_list"))
                        .shipOwnrAusSanctionList(rs.getString("ship_ownr_aus_sanction_list"))
                        .shipOwnrBesSanctionList(rs.getString("ship_ownr_bes_sanction_list"))
                        .shipOwnrCanSanctionList(rs.getString("ship_ownr_can_sanction_list"))
                        .shipOwnrEuSanctionList(rs.getString("ship_ownr_eu_sanction_list"))
                        .shipOwnrFatfRglZone(rs.getString("ship_ownr_fatf_rgl_zone"))
                        .shipOwnrOfacSanctionHstry(rs.getString("ship_ownr_ofac_sanction_hstry"))
                        .shipOwnrOfacSanctionList(rs.getString("ship_ownr_ofac_sanction_list"))
                        .shipOwnrOfacSanctionCountry(rs.getString("ship_ownr_ofac_sanction_country"))
                        .shipOwnrPrntCompanyNcmplnc(rs.getString("ship_ownr_prnt_company_ncmplnc"))
                        .shipOwnrPrntCompanyFatfRglZone(rs.getString("ship_ownr_prnt_company_fatf_rgl_zone"))
                        .shipOwnrPrntCompanyOfacSanctionCountry(rs.getString("ship_ownr_prnt_company_ofac_sanction_country"))
                        .shipOwnrSwiSanctionList(rs.getString("ship_ownr_swi_sanction_list"))
                        .shipOwnrUaeSanctionList(rs.getString("ship_ownr_uae_sanction_list"))
                        .shipOwnrUnSanctionList(rs.getString("ship_ownr_un_sanction_list"))
                        .shipSanctionCountryPrtcllLastTwelveM(rs.getString("ship_sanction_country_prtcll_last_twelve_m"))
                        .shipSanctionCountryPrtcllLastThrM(rs.getString("ship_sanction_country_prtcll_last_thr_m"))
                        .shipSanctionCountryPrtcllLastSixM(rs.getString("ship_sanction_country_prtcll_last_six_m"))
                        .shipScrtyLglDsptEvent(rs.getString("ship_scrty_lgl_dspt_event"))
                        .shipStsPrtnrNonComplianceTwelveM(rs.getString("ship_sts_prtnr_non_compliance_twelve_m"))
                        .shipSwiSanctionList(rs.getString("ship_swi_sanction_list"))
                        .shipUnSanctionList(rs.getString("ship_un_sanction_list"))
                        .build();
            }, nextTargetId));
            updateBatchProcessing(nextTargetId);
        }
    }

    /** Marks the given target execution id as processed in the source table. */
    private void updateBatchProcessing(Long targetExecutionId) {
        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCompliance);
        businessJdbcTemplate.update(sql, targetExecutionId);
    }
}
+public interface ComplianceRepository { + void saveShipCompliance(List shipComplianceEntityList); + void saveShipComplianceHistory(List shipComplianceEntityList); + void saveCompanyCompliance(List companyComplianceEntityList); + void saveCompanyComplianceHistory(List companyComplianceEntityList); + void saveCompanyComplianceChange(List companyComplianceChangeEntityList); + void saveShipComplianceChange(List shipComplianceChangeEntityList); +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/repository/ComplianceRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/repository/ComplianceRepositoryImpl.java new file mode 100644 index 0000000..e6b0251 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/repository/ComplianceRepositoryImpl.java @@ -0,0 +1,339 @@ +package com.snp.batch.jobs.datasync.batch.compliance.repository; + +import com.snp.batch.common.batch.repository.MultiDataSourceJdbcRepository; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceChangeEntity; +import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity; +import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceChangeEntity; +import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +import javax.sql.DataSource; +import java.sql.PreparedStatement; +import java.sql.Timestamp; +import java.util.List; + +@Slf4j +@Repository("complianceRepository") +public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository implements ComplianceRepository { + + private DataSource batchDataSource; + private DataSource businessDataSource; + 
private final TableMetaInfo tableMetaInfo;
+
+    /** Wires both the batch (metadata) and business (source) data sources into the shared base repository. */
+    public ComplianceRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource,
+                                    @Qualifier("businessDataSource") DataSource businessDataSource,
+                                    TableMetaInfo tableMetaInfo) {
+
+        super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource));
+
+        this.batchDataSource = batchDataSource;
+        this.businessDataSource = businessDataSource;
+        this.tableMetaInfo = tableMetaInfo;
+    }
+
+    // --- Base template callbacks ------------------------------------------
+    // All return null / do nothing on purpose: this repository does not use
+    // the generic single-table CRUD path of the base class; it issues its own
+    // per-table upsert SQL (see the saveXxx methods below).
+
+    @Override
+    protected String getTableName() {
+        return null;
+    }
+
+    @Override
+    protected RowMapper getRowMapper() {
+        return null;
+    }
+
+    @Override
+    protected Long extractId(ShipComplianceEntity entity) {
+        return null;
+    }
+
+    @Override
+    protected String getInsertSql() {
+        return null;
+    }
+
+    @Override
+    protected String getUpdateSql() {
+        return null;
+    }
+
+    @Override
+    protected void setInsertParameters(PreparedStatement ps, ShipComplianceEntity entity) throws Exception {
+    }
+
+    @Override
+    protected void setUpdateParameters(PreparedStatement ps, ShipComplianceEntity entity) throws Exception {
+    }
+
+    @Override
+    protected String getEntityName() {
+        return null;
+    }
+
+    /** Upserts current ship-compliance snapshots; conflict key: imo_no. */
+    @Override
+    public void saveShipCompliance(List shipComplianceEntityList) {
+        String sql = ComplianceSql.getShipComplianceUpsertSql(tableMetaInfo.targetTbShipComplianceInfo, "imo_no");
+        if (shipComplianceEntityList == null || shipComplianceEntityList.isEmpty()) {
+            return;
+        }
+        log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
+
+        batchJdbcTemplate.batchUpdate(sql, shipComplianceEntityList, shipComplianceEntityList.size(),
+                (ps, entity) -> {
+                    try {
+                        bindShipCompliance(ps, entity);
+                    } catch (Exception e) {
+                        log.error("배치 삽입 파라미터 설정 실패", e);
+                        throw new RuntimeException(e);
+                    }
+                });
+
+        log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
+    }
+
+    /** Upserts ship-compliance history rows; conflict key: imo_no + last_mdfcn_dt. */
+    @Override
+    public void saveShipComplianceHistory(List shipComplianceEntityList) {
+        String sql = ComplianceSql.getShipComplianceUpsertSql(tableMetaInfo.targetTbShipComplianceHstry, "imo_no, last_mdfcn_dt");
+        if (shipComplianceEntityList == null || shipComplianceEntityList.isEmpty()) {
+            return;
+        }
+        log.debug("{} History 배치 삽입 시작: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
+
+        batchJdbcTemplate.batchUpdate(sql, shipComplianceEntityList, shipComplianceEntityList.size(),
+                (ps, entity) -> {
+                    try {
+                        bindShipCompliance(ps, entity);
+                    } catch (Exception e) {
+                        log.error("배치 삽입 파라미터 설정 실패", e);
+                        throw new RuntimeException(e);
+                    }
+                });
+
+        log.debug("{} History 배치 삽입 완료: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
+    }
+
+    /** Upserts current company-compliance snapshots; conflict key: company_cd. */
+    @Override
+    public void saveCompanyCompliance(List companyComplianceEntityList) {
+        String sql = ComplianceSql.getCompanyComplianceUpsertSql(tableMetaInfo.targetTbCompanyComplianceInfo, "company_cd");
+        if (companyComplianceEntityList == null || companyComplianceEntityList.isEmpty()) {
+            return;
+        }
+        log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
+
+        batchJdbcTemplate.batchUpdate(sql, companyComplianceEntityList, companyComplianceEntityList.size(),
+                (ps, entity) -> {
+                    try {
+                        bindCompanyCompliance(ps, entity);
+                    } catch (Exception e) {
+                        log.error("배치 삽입 파라미터 설정 실패", e);
+                        throw new RuntimeException(e);
+                    }
+                });
+
+        log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
+    }
+
+    /** Upserts company-compliance history rows; conflict key: company_cd + lst_mdfcn_dt. */
+    @Override
+    public void saveCompanyComplianceHistory(List companyComplianceEntityList) {
+        String sql = ComplianceSql.getCompanyComplianceUpsertSql(tableMetaInfo.targetTbCompanyComplianceHstry, "company_cd, lst_mdfcn_dt");
+        if (companyComplianceEntityList == null || companyComplianceEntityList.isEmpty()) {
+            return;
+        }
+        log.debug("{} History 배치 삽입 시작: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
+
+        batchJdbcTemplate.batchUpdate(sql, companyComplianceEntityList, companyComplianceEntityList.size(),
+                (ps, entity) -> {
+                    try {
+                        bindCompanyCompliance(ps, entity);
+                    } catch (Exception e) {
+                        log.error("배치 삽입 파라미터 설정 실패", e);
+                        throw new RuntimeException(e);
+                    }
+                });
+
+        log.debug("{} History 배치 삽입 완료: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
+    }
+
+    /**
+     * Binds the 36 positional parameters of the ship-compliance upsert.
+     * Parameter order must match the column list in
+     * ComplianceSql.getShipComplianceUpsertSql exactly.
+     */
+    public void bindShipCompliance(PreparedStatement pstmt, ShipComplianceEntity entity) throws Exception {
+        int idx = 1;
+        pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id
+        pstmt.setString(idx++, entity.getImoNo()); // 2. imo_no
+        pstmt.setTimestamp(idx++, entity.getLastMdfcnDt() != null ? Timestamp.valueOf(entity.getLastMdfcnDt()) : null); // 3. last_mdfcn_dt
+        pstmt.setString(idx++, entity.getLglSnthsSanction()); // 4. lgl_snths_sanction
+        pstmt.setString(idx++, entity.getShipBesSanctionList()); // 5. ship_bes_sanction_list
+        pstmt.setString(idx++, entity.getShipDarkActvInd()); // 6. ship_dark_actv_ind
+        pstmt.setString(idx++, entity.getShipDtldInfoNtmntd()); // 7. ship_dtld_info_ntmntd
+        pstmt.setString(idx++, entity.getShipEuSanctionList()); // 8. ship_eu_sanction_list
+        pstmt.setString(idx++, entity.getShipFlgDspt()); // 9. ship_flg_dspt
+        pstmt.setString(idx++, entity.getShipFlgSanctionCountry()); // 10. ship_flg_sanction_country
+        pstmt.setString(idx++, entity.getShipFlgSanctionCountryHstry()); // 11. ship_flg_sanction_country_hstry
+        pstmt.setString(idx++, entity.getShipOfacNonSdnSanctionList()); // 12. ship_ofac_non_sdn_sanction_list
+        pstmt.setString(idx++, entity.getShipOfacSanctionList()); // 13. ship_ofac_sanction_list
+        pstmt.setString(idx++, entity.getShipOfacCutnList()); // 14. ship_ofac_cutn_list
+        pstmt.setString(idx++, entity.getShipOwnrOfcsSanctionList()); // 15. ship_ownr_ofcs_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrAusSanctionList()); // 16. ship_ownr_aus_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrBesSanctionList()); // 17. ship_ownr_bes_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrCanSanctionList()); // 18. ship_ownr_can_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrEuSanctionList()); // 19. ship_ownr_eu_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrFatfRglZone()); // 20. ship_ownr_fatf_rgl_zone
+        pstmt.setString(idx++, entity.getShipOwnrOfacSanctionHstry()); // 21. ship_ownr_ofac_sanction_hstry
+        pstmt.setString(idx++, entity.getShipOwnrOfacSanctionList()); // 22. ship_ownr_ofac_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrOfacSanctionCountry()); // 23. ship_ownr_ofac_sanction_country
+        pstmt.setString(idx++, entity.getShipOwnrPrntCompanyNcmplnc()); // 24. ship_ownr_prnt_company_ncmplnc
+        pstmt.setString(idx++, entity.getShipOwnrPrntCompanyFatfRglZone()); // 25. ship_ownr_prnt_company_fatf_rgl_zone
+        pstmt.setString(idx++, entity.getShipOwnrPrntCompanyOfacSanctionCountry()); // 26. ship_ownr_prnt_company_ofac_sanction_country
+        pstmt.setString(idx++, entity.getShipOwnrSwiSanctionList()); // 27. ship_ownr_swi_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrUaeSanctionList()); // 28. ship_ownr_uae_sanction_list
+        pstmt.setString(idx++, entity.getShipOwnrUnSanctionList()); // 29. ship_ownr_un_sanction_list
+        pstmt.setString(idx++, entity.getShipSanctionCountryPrtcllLastTwelveM()); // 30. ship_sanction_country_prtcll_last_twelve_m
+        pstmt.setString(idx++, entity.getShipSanctionCountryPrtcllLastThrM()); // 31. ship_sanction_country_prtcll_last_thr_m
+        pstmt.setString(idx++, entity.getShipSanctionCountryPrtcllLastSixM()); // 32. ship_sanction_country_prtcll_last_six_m
+        pstmt.setString(idx++, entity.getShipScrtyLglDsptEvent()); // 33. ship_scrty_lgl_dspt_event
+        pstmt.setString(idx++, entity.getShipStsPrtnrNonComplianceTwelveM()); // 34. ship_sts_prtnr_non_compliance_twelve_m
+        pstmt.setString(idx++, entity.getShipSwiSanctionList()); // 35. ship_swi_sanction_list
+        pstmt.setString(idx++, entity.getShipUnSanctionList()); // 36. ship_un_sanction_list
+    }
+
+    /**
+     * Binds the 17 positional parameters of the company-compliance upsert.
+     * Numeric columns are nullable, hence the explicit setNull(BIGINT) branches.
+     */
+    public void bindCompanyCompliance(PreparedStatement pstmt, CompanyComplianceEntity entity) throws Exception {
+        int idx = 1;
+        pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id
+        pstmt.setString(idx++, entity.getCompanyCd()); // 2. company_cd
+        pstmt.setTimestamp(idx++, entity.getLstMdfcnDt() != null ? Timestamp.valueOf(entity.getLstMdfcnDt()) : null); // 3. lst_mdfcn_dt
+        if (entity.getCompanySnthsComplianceStatus() != null) {
+            pstmt.setLong(idx++, entity.getCompanySnthsComplianceStatus()); // 4. company_snths_compliance_status
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyAusSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyAusSanctionList()); // 5. company_aus_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyBesSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyBesSanctionList()); // 6. company_bes_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyCanSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyCanSanctionList()); // 7. company_can_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyOfacSanctionCountry() != null) {
+            pstmt.setLong(idx++, entity.getCompanyOfacSanctionCountry()); // 8. company_ofac_sanction_country
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyFatfCmptncCountry() != null) {
+            pstmt.setLong(idx++, entity.getCompanyFatfCmptncCountry()); // 9. company_fatf_cmptnc_country
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyEuSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyEuSanctionList()); // 10. company_eu_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyOfacSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyOfacSanctionList()); // 11. company_ofac_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyOfacNonSdnSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyOfacNonSdnSanctionList()); // 12. company_ofac_non_sdn_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyOfacssiSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyOfacssiSanctionList()); // 13. company_ofacssi_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanySwissSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanySwissSanctionList()); // 14. company_swiss_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyUaeSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyUaeSanctionList()); // 15. company_uae_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getCompanyUnSanctionList() != null) {
+            pstmt.setLong(idx++, entity.getCompanyUnSanctionList()); // 16. company_un_sanction_list
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+        if (entity.getPrntCompanyComplianceRisk() != null) {
+            pstmt.setLong(idx++, entity.getPrntCompanyComplianceRisk()); // 17. prnt_company_compliance_risk
+        } else {
+            pstmt.setNull(idx++, java.sql.Types.BIGINT);
+        }
+    }
+
+    /**
+     * Upserts per-column company change records.
+     * NOTE(review): the target here is targetTbCompanyComplianceInfoHstry
+     * while the snapshot-history above uses targetTbCompanyComplianceHstry —
+     * the naming is easy to confuse; confirm the mapping is intentional.
+     */
+    @Override
+    public void saveCompanyComplianceChange(List companyComplianceChangeEntityList) {
+        String sql = ComplianceSql.getCompanyComplianceChangeUpsertSql(tableMetaInfo.targetTbCompanyComplianceInfoHstry);
+        if (companyComplianceChangeEntityList == null || companyComplianceChangeEntityList.isEmpty()) {
+            return;
+        }
+        log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
+
+        batchJdbcTemplate.batchUpdate(sql, companyComplianceChangeEntityList, companyComplianceChangeEntityList.size(),
+                (ps, entity) -> {
+                    try {
+                        bindCompanyComplianceChange(ps, entity);
+                    } catch (Exception e) {
+                        log.error("배치 삽입 파라미터 설정 실패", e);
+                        throw new RuntimeException(e);
+                    }
+                });
+
+        log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
+    }
+
+    /** Binds the 6 positional parameters of the company change-record upsert. */
+    public void bindCompanyComplianceChange(PreparedStatement pstmt, CompanyComplianceChangeEntity entity) throws Exception {
+        int idx = 1;
+        pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id
+        pstmt.setString(idx++, entity.getCompanyCd()); // 2. company_cd
+        pstmt.setTimestamp(idx++, entity.getLastMdfcnDt() != null ? Timestamp.valueOf(entity.getLastMdfcnDt()) : null); // 3. last_mdfcn_dt
+        pstmt.setString(idx++, entity.getFlctnColNm()); // 4. flctn_col_nm
+        pstmt.setString(idx++, entity.getBfrVal()); // 5. bfr_val
+        pstmt.setString(idx++, entity.getAftrVal()); // 6.
 aftr_val
+    }
+
+    /** Upserts per-column ship change records (target: targetTbShipComplianceInfoHstry). */
+    @Override
+    public void saveShipComplianceChange(List shipComplianceChangeEntityList) {
+        String sql = ComplianceSql.getShipComplianceChangeUpsertSql(tableMetaInfo.targetTbShipComplianceInfoHstry);
+        if (shipComplianceChangeEntityList == null || shipComplianceChangeEntityList.isEmpty()) {
+            return;
+        }
+        log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
+
+        batchJdbcTemplate.batchUpdate(sql, shipComplianceChangeEntityList, shipComplianceChangeEntityList.size(),
+                (ps, entity) -> {
+                    try {
+                        bindShipComplianceChange(ps, entity);
+                    } catch (Exception e) {
+                        log.error("배치 삽입 파라미터 설정 실패", e);
+                        throw new RuntimeException(e);
+                    }
+                });
+
+        log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
+    }
+
+    /** Binds the 6 positional parameters of the ship change-record upsert. */
+    public void bindShipComplianceChange(PreparedStatement pstmt, ShipComplianceChangeEntity entity) throws Exception {
+        int idx = 1;
+        pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id
+        pstmt.setString(idx++, entity.getImoNo()); // 2. imo_no
+        pstmt.setTimestamp(idx++, entity.getLastMdfcnDt() != null ? Timestamp.valueOf(entity.getLastMdfcnDt()) : null); // 3. last_mdfcn_dt
+        pstmt.setString(idx++, entity.getFlctnColNm()); // 4. flctn_col_nm
+        pstmt.setString(idx++, entity.getBfrVal()); // 5. bfr_val
+        pstmt.setString(idx++, entity.getAftrVal()); // 6.
aftr_val
+    }
+}
 diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/repository/ComplianceSql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/repository/ComplianceSql.java new file mode 100644 index 0000000..bddc89a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/repository/ComplianceSql.java @@ -0,0 +1,171 @@
+package com.snp.batch.jobs.datasync.batch.compliance.repository;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+/**
+ * Builders for the PostgreSQL INSERT ... ON CONFLICT upsert statements used
+ * by ComplianceRepositoryImpl. Only identifiers (schema, table, conflict
+ * columns) are interpolated via formatted(); they come from application
+ * configuration, not user input — data values are bound as ? parameters.
+ *
+ * NOTE(review): TARGET_SCHEMA is a static field assigned from the bean
+ * constructor so the static methods can read it. This is a known smell
+ * (static state populated by Spring instantiation): any static call made
+ * before the context creates this @Component sees null. Confirm call order.
+ */
+@Component
+public class ComplianceSql {
+    private static String TARGET_SCHEMA;
+    public ComplianceSql(@Value("${app.batch.target-schema.name}") String targetSchema) {
+        TARGET_SCHEMA = targetSchema;
+    }
+
+    /**
+     * Ship-compliance upsert; shared by the snapshot table (conflict on
+     * imo_no) and the history table (conflict on imo_no, last_mdfcn_dt).
+     */
+    public static String getShipComplianceUpsertSql(String targetTable, String targetIndex) {
+        return """
+            INSERT INTO %s.%s (
+                crt_dt, creatr_id,
+                imo_no, last_mdfcn_dt, lgl_snths_sanction, ship_bes_sanction_list,
+                ship_dark_actv_ind, ship_dtld_info_ntmntd, ship_eu_sanction_list,
+                ship_flg_dspt, ship_flg_sanction_country, ship_flg_sanction_country_hstry,
+                ship_ofac_non_sdn_sanction_list, ship_ofac_sanction_list, ship_ofac_cutn_list,
+                ship_ownr_ofcs_sanction_list, ship_ownr_aus_sanction_list, ship_ownr_bes_sanction_list,
+                ship_ownr_can_sanction_list, ship_ownr_eu_sanction_list, ship_ownr_fatf_rgl_zone,
+                ship_ownr_ofac_sanction_hstry, ship_ownr_ofac_sanction_list,
+                ship_ownr_ofac_sanction_country, ship_ownr_prnt_company_ncmplnc,
+                ship_ownr_prnt_company_fatf_rgl_zone, ship_ownr_prnt_company_ofac_sanction_country,
+                ship_ownr_swi_sanction_list, ship_ownr_uae_sanction_list, ship_ownr_un_sanction_list,
+                ship_sanction_country_prtcll_last_twelve_m, ship_sanction_country_prtcll_last_thr_m,
+                ship_sanction_country_prtcll_last_six_m, ship_scrty_lgl_dspt_event,
+                ship_sts_prtnr_non_compliance_twelve_m, ship_swi_sanction_list, ship_un_sanction_list
+            )
+            VALUES (
+                CURRENT_TIMESTAMP, ?,
+                ?, ?, ?, ?,
+                ?, ?, ?,
+                ?, ?, ?,
+                ?, ?, ?,
+                ?, ?, ?,
+                ?, ?, ?,
+                ?, ?,
+                ?, ?,
+                ?, ?,
+                ?, ?, ?,
+                ?, ?,
+                ?, ?,
+                ?, ?, ?
+            )
+            ON CONFLICT (%s)
+            DO UPDATE SET
+                mdfcn_dt = CURRENT_TIMESTAMP,
+                mdfr_id = 'SYSTEM',
+                last_mdfcn_dt = EXCLUDED.last_mdfcn_dt,
+                lgl_snths_sanction = EXCLUDED.lgl_snths_sanction,
+                ship_bes_sanction_list = EXCLUDED.ship_bes_sanction_list,
+                ship_dark_actv_ind = EXCLUDED.ship_dark_actv_ind,
+                ship_dtld_info_ntmntd = EXCLUDED.ship_dtld_info_ntmntd,
+                ship_eu_sanction_list = EXCLUDED.ship_eu_sanction_list,
+                ship_flg_dspt = EXCLUDED.ship_flg_dspt,
+                ship_flg_sanction_country = EXCLUDED.ship_flg_sanction_country,
+                ship_flg_sanction_country_hstry = EXCLUDED.ship_flg_sanction_country_hstry,
+                ship_ofac_non_sdn_sanction_list = EXCLUDED.ship_ofac_non_sdn_sanction_list,
+                ship_ofac_sanction_list = EXCLUDED.ship_ofac_sanction_list,
+                ship_ofac_cutn_list = EXCLUDED.ship_ofac_cutn_list,
+                ship_ownr_ofcs_sanction_list = EXCLUDED.ship_ownr_ofcs_sanction_list,
+                ship_ownr_aus_sanction_list = EXCLUDED.ship_ownr_aus_sanction_list,
+                ship_ownr_bes_sanction_list = EXCLUDED.ship_ownr_bes_sanction_list,
+                ship_ownr_can_sanction_list = EXCLUDED.ship_ownr_can_sanction_list,
+                ship_ownr_eu_sanction_list = EXCLUDED.ship_ownr_eu_sanction_list,
+                ship_ownr_fatf_rgl_zone = EXCLUDED.ship_ownr_fatf_rgl_zone,
+                ship_ownr_ofac_sanction_hstry = EXCLUDED.ship_ownr_ofac_sanction_hstry,
+                ship_ownr_ofac_sanction_list = EXCLUDED.ship_ownr_ofac_sanction_list,
+                ship_ownr_ofac_sanction_country = EXCLUDED.ship_ownr_ofac_sanction_country,
+                ship_ownr_prnt_company_ncmplnc = EXCLUDED.ship_ownr_prnt_company_ncmplnc,
+                ship_ownr_prnt_company_fatf_rgl_zone = EXCLUDED.ship_ownr_prnt_company_fatf_rgl_zone,
+                ship_ownr_prnt_company_ofac_sanction_country = EXCLUDED.ship_ownr_prnt_company_ofac_sanction_country,
+                ship_ownr_swi_sanction_list = EXCLUDED.ship_ownr_swi_sanction_list,
+                ship_ownr_uae_sanction_list = EXCLUDED.ship_ownr_uae_sanction_list,
+                ship_ownr_un_sanction_list = EXCLUDED.ship_ownr_un_sanction_list,
+                ship_sanction_country_prtcll_last_twelve_m = EXCLUDED.ship_sanction_country_prtcll_last_twelve_m,
+                ship_sanction_country_prtcll_last_thr_m = EXCLUDED.ship_sanction_country_prtcll_last_thr_m,
+                ship_sanction_country_prtcll_last_six_m = EXCLUDED.ship_sanction_country_prtcll_last_six_m,
+                ship_scrty_lgl_dspt_event = EXCLUDED.ship_scrty_lgl_dspt_event,
+                ship_sts_prtnr_non_compliance_twelve_m = EXCLUDED.ship_sts_prtnr_non_compliance_twelve_m,
+                ship_swi_sanction_list = EXCLUDED.ship_swi_sanction_list,
+                ship_un_sanction_list = EXCLUDED.ship_un_sanction_list;
+            """.formatted(TARGET_SCHEMA, targetTable, targetIndex);
+    }
+
+    /**
+     * Company-compliance upsert; shared by the snapshot table (conflict on
+     * company_cd) and the history table (conflict on company_cd, lst_mdfcn_dt).
+     */
+    public static String getCompanyComplianceUpsertSql(String targetTable, String targetIndex) {
+        return """
+            INSERT INTO %s.%s (
+                crt_dt, creatr_id,
+                company_cd, lst_mdfcn_dt, company_snths_compliance_status,
+                company_aus_sanction_list, company_bes_sanction_list,
+                company_can_sanction_list, company_ofac_sanction_country,
+                company_fatf_cmptnc_country, company_eu_sanction_list,
+                company_ofac_sanction_list, company_ofac_non_sdn_sanction_list,
+                company_ofacssi_sanction_list, company_swiss_sanction_list,
+                company_uae_sanction_list, company_un_sanction_list,
+                prnt_company_compliance_risk
+            )
+            VALUES (
+                CURRENT_TIMESTAMP, ?,
+                ?, ?, ?,
+                ?, ?,
+                ?, ?,
+                ?, ?,
+                ?, ?,
+                ?, ?,
+                ?, ?,
+                ?
+            )
+            ON CONFLICT (%s)
+            DO UPDATE SET
+                mdfcn_dt = CURRENT_TIMESTAMP,
+                mdfr_id = 'SYSTEM',
+                lst_mdfcn_dt = EXCLUDED.lst_mdfcn_dt,
+                company_snths_compliance_status = EXCLUDED.company_snths_compliance_status,
+                company_aus_sanction_list = EXCLUDED.company_aus_sanction_list,
+                company_bes_sanction_list = EXCLUDED.company_bes_sanction_list,
+                company_can_sanction_list = EXCLUDED.company_can_sanction_list,
+                company_ofac_sanction_country = EXCLUDED.company_ofac_sanction_country,
+                company_fatf_cmptnc_country = EXCLUDED.company_fatf_cmptnc_country,
+                company_eu_sanction_list = EXCLUDED.company_eu_sanction_list,
+                company_ofac_sanction_list = EXCLUDED.company_ofac_sanction_list,
+                company_ofac_non_sdn_sanction_list = EXCLUDED.company_ofac_non_sdn_sanction_list,
+                company_ofacssi_sanction_list = EXCLUDED.company_ofacssi_sanction_list,
+                company_swiss_sanction_list = EXCLUDED.company_swiss_sanction_list,
+                company_uae_sanction_list = EXCLUDED.company_uae_sanction_list,
+                company_un_sanction_list = EXCLUDED.company_un_sanction_list,
+                prnt_company_compliance_risk = EXCLUDED.prnt_company_compliance_risk;
+            """.formatted(TARGET_SCHEMA, targetTable, targetIndex);
+    }
+
+    /** Upsert for per-column company change records (before/after values). */
+    public static String getCompanyComplianceChangeUpsertSql(String targetTable) {
+        return """
+            INSERT INTO %s.%s (
+                crt_dt, creatr_id,
+                company_cd, last_mdfcn_dt, flctn_col_nm, bfr_val, aftr_val
+            )
+            VALUES (
+                CURRENT_TIMESTAMP, ?,
+                ?, ?, ?, ?, ?
+            )
+            ON CONFLICT (company_cd, last_mdfcn_dt, flctn_col_nm)
+            DO UPDATE SET
+                mdfcn_dt = CURRENT_TIMESTAMP,
+                mdfr_id = 'SYSTEM',
+                bfr_val = EXCLUDED.bfr_val,
+                aftr_val = EXCLUDED.aftr_val;
+            """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    /** Upsert for per-column ship change records (before/after values). */
+    public static String getShipComplianceChangeUpsertSql(String targetTable) {
+        return """
+            INSERT INTO %s.%s (
+                crt_dt, creatr_id,
+                imo_no, last_mdfcn_dt, flctn_col_nm, bfr_val, aftr_val
+            )
+            VALUES (
+                CURRENT_TIMESTAMP, ?,
+                ?, ?, ?, ?, ?
+ ) + ON CONFLICT (imo_no, last_mdfcn_dt, flctn_col_nm) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + bfr_val = EXCLUDED.bfr_val, + aftr_val = EXCLUDED.aftr_val; + """.formatted(TARGET_SCHEMA, targetTable); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/CompanyComplianceChangeWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/CompanyComplianceChangeWriter.java new file mode 100644 index 0000000..4bcf720 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/CompanyComplianceChangeWriter.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.compliance.writer; + +import com.snp.batch.common.batch.writer.BaseWriter; +import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceChangeEntity; +import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; + +@Slf4j +public class CompanyComplianceChangeWriter extends BaseWriter { + private final ComplianceRepository complianceRepository; + + public CompanyComplianceChangeWriter(ComplianceRepository complianceRepository) { + super("CompanyComplianceChangeEntity"); + this.complianceRepository = complianceRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + complianceRepository.saveCompanyComplianceChange(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/CompanyComplianceWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/CompanyComplianceWriter.java new file mode 100644 index 0000000..bade346 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/CompanyComplianceWriter.java @@ -0,0 +1,28 @@ +package com.snp.batch.jobs.datasync.batch.compliance.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; 
+import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity; +import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class CompanyComplianceWriter extends BaseChunkedWriter { + private final ComplianceRepository complianceRepository; + + public CompanyComplianceWriter(ComplianceRepository complianceRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("CompanyComplianceEntity", transactionManager, subChunkSize); + this.complianceRepository = complianceRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + complianceRepository.saveCompanyCompliance(items); + complianceRepository.saveCompanyComplianceHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/ShipComplianceChangeWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/ShipComplianceChangeWriter.java new file mode 100644 index 0000000..979813a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/ShipComplianceChangeWriter.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.compliance.writer; + +import com.snp.batch.common.batch.writer.BaseWriter; +import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceChangeEntity; +import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; + +@Slf4j +public class ShipComplianceChangeWriter extends BaseWriter { + private final ComplianceRepository complianceRepository; + + public ShipComplianceChangeWriter(ComplianceRepository complianceRepository) { + super("ShipComplianceChangeEntity"); + this.complianceRepository = complianceRepository; + } + + 
@Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + complianceRepository.saveShipComplianceChange(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/ShipComplianceWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/ShipComplianceWriter.java new file mode 100644 index 0000000..3221aef --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/compliance/writer/ShipComplianceWriter.java @@ -0,0 +1,28 @@ +package com.snp.batch.jobs.datasync.batch.compliance.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity; +import com.snp.batch.jobs.datasync.batch.compliance.repository.ComplianceRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class ShipComplianceWriter extends BaseChunkedWriter { + private final ComplianceRepository complianceRepository; + + public ShipComplianceWriter(ComplianceRepository complianceRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("ShipComplianceEntity", transactionManager, subChunkSize); + this.complianceRepository = complianceRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + complianceRepository.saveShipCompliance(items); + complianceRepository.saveShipComplianceHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/config/EventSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/config/EventSyncJobConfig.java new file mode 100644 index 0000000..45d9c82 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/config/EventSyncJobConfig.java @@ -0,0 +1,244 @@ +package com.snp.batch.jobs.datasync.batch.event.config; + 
+import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto; +import com.snp.batch.jobs.datasync.batch.event.dto.EventDto; +import com.snp.batch.jobs.datasync.batch.event.dto.EventHumanCasualtyDto; +import com.snp.batch.jobs.datasync.batch.event.dto.EventRelationshipDto; +import com.snp.batch.jobs.datasync.batch.event.entity.EventCargoEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventHumanCasualtyEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventRelationshipEntity; +import com.snp.batch.jobs.datasync.batch.event.processor.EventCargoProcessor; +import com.snp.batch.jobs.datasync.batch.event.processor.EventHumanCasualtyProcessor; +import com.snp.batch.jobs.datasync.batch.event.processor.EventProcessor; +import com.snp.batch.jobs.datasync.batch.event.processor.EventRelationshipProcessor; +import com.snp.batch.jobs.datasync.batch.event.reader.EventCargoReader; +import com.snp.batch.jobs.datasync.batch.event.reader.EventHumanCasualtyReader; +import com.snp.batch.jobs.datasync.batch.event.reader.EventReader; +import com.snp.batch.jobs.datasync.batch.event.reader.EventRelationshipReader; +import com.snp.batch.jobs.datasync.batch.event.repository.EventRepository; +import com.snp.batch.jobs.datasync.batch.event.writer.EventCargoWriter; +import com.snp.batch.jobs.datasync.batch.event.writer.EventHumanCasualtyWriter; +import com.snp.batch.jobs.datasync.batch.event.writer.EventRelationshipWriter; +import com.snp.batch.jobs.datasync.batch.event.writer.EventWriter; +import 
lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class EventSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final EventRepository eventRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + /** + * 생성자 주입 + */ + public EventSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + EventRepository eventRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.eventRepository = eventRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } 
+ + @Override + protected String getJobName() { + return "eventDataSyncJob"; + } + + @Override + protected String getStepName() { + return "eventSyncStep"; + } + + @Override + protected ItemReader createReader() { + return eventReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new EventProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new EventWriter(eventRepository, transactionManager, subChunkSize); + } + + // --- Event Reader --- + + @Bean + @StepScope + public ItemReader eventReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new EventReader(businessDataSource, tableMetaInfo); + } + + // --- EventCargo Reader --- + + @Bean + @StepScope + public ItemReader eventCargoReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new EventCargoReader(businessDataSource, tableMetaInfo); + } + + // --- EventHumanCasualty Reader --- + + @Bean + @StepScope + public ItemReader eventHumanCasualtyReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new EventHumanCasualtyReader(businessDataSource, tableMetaInfo); + } + + // --- EventRelationship Reader --- + + @Bean + @StepScope + public ItemReader eventRelationshipReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new EventRelationshipReader(businessDataSource, tableMetaInfo); + } + + // --- Listeners --- + + @Bean + public BatchWriteListener eventWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEvent); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener eventCargoWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventCargo); + return new 
BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener eventHumanCasualtyWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventHumanCasualty); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener eventRelationshipWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventRelationship); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + // --- Steps --- + + @Bean(name = "eventSyncStep") + public Step eventSyncStep() { + log.info("Step 생성: eventSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(eventWriteListener()) + .build(); + } + + @Bean(name = "eventCargoSyncStep") + public Step eventCargoSyncStep() { + log.info("Step 생성: eventCargoSyncStep"); + return new StepBuilder("eventCargoSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(eventCargoReader(businessDataSource, tableMetaInfo)) + .processor(new EventCargoProcessor()) + .writer(new EventCargoWriter(eventRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(eventCargoWriteListener()) + .build(); + } + + @Bean(name = "eventHumanCasualtySyncStep") + public Step eventHumanCasualtySyncStep() { + log.info("Step 생성: eventHumanCasualtySyncStep"); + return new StepBuilder("eventHumanCasualtySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(eventHumanCasualtyReader(businessDataSource, tableMetaInfo)) + .processor(new EventHumanCasualtyProcessor()) + .writer(new 
EventHumanCasualtyWriter(eventRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(eventHumanCasualtyWriteListener()) + .build(); + } + + @Bean(name = "eventRelationshipSyncStep") + public Step eventRelationshipSyncStep() { + log.info("Step 생성: eventRelationshipSyncStep"); + return new StepBuilder("eventRelationshipSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(eventRelationshipReader(businessDataSource, tableMetaInfo)) + .processor(new EventRelationshipProcessor()) + .writer(new EventRelationshipWriter(eventRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(eventRelationshipWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(eventSyncStep()) + .next(eventCargoSyncStep()) + .next(eventHumanCasualtySyncStep()) + .next(eventRelationshipSyncStep()) + .build(); + } + + @Bean(name = "eventDataSyncJob") + public Job eventDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventCargoDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventCargoDto.java new file mode 100644 index 0000000..0e67bfd --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventCargoDto.java @@ -0,0 +1,28 @@ +package com.snp.batch.jobs.datasync.batch.event.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class EventCargoDto implements JobExecutionGroupable { + private Long jobExecutionId; + private Integer eventId; + private String imoNo; + private String type; + private String eventSeq; + private Long cnt; + private String unitAbbr; + private String 
unit; + private String cargoDamg; + private String riskYn; + private String text; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventDto.java new file mode 100644 index 0000000..d3b1b8a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventDto.java @@ -0,0 +1,61 @@ +package com.snp.batch.jobs.datasync.batch.event.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.time.ZonedDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class EventDto implements JobExecutionGroupable { + private Long jobExecutionId; + private Integer eventId; + private String acdntId; + private String imoNo; + private ZonedDateTime pstgYmd; + private ZonedDateTime eventStartDay; + private ZonedDateTime eventEndDay; + private String embrkTryYn; + private String cargoCapacityStatusCd; + private String acdntActn; + private String acdntZone; + private String acdntZoneCd; + private String cfgCmpntTwo; + private String countryCd; + private String buildYmd; + private String desc; + private String envPosition; + private String positionNm; + private Long masdGridRef; + private String ctyNm; + private String eventType; + private String eventTypeDtl; + private Long eventTypeDtlId; + private Long eventTypeId; + private String fireduponYn; + private String title; + private Long ldtTimpt; + private String signfct; + private String wethr; + private String pltnMatral; + private Long pltnMatralCnt; + private String pltnMatralUnit; + private String regShponrCdHr; + private String regShponrHr; + private String regShponrCountryCdHr; + private String regShponrCountryHr; + private Long shipDwt; + private String shipFlgCd; + private String shipFlgDecd; + private Long shipGt; + private String shipNm; + private String shipType; 
+ private String shipTypeNm; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventHumanCasualtyDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventHumanCasualtyDto.java new file mode 100644 index 0000000..e7a84f6 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventHumanCasualtyDto.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.event.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class EventHumanCasualtyDto implements JobExecutionGroupable { + private Long jobExecutionId; + private Long eventId; + private String type; + private String scope; + private String qualfr; + private Long cnt; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventRelationshipDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventRelationshipDto.java new file mode 100644 index 0000000..a99d9b9 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/dto/EventRelationshipDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.event.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class EventRelationshipDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String acdntId; + private Long eventId; + private Long eventIdTwo; + private String eventTypeCd; + private String eventType; + private String relTypeCd; + private String relType; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventCargoEntity.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventCargoEntity.java new file mode 100644 index 0000000..65f2998 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventCargoEntity.java @@ -0,0 +1,28 @@ +package com.snp.batch.jobs.datasync.batch.event.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class EventCargoEntity implements JobExecutionGroupable { + private Integer eventId; + private String imoNo; + private String type; + private String eventSeq; + private Long cnt; + private String unitAbbr; + private String unit; + private String cargoDamg; + private String riskYn; + private String text; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventEntity.java new file mode 100644 index 0000000..f5be317 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventEntity.java @@ -0,0 +1,61 @@ +package com.snp.batch.jobs.datasync.batch.event.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.time.ZonedDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class EventEntity implements JobExecutionGroupable { + private Integer eventId; + private String acdntId; + private String imoNo; + private ZonedDateTime pstgYmd; + private ZonedDateTime eventStartDay; + private ZonedDateTime eventEndDay; + private String embrkTryYn; + private String cargoCapacityStatusCd; + private String acdntActn; + private String acdntZone; + private String acdntZoneCd; + private String cfgCmpntTwo; + private String countryCd; + private String buildYmd; + private String desc; + 
private String envPosition; + private String positionNm; + private Long masdGridRef; + private String ctyNm; + private String eventType; + private String eventTypeDtl; + private Long eventTypeDtlId; + private Long eventTypeId; + private String fireduponYn; + private String title; + private Long ldtTimpt; + private String signfct; + private String wethr; + private String pltnMatral; + private Long pltnMatralCnt; + private String pltnMatralUnit; + private String regShponrCdHr; + private String regShponrHr; + private String regShponrCountryCdHr; + private String regShponrCountryHr; + private Long shipDwt; + private String shipFlgCd; + private String shipFlgDecd; + private Long shipGt; + private String shipNm; + private String shipType; + private String shipTypeNm; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventHumanCasualtyEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventHumanCasualtyEntity.java new file mode 100644 index 0000000..e2adc49 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventHumanCasualtyEntity.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.event.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class EventHumanCasualtyEntity implements JobExecutionGroupable { + private Long eventId; + private String type; + private String scope; + private String qualfr; + private Long cnt; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventRelationshipEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventRelationshipEntity.java new file 
mode 100644 index 0000000..066d052 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/entity/EventRelationshipEntity.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.event.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class EventRelationshipEntity implements JobExecutionGroupable { + private String acdntId; + private Long eventId; + private Long eventIdTwo; + private String eventTypeCd; + private String eventType; + private String relTypeCd; + private String relType; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventCargoProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventCargoProcessor.java new file mode 100644 index 0000000..1f3e1fe --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventCargoProcessor.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.event.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto; +import com.snp.batch.jobs.datasync.batch.event.entity.EventCargoEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class EventCargoProcessor extends BaseProcessor { + @Override + protected EventCargoEntity processItem(EventCargoDto dto) throws Exception { + return EventCargoEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .eventId(dto.getEventId()) + .imoNo(dto.getImoNo()) + .type(dto.getType()) + .eventSeq(dto.getEventSeq()) + .cnt(dto.getCnt()) + .unitAbbr(dto.getUnitAbbr()) + .unit(dto.getUnit()) + .cargoDamg(dto.getCargoDamg()) + .riskYn(dto.getRiskYn()) + .text(dto.getText()) + .build(); + } +} diff --git 
a/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventHumanCasualtyProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventHumanCasualtyProcessor.java new file mode 100644 index 0000000..a706729 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventHumanCasualtyProcessor.java @@ -0,0 +1,21 @@ +package com.snp.batch.jobs.datasync.batch.event.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.event.dto.EventHumanCasualtyDto; +import com.snp.batch.jobs.datasync.batch.event.entity.EventHumanCasualtyEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class EventHumanCasualtyProcessor extends BaseProcessor { + @Override + protected EventHumanCasualtyEntity processItem(EventHumanCasualtyDto dto) throws Exception { + return EventHumanCasualtyEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .eventId(dto.getEventId()) + .type(dto.getType()) + .scope(dto.getScope()) + .qualfr(dto.getQualfr()) + .cnt(dto.getCnt()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventProcessor.java new file mode 100644 index 0000000..7a922ce --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventProcessor.java @@ -0,0 +1,58 @@ +package com.snp.batch.jobs.datasync.batch.event.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.event.dto.EventDto; +import com.snp.batch.jobs.datasync.batch.event.entity.EventEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class EventProcessor extends BaseProcessor { + @Override + protected EventEntity processItem(EventDto dto) throws Exception { + return EventEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .eventId(dto.getEventId()) + 
.acdntId(dto.getAcdntId()) + .imoNo(dto.getImoNo()) + .pstgYmd(dto.getPstgYmd()) + .eventStartDay(dto.getEventStartDay()) + .eventEndDay(dto.getEventEndDay()) + .embrkTryYn(dto.getEmbrkTryYn()) + .cargoCapacityStatusCd(dto.getCargoCapacityStatusCd()) + .acdntActn(dto.getAcdntActn()) + .acdntZone(dto.getAcdntZone()) + .acdntZoneCd(dto.getAcdntZoneCd()) + .cfgCmpntTwo(dto.getCfgCmpntTwo()) + .countryCd(dto.getCountryCd()) + .buildYmd(dto.getBuildYmd()) + .desc(dto.getDesc()) + .envPosition(dto.getEnvPosition()) + .positionNm(dto.getPositionNm()) + .masdGridRef(dto.getMasdGridRef()) + .ctyNm(dto.getCtyNm()) + .eventType(dto.getEventType()) + .eventTypeDtl(dto.getEventTypeDtl()) + .eventTypeDtlId(dto.getEventTypeDtlId()) + .eventTypeId(dto.getEventTypeId()) + .fireduponYn(dto.getFireduponYn()) + .title(dto.getTitle()) + .ldtTimpt(dto.getLdtTimpt()) + .signfct(dto.getSignfct()) + .wethr(dto.getWethr()) + .pltnMatral(dto.getPltnMatral()) + .pltnMatralCnt(dto.getPltnMatralCnt()) + .pltnMatralUnit(dto.getPltnMatralUnit()) + .regShponrCdHr(dto.getRegShponrCdHr()) + .regShponrHr(dto.getRegShponrHr()) + .regShponrCountryCdHr(dto.getRegShponrCountryCdHr()) + .regShponrCountryHr(dto.getRegShponrCountryHr()) + .shipDwt(dto.getShipDwt()) + .shipFlgCd(dto.getShipFlgCd()) + .shipFlgDecd(dto.getShipFlgDecd()) + .shipGt(dto.getShipGt()) + .shipNm(dto.getShipNm()) + .shipType(dto.getShipType()) + .shipTypeNm(dto.getShipTypeNm()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventRelationshipProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventRelationshipProcessor.java new file mode 100644 index 0000000..1a6f613 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/processor/EventRelationshipProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.event.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import 
com.snp.batch.jobs.datasync.batch.event.dto.EventRelationshipDto; +import com.snp.batch.jobs.datasync.batch.event.entity.EventRelationshipEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class EventRelationshipProcessor extends BaseProcessor { + @Override + protected EventRelationshipEntity processItem(EventRelationshipDto dto) throws Exception { + return EventRelationshipEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .acdntId(dto.getAcdntId()) + .eventId(dto.getEventId()) + .eventIdTwo(dto.getEventIdTwo()) + .eventTypeCd(dto.getEventTypeCd()) + .eventType(dto.getEventType()) + .relTypeCd(dto.getRelTypeCd()) + .relType(dto.getRelType()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventCargoReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventCargoReader.java new file mode 100644 index 0000000..af82b44 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventCargoReader.java @@ -0,0 +1,73 @@ +package com.snp.batch.jobs.datasync.batch.event.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class EventCargoReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public EventCargoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + 
@Override + public EventCargoDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventCargo), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[EventCargoReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventCargo); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return EventCargoDto.builder() + .jobExecutionId(targetId) + .eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null) + .imoNo(rs.getString("imo_no")) + .type(rs.getString("type")) + .eventSeq(rs.getString("event_seq")) + .cnt(rs.getObject("cnt") != null ? 
rs.getLong("cnt") : null) + .unitAbbr(rs.getString("unit_abbr")) + .unit(rs.getString("unit")) + .cargoDamg(rs.getString("cargo_damg")) + .riskYn(rs.getString("risk_yn")) + .text(rs.getString("text")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventCargo); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventHumanCasualtyReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventHumanCasualtyReader.java new file mode 100644 index 0000000..3a00435 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventHumanCasualtyReader.java @@ -0,0 +1,68 @@ +package com.snp.batch.jobs.datasync.batch.event.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.event.dto.EventHumanCasualtyDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class EventHumanCasualtyReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public EventHumanCasualtyReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public EventHumanCasualtyDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return 
null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventHumanCasualty), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[EventHumanCasualtyReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventHumanCasualty); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return EventHumanCasualtyDto.builder() + .jobExecutionId(targetId) + .eventId(rs.getObject("event_id") != null ? rs.getLong("event_id") : null) + .type(rs.getString("type")) + .scope(rs.getString("scope")) + .qualfr(rs.getString("qualfr")) + .cnt(rs.getObject("cnt") != null ? rs.getLong("cnt") : null) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventHumanCasualty); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventReader.java new file mode 100644 index 0000000..04da68c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventReader.java @@ -0,0 +1,110 @@ +package com.snp.batch.jobs.datasync.batch.event.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.event.dto.EventDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import 
java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class EventReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public EventReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public EventDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceEvent), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[EventReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEvent); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp pstgYmdTs = rs.getTimestamp("pstg_ymd"); + Timestamp eventStartDayTs = rs.getTimestamp("event_start_day"); + Timestamp eventEndDayTs = rs.getTimestamp("event_end_day"); + + return EventDto.builder() + .jobExecutionId(targetId) + .eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null) + .acdntId(rs.getString("acdnt_id")) + .imoNo(rs.getString("imo_no")) + .pstgYmd(pstgYmdTs != null ? pstgYmdTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null) + .eventStartDay(eventStartDayTs != null ? eventStartDayTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null) + .eventEndDay(eventEndDayTs != null ? 
eventEndDayTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null) + .embrkTryYn(rs.getString("embrk_try_yn")) + .cargoCapacityStatusCd(rs.getString("cargo_capacity_status_cd")) + .acdntActn(rs.getString("acdnt_actn")) + .acdntZone(rs.getString("acdnt_zone")) + .acdntZoneCd(rs.getString("acdnt_zone_cd")) + .cfgCmpntTwo(rs.getString("cfg_cmpnt_two")) + .countryCd(rs.getString("country_cd")) + .buildYmd(rs.getString("build_ymd")) + .desc(rs.getString("desc")) + .envPosition(rs.getString("env_position")) + .positionNm(rs.getString("position_nm")) + .masdGridRef(rs.getObject("masd_grid_ref") != null ? rs.getLong("masd_grid_ref") : null) + .ctyNm(rs.getString("cty_nm")) + .eventType(rs.getString("event_type")) + .eventTypeDtl(rs.getString("event_type_dtl")) + .eventTypeDtlId(rs.getObject("event_type_dtl_id") != null ? rs.getLong("event_type_dtl_id") : null) + .eventTypeId(rs.getObject("event_type_id") != null ? rs.getLong("event_type_id") : null) + .fireduponYn(rs.getString("firedupon_yn")) + .title(rs.getString("title")) + .ldtTimpt(rs.getObject("ldt_timpt") != null ? rs.getLong("ldt_timpt") : null) + .signfct(rs.getString("signfct")) + .wethr(rs.getString("wethr")) + .pltnMatral(rs.getString("pltn_matral")) + .pltnMatralCnt(rs.getObject("pltn_matral_cnt") != null ? rs.getLong("pltn_matral_cnt") : null) + .pltnMatralUnit(rs.getString("pltn_matral_unit")) + .regShponrCdHr(rs.getString("reg_shponr_cd_hr")) + .regShponrHr(rs.getString("reg_shponr_hr")) + .regShponrCountryCdHr(rs.getString("reg_shponr_country_cd_hr")) + .regShponrCountryHr(rs.getString("reg_shponr_country_hr")) + .shipDwt(rs.getObject("ship_dwt") != null ? rs.getLong("ship_dwt") : null) + .shipFlgCd(rs.getString("ship_flg_cd")) + .shipFlgDecd(rs.getString("ship_flg_decd")) + .shipGt(rs.getObject("ship_gt") != null ? 
rs.getLong("ship_gt") : null) + .shipNm(rs.getString("ship_nm")) + .shipType(rs.getString("ship_type")) + .shipTypeNm(rs.getString("ship_type_nm")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEvent); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventRelationshipReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventRelationshipReader.java new file mode 100644 index 0000000..9fdd2eb --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/reader/EventRelationshipReader.java @@ -0,0 +1,70 @@ +package com.snp.batch.jobs.datasync.batch.event.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.event.dto.EventRelationshipDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class EventRelationshipReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public EventRelationshipReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public EventRelationshipDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void 
fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventRelationship), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[EventRelationshipReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventRelationship); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return EventRelationshipDto.builder() + .jobExecutionId(targetId) + .acdntId(rs.getString("acdnt_id")) + .eventId(rs.getObject("event_id") != null ? rs.getLong("event_id") : null) + .eventIdTwo(rs.getObject("event_id_two") != null ? rs.getLong("event_id_two") : null) + .eventTypeCd(rs.getString("event_type_cd")) + .eventType(rs.getString("event_type")) + .relTypeCd(rs.getString("rel_type_cd")) + .relType(rs.getString("rel_type")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventRelationship); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventRepository.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventRepository.java new file mode 100644 index 0000000..c48ed0c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventRepository.java @@ -0,0 +1,19 @@ +package com.snp.batch.jobs.datasync.batch.event.repository; + +import com.snp.batch.jobs.datasync.batch.event.entity.EventCargoEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventHumanCasualtyEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventRelationshipEntity; + +import 
java.util.List; + +/** + * EventEntity Repository 인터페이스 + * 구현체: EventRepositoryImpl (JdbcTemplate 기반) + */ +public interface EventRepository { + void saveEvent(List eventEntityList); + void saveEventCargo(List eventCargoEntityList); + void saveEventHumanCasualty(List eventHumanCasualtyEntityList); + void saveEventRelationship(List eventRelationshipEntityList); +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventRepositoryImpl.java new file mode 100644 index 0000000..7c24111 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventRepositoryImpl.java @@ -0,0 +1,248 @@ +package com.snp.batch.jobs.datasync.batch.event.repository; + +import com.snp.batch.common.batch.repository.MultiDataSourceJdbcRepository; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.event.entity.EventCargoEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventHumanCasualtyEntity; +import com.snp.batch.jobs.datasync.batch.event.entity.EventRelationshipEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +import javax.sql.DataSource; +import java.sql.PreparedStatement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.List; + +/** + * EventEntity Repository (JdbcTemplate 기반) + */ +@Slf4j +@Repository("eventRepository") +public class EventRepositoryImpl extends MultiDataSourceJdbcRepository implements EventRepository { + + private DataSource batchDataSource; + private DataSource businessDataSource; + private final TableMetaInfo tableMetaInfo; + + public 
EventRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + + super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource)); + + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.tableMetaInfo = tableMetaInfo; + } + + @Override + protected String getTableName() { + return null; + } + + @Override + protected RowMapper getRowMapper() { + return null; + } + + @Override + protected Long extractId(EventEntity entity) { + return null; + } + + @Override + protected String getInsertSql() { + return null; + } + + @Override + protected String getUpdateSql() { + return null; + } + + @Override + protected void setInsertParameters(PreparedStatement ps, EventEntity entity) throws Exception { + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, EventEntity entity) throws Exception { + } + + @Override + protected String getEntityName() { + return null; + } + + @Override + public void saveEvent(List eventEntityList) { + String sql = EventSql.getEventUpsertSql(tableMetaInfo.targetTbEventDtl); + if (eventEntityList == null || eventEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "EventEntity", eventEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, eventEntityList, eventEntityList.size(), + (ps, entity) -> { + try { + bindEvent(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "EventEntity", eventEntityList.size()); + } + + public void bindEvent(PreparedStatement pstmt, EventEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setObject(idx++, entity.getEventId(), Types.INTEGER); // 2. event_id + pstmt.setString(idx++, entity.getAcdntId()); // 3. 
acdnt_id + pstmt.setString(idx++, entity.getImoNo()); // 4. imo_no + pstmt.setTimestamp(idx++, entity.getPstgYmd() != null ? Timestamp.from(entity.getPstgYmd().toInstant()) : null); // 5. pstg_ymd + pstmt.setTimestamp(idx++, entity.getEventStartDay() != null ? Timestamp.from(entity.getEventStartDay().toInstant()) : null); // 6. event_start_day + pstmt.setTimestamp(idx++, entity.getEventEndDay() != null ? Timestamp.from(entity.getEventEndDay().toInstant()) : null); // 7. event_end_day + pstmt.setString(idx++, entity.getEmbrkTryYn()); // 8. embrk_try_yn + pstmt.setString(idx++, entity.getCargoCapacityStatusCd()); // 9. cargo_capacity_status_cd + pstmt.setString(idx++, entity.getAcdntActn()); // 10. acdnt_actn + pstmt.setString(idx++, entity.getAcdntZone()); // 11. acdnt_zone + pstmt.setString(idx++, entity.getAcdntZoneCd()); // 12. acdnt_zone_cd + pstmt.setString(idx++, entity.getCfgCmpntTwo()); // 13. cfg_cmpnt_two + pstmt.setString(idx++, entity.getCountryCd()); // 14. country_cd + pstmt.setString(idx++, entity.getBuildYmd()); // 15. build_ymd + pstmt.setString(idx++, entity.getDesc()); // 16. desc + pstmt.setString(idx++, entity.getEnvPosition()); // 17. env_position + pstmt.setString(idx++, entity.getPositionNm()); // 18. position_nm + pstmt.setObject(idx++, entity.getMasdGridRef(), Types.BIGINT); // 19. masd_grid_ref + pstmt.setString(idx++, entity.getCtyNm()); // 20. cty_nm + pstmt.setString(idx++, entity.getEventType()); // 21. event_type + pstmt.setString(idx++, entity.getEventTypeDtl()); // 22. event_type_dtl + pstmt.setObject(idx++, entity.getEventTypeDtlId(), Types.BIGINT); // 23. event_type_dtl_id + pstmt.setObject(idx++, entity.getEventTypeId(), Types.BIGINT); // 24. event_type_id + pstmt.setString(idx++, entity.getFireduponYn()); // 25. firedupon_yn + pstmt.setString(idx++, entity.getTitle()); // 26. title + pstmt.setObject(idx++, entity.getLdtTimpt(), Types.BIGINT); // 27. ldt_timpt + pstmt.setString(idx++, entity.getSignfct()); // 28. 
signfct + pstmt.setString(idx++, entity.getWethr()); // 29. wethr + pstmt.setString(idx++, entity.getPltnMatral()); // 30. pltn_matral + pstmt.setObject(idx++, entity.getPltnMatralCnt(), Types.BIGINT); // 31. pltn_matral_cnt + pstmt.setString(idx++, entity.getPltnMatralUnit()); // 32. pltn_matral_unit + pstmt.setString(idx++, entity.getRegShponrCdHr()); // 33. reg_shponr_cd_hr + pstmt.setString(idx++, entity.getRegShponrHr()); // 34. reg_shponr_hr + pstmt.setString(idx++, entity.getRegShponrCountryCdHr()); // 35. reg_shponr_country_cd_hr + pstmt.setString(idx++, entity.getRegShponrCountryHr()); // 36. reg_shponr_country_hr + pstmt.setObject(idx++, entity.getShipDwt(), Types.BIGINT); // 37. ship_dwt + pstmt.setString(idx++, entity.getShipFlgCd()); // 38. ship_flg_cd + pstmt.setString(idx++, entity.getShipFlgDecd()); // 39. ship_flg_decd + pstmt.setObject(idx++, entity.getShipGt(), Types.BIGINT); // 40. ship_gt + pstmt.setString(idx++, entity.getShipNm()); // 41. ship_nm + pstmt.setString(idx++, entity.getShipType()); // 42. ship_type + pstmt.setString(idx++, entity.getShipTypeNm()); // 43. ship_type_nm + } + + @Override + public void saveEventCargo(List eventCargoEntityList) { + String sql = EventSql.getEventCargoUpsertSql(tableMetaInfo.targetTbEventCargo); + if (eventCargoEntityList == null || eventCargoEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "EventCargoEntity", eventCargoEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, eventCargoEntityList, eventCargoEntityList.size(), + (ps, entity) -> { + try { + bindEventCargo(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "EventCargoEntity", eventCargoEntityList.size()); + } + + public void bindEventCargo(PreparedStatement pstmt, EventCargoEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. 
creatr_id + pstmt.setObject(idx++, entity.getEventId(), Types.INTEGER); // 2. event_id + pstmt.setString(idx++, entity.getImoNo()); // 3. imo_no + pstmt.setString(idx++, entity.getType()); // 4. type + pstmt.setString(idx++, entity.getEventSeq()); // 5. event_seq + pstmt.setObject(idx++, entity.getCnt(), Types.BIGINT); // 6. cnt + pstmt.setString(idx++, entity.getUnitAbbr()); // 7. unit_abbr + pstmt.setString(idx++, entity.getUnit()); // 8. unit + pstmt.setString(idx++, entity.getCargoDamg()); // 9. cargo_damg + pstmt.setString(idx++, entity.getRiskYn()); // 10. risk_yn + pstmt.setString(idx++, entity.getText()); // 11. text + } + + @Override + public void saveEventHumanCasualty(List eventHumanCasualtyEntityList) { + String sql = EventSql.getEventHumanCasualtyUpsertSql(tableMetaInfo.targetTbEventHumnAcdnt); + if (eventHumanCasualtyEntityList == null || eventHumanCasualtyEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, eventHumanCasualtyEntityList, eventHumanCasualtyEntityList.size(), + (ps, entity) -> { + try { + bindEventHumanCasualty(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size()); + } + + public void bindEventHumanCasualty(PreparedStatement pstmt, EventHumanCasualtyEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setObject(idx++, entity.getEventId(), Types.BIGINT); // 2. event_id + pstmt.setString(idx++, entity.getType()); // 3. type + pstmt.setString(idx++, entity.getScope()); // 4. scope + pstmt.setString(idx++, entity.getQualfr()); // 5. qualfr + pstmt.setObject(idx++, entity.getCnt(), Types.BIGINT); // 6. 
cnt + } + + @Override + public void saveEventRelationship(List eventRelationshipEntityList) { + String sql = EventSql.getEventRelationshipUpsertSql(tableMetaInfo.targetTbEventRel); + if (eventRelationshipEntityList == null || eventRelationshipEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, eventRelationshipEntityList, eventRelationshipEntityList.size(), + (ps, entity) -> { + try { + bindEventRelationship(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size()); + } + + public void bindEventRelationship(PreparedStatement pstmt, EventRelationshipEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getAcdntId()); // 2. acdnt_id + pstmt.setObject(idx++, entity.getEventId(), Types.BIGINT); // 3. event_id + pstmt.setObject(idx++, entity.getEventIdTwo(), Types.BIGINT); // 4. event_id_two + pstmt.setString(idx++, entity.getEventTypeCd()); // 5. event_type_cd + pstmt.setString(idx++, entity.getEventType()); // 6. event_type + pstmt.setString(idx++, entity.getRelTypeCd()); // 7. rel_type_cd + pstmt.setString(idx++, entity.getRelType()); // 8. 
rel_type + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventSql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventSql.java new file mode 100644 index 0000000..c0f85fc --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/repository/EventSql.java @@ -0,0 +1,161 @@ +package com.snp.batch.jobs.datasync.batch.event.repository; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class EventSql { + private static String TARGET_SCHEMA; + public EventSql(@Value("${app.batch.target-schema.name}") String targetSchema) { + TARGET_SCHEMA = targetSchema; + } + + public static String getEventUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + event_id, acdnt_id, imo_no, pstg_ymd, + event_start_day, event_end_day, embrk_try_yn, + cargo_capacity_status_cd, acdnt_actn, acdnt_zone, + acdnt_zone_cd, cfg_cmpnt_two, country_cd, build_ymd, + "desc", env_position, position_nm, + masd_grid_ref, cty_nm, event_type, + event_type_dtl, event_type_dtl_id, event_type_id, + firedupon_yn, title, ldt_timpt, signfct, + wethr, pltn_matral, pltn_matral_cnt, pltn_matral_unit, + reg_shponr_cd_hr, reg_shponr_hr, + reg_shponr_country_cd_hr, reg_shponr_country_hr, + ship_dwt, ship_flg_cd, ship_flg_decd, ship_gt, + ship_nm, ship_type, ship_type_nm + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, + ?, ?, + ?, ?, ?, ?, + ?, ?, ? 
+ ) + ON CONFLICT (event_id) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + acdnt_id = EXCLUDED.acdnt_id, + imo_no = EXCLUDED.imo_no, + pstg_ymd = EXCLUDED.pstg_ymd, + event_start_day = EXCLUDED.event_start_day, + event_end_day = EXCLUDED.event_end_day, + embrk_try_yn = EXCLUDED.embrk_try_yn, + cargo_capacity_status_cd = EXCLUDED.cargo_capacity_status_cd, + acdnt_actn = EXCLUDED.acdnt_actn, + acdnt_zone = EXCLUDED.acdnt_zone, + acdnt_zone_cd = EXCLUDED.acdnt_zone_cd, + cfg_cmpnt_two = EXCLUDED.cfg_cmpnt_two, + country_cd = EXCLUDED.country_cd, + build_ymd = EXCLUDED.build_ymd, + "desc" = EXCLUDED."desc", + env_position = EXCLUDED.env_position, + position_nm = EXCLUDED.position_nm, + masd_grid_ref = EXCLUDED.masd_grid_ref, + cty_nm = EXCLUDED.cty_nm, + event_type = EXCLUDED.event_type, + event_type_dtl = EXCLUDED.event_type_dtl, + event_type_dtl_id = EXCLUDED.event_type_dtl_id, + event_type_id = EXCLUDED.event_type_id, + firedupon_yn = EXCLUDED.firedupon_yn, + title = EXCLUDED.title, + ldt_timpt = EXCLUDED.ldt_timpt, + signfct = EXCLUDED.signfct, + wethr = EXCLUDED.wethr, + pltn_matral = EXCLUDED.pltn_matral, + pltn_matral_cnt = EXCLUDED.pltn_matral_cnt, + pltn_matral_unit = EXCLUDED.pltn_matral_unit, + reg_shponr_cd_hr = EXCLUDED.reg_shponr_cd_hr, + reg_shponr_hr = EXCLUDED.reg_shponr_hr, + reg_shponr_country_cd_hr = EXCLUDED.reg_shponr_country_cd_hr, + reg_shponr_country_hr = EXCLUDED.reg_shponr_country_hr, + ship_dwt = EXCLUDED.ship_dwt, + ship_flg_cd = EXCLUDED.ship_flg_cd, + ship_flg_decd = EXCLUDED.ship_flg_decd, + ship_gt = EXCLUDED.ship_gt, + ship_nm = EXCLUDED.ship_nm, + ship_type = EXCLUDED.ship_type, + ship_type_nm = EXCLUDED.ship_type_nm; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getEventCargoUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + event_id, imo_no, "type", event_seq, + cnt, unit_abbr, unit, cargo_damg, + risk_yn, "text" + ) + VALUES ( + 
CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ? + ) + ON CONFLICT (event_id, event_seq, imo_no, "type") + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + cnt = EXCLUDED.cnt, + unit_abbr = EXCLUDED.unit_abbr, + unit = EXCLUDED.unit, + cargo_damg = EXCLUDED.cargo_damg, + risk_yn = EXCLUDED.risk_yn, + "text" = EXCLUDED."text"; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getEventHumanCasualtyUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + event_id, "type", "scope", qualfr, cnt + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ? + ) + ON CONFLICT (event_id, "scope", "type", qualfr) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + cnt = EXCLUDED.cnt; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getEventRelationshipUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + acdnt_id, event_id, event_id_two, event_type_cd, + event_type, rel_type_cd, rel_type + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, + ?, ?, ? 
+ ) + ON CONFLICT (acdnt_id, event_id, event_id_two, event_type_cd, rel_type_cd) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + event_type = EXCLUDED.event_type, + rel_type = EXCLUDED.rel_type; + """.formatted(TARGET_SCHEMA, targetTable); + } + +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventCargoWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventCargoWriter.java new file mode 100644 index 0000000..f01e024 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventCargoWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.event.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.event.entity.EventCargoEntity; +import com.snp.batch.jobs.datasync.batch.event.repository.EventRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class EventCargoWriter extends BaseChunkedWriter { + private final EventRepository eventRepository; + + public EventCargoWriter(EventRepository eventRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("EventCargoEntity", transactionManager, subChunkSize); + this.eventRepository = eventRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + eventRepository.saveEventCargo(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventHumanCasualtyWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventHumanCasualtyWriter.java new file mode 100644 index 0000000..945edf0 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventHumanCasualtyWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.event.writer; + +import 
com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.event.entity.EventHumanCasualtyEntity; +import com.snp.batch.jobs.datasync.batch.event.repository.EventRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class EventHumanCasualtyWriter extends BaseChunkedWriter { + private final EventRepository eventRepository; + + public EventHumanCasualtyWriter(EventRepository eventRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("EventHumanCasualtyEntity", transactionManager, subChunkSize); + this.eventRepository = eventRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + eventRepository.saveEventHumanCasualty(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventRelationshipWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventRelationshipWriter.java new file mode 100644 index 0000000..092e172 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventRelationshipWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.event.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.event.entity.EventRelationshipEntity; +import com.snp.batch.jobs.datasync.batch.event.repository.EventRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class EventRelationshipWriter extends BaseChunkedWriter { + private final EventRepository eventRepository; + + public EventRelationshipWriter(EventRepository eventRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("EventRelationshipEntity", transactionManager, subChunkSize); + 
this.eventRepository = eventRepository;
+    }
+
+    @Override
+    protected void writeItems(List items) throws Exception {
+        if (items.isEmpty()) {
+            return;
+        }
+        eventRepository.saveEventRelationship(items);
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventWriter.java
new file mode 100644
index 0000000..c657c24
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/event/writer/EventWriter.java
@@ -0,0 +1,27 @@
+package com.snp.batch.jobs.datasync.batch.event.writer;
+
+import com.snp.batch.common.batch.writer.BaseChunkedWriter;
+import com.snp.batch.jobs.datasync.batch.event.entity.EventEntity;
+import com.snp.batch.jobs.datasync.batch.event.repository.EventRepository;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.transaction.PlatformTransactionManager;
+
+import java.util.List;
+
+// Chunked writer that persists event items by delegating each sub-chunk
+// to EventRepository.saveEvent.
+@Slf4j
+public class EventWriter extends BaseChunkedWriter {
+    private final EventRepository eventRepository;
+
+    public EventWriter(EventRepository eventRepository, PlatformTransactionManager transactionManager, int subChunkSize) {
+        super("EventEntity", transactionManager, subChunkSize);
+        this.eventRepository = eventRepository;
+    }
+
+    @Override
+    protected void writeItems(List items) throws Exception {
+        if (items.isEmpty()) {
+            return;
+        }
+        eventRepository.saveEvent(items);
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/config/FacilitySyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/config/FacilitySyncJobConfig.java
new file mode 100644
index 0000000..74c4265
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/config/FacilitySyncJobConfig.java
@@ -0,0 +1,136 @@
+package com.snp.batch.jobs.datasync.batch.facility.config;
+
+import com.snp.batch.common.batch.config.BaseJobConfig;
+import com.snp.batch.common.util.BatchWriteListener;
+import
com.snp.batch.common.util.CommonSql;
+import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
+import com.snp.batch.common.util.GroupByExecutionIdPolicy;
+import com.snp.batch.common.util.GroupByExecutionIdReadListener;
+import com.snp.batch.common.util.TableMetaInfo;
+import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
+import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
+import com.snp.batch.jobs.datasync.batch.facility.processor.FacilityPortProcessor;
+import com.snp.batch.jobs.datasync.batch.facility.reader.FacilityPortReader;
+import com.snp.batch.jobs.datasync.batch.facility.repository.FacilityRepository;
+import com.snp.batch.jobs.datasync.batch.facility.writer.FacilityPortWriter;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.configuration.annotation.StepScope;
+import org.springframework.batch.core.job.builder.JobBuilder;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.step.builder.StepBuilder;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.batch.item.ItemWriter;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.transaction.PlatformTransactionManager;
+
+import javax.sql.DataSource;
+
+// Job configuration for the facility/port data-sync batch: wires the
+// reader -> processor -> writer step plus grouping/write listeners.
+@Slf4j
+@Configuration
+public class FacilitySyncJobConfig extends BaseJobConfig {
+    private final TableMetaInfo tableMetaInfo;
+    private final FacilityRepository facilityRepository;
+    private final DataSource batchDataSource;
+    private final DataSource businessDataSource;
+    private final JdbcTemplate businessJdbcTemplate;
+    private final int subChunkSize;
+
+    /**
+     * Constructor injection.
+     */
+    public FacilitySyncJobConfig(
+            JobRepository jobRepository,
+            PlatformTransactionManager transactionManager,
+            FacilityRepository facilityRepository,
+            TableMetaInfo tableMetaInfo,
+            @Qualifier("batchDataSource") DataSource batchDataSource,
+            @Qualifier("businessDataSource") DataSource businessDataSource,
+            @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize
+    ) {
+        super(jobRepository, transactionManager);
+        this.facilityRepository = facilityRepository;
+        this.tableMetaInfo = tableMetaInfo;
+        this.batchDataSource = batchDataSource;
+        this.businessDataSource = businessDataSource;
+        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
+        this.subChunkSize = subChunkSize;
+    }
+
+    @Override
+    protected String getJobName() {
+        return "facilityDataSyncJob";
+    }
+
+    @Override
+    protected String getStepName() {
+        return "facilityPortSyncStep";
+    }
+
+    @Override
+    protected ItemReader createReader() {
+        return facilityPortReader(businessDataSource, tableMetaInfo);
+    }
+
+    @Override
+    protected ItemProcessor createProcessor() {
+        return new FacilityPortProcessor();
+    }
+
+    @Override
+    protected ItemWriter createWriter() {
+        return new FacilityPortWriter(facilityRepository, transactionManager, subChunkSize);
+    }
+
+    // --- FacilityPort Reader ---
+
+    @Bean
+    @StepScope
+    public ItemReader facilityPortReader(
+            @Qualifier("businessDataSource") DataSource businessDataSource,
+            TableMetaInfo tableMetaInfo) {
+        return new FacilityPortReader(businessDataSource, tableMetaInfo);
+    }
+
+    // --- Listeners ---
+
+    @Bean
+    public BatchWriteListener facilityPortWriteListener() {
+        // Marks source rows complete after a successful write (see CommonSql).
+        String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFacilityPort);
+        return new BatchWriteListener<>(businessJdbcTemplate, sql);
+    }
+
+    // --- Steps ---
+
+    @Bean(name = "facilityPortSyncStep")
+    public Step facilityPortSyncStep() {
+        log.info("Step 생성: facilityPortSyncStep");
+        return new StepBuilder(getStepName(), jobRepository)
+                .chunk(new GroupByExecutionIdPolicy(), transactionManager)
+                .reader(createReader())
+                .processor(createProcessor())
+                .writer(createWriter())
+                .listener(new GroupByExecutionIdReadListener())
+                .listener(new GroupByExecutionIdChunkListener())
+                .listener(facilityPortWriteListener())
+                .build();
+    }
+
+    @Override
+    protected Job createJobFlow(JobBuilder jobBuilder) {
+        // Single-step flow: this job only syncs port facility data.
+        return jobBuilder
+                .start(facilityPortSyncStep())
+                .build();
+    }
+
+    @Bean(name = "facilityDataSyncJob")
+    public Job facilityDataSyncJob() {
+        return job();
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/dto/FacilityPortDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/dto/FacilityPortDto.java
new file mode 100644
index 0000000..24e6bdc
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/dto/FacilityPortDto.java
@@ -0,0 +1,69 @@
+package com.snp.batch.jobs.datasync.batch.facility.dto;
+
+import com.snp.batch.common.util.JobExecutionGroupable;
+import lombok.*;
+import java.time.ZonedDateTime;
+
+// Flat DTO for one source row of port facility data; jobExecutionId groups
+// rows belonging to the same batch execution (JobExecutionGroupable).
+@Getter
+@Setter
+@NoArgsConstructor
+@AllArgsConstructor
+@Builder
+public class FacilityPortDto implements JobExecutionGroupable {
+    private Long jobExecutionId;
+    private Long portId;
+    private String bfrId;
+    private String status;
+    private String portNm;
+    private String unPortCd;
+    private String countryCd;
+    private String countryNm;
+    private String areanm;
+    private String cntntnm;
+    private String mstPortId;
+    private Double latDecml;
+    private Double lonDecml;
+    private Double positionLat;
+    private Double positionLon;
+    private Double positionZVal;
+    private Double positionMvalVal;
+    private Boolean zValHasYn;
+    private Boolean mvalValHasYn;
+    private Boolean positionNulYn;
+    private Long positionStsId;
+    private String hrZone;
+    private Boolean daylgtSaveHr;
+    private Double maxDraft;
+    private Double maxWhlnth;
+    private Double maxBeam;
+    private Double maxDwt;
+    private
Double maxSeaDraft;
+    private Double maxSeaWhlnth;
+    private Double maxSeaBcm;
+    private Double maxSeaDwt;
+    private Boolean baleCargoFacility;
+    private Boolean cntnrFacility;
+    private Boolean caseCargoFacility;
+    private Boolean liquidCargoFacility;
+    private Boolean roroFacility;
+    private Boolean paxfclty;
+    private Boolean drydkfclty;
+    private Long lpgFacility;
+    private Long lngFacility;
+    private Boolean lngBnkr;
+    private Boolean doBnkr;
+    private Boolean foBnkr;
+    private Boolean ispsComplianceYn;
+    private Boolean csiComplianceYn;
+    private Boolean freeTrdZone;
+    private Boolean ecfrdPort;
+    private Boolean emsnCtrlArea;
+    private Long wsPort;
+    private ZonedDateTime lastMdfcnDt;
+    private ZonedDateTime regYmd;
+
+    @Override
+    public Long getJobExecutionId() {
+        return this.jobExecutionId;
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/entity/FacilityPortEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/entity/FacilityPortEntity.java
new file mode 100644
index 0000000..4e3d32e
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/entity/FacilityPortEntity.java
@@ -0,0 +1,69 @@
+package com.snp.batch.jobs.datasync.batch.facility.entity;
+
+import com.snp.batch.common.util.JobExecutionGroupable;
+import lombok.*;
+import lombok.experimental.SuperBuilder;
+import java.time.ZonedDateTime;
+
+// Target-side entity mirroring FacilityPortDto; field order matches the
+// upsert column order used in FacilitySql / FacilityRepositoryImpl.
+@Data
+@SuperBuilder
+@AllArgsConstructor
+public class FacilityPortEntity implements JobExecutionGroupable {
+    private Long portId;
+    private String bfrId;
+    private String status;
+    private String portNm;
+    private String unPortCd;
+    private String countryCd;
+    private String countryNm;
+    private String areanm;
+    private String cntntnm;
+    private String mstPortId;
+    private Double latDecml;
+    private Double lonDecml;
+    private Double positionLat;
+    private Double positionLon;
+    private Double positionZVal;
+    private Double positionMvalVal;
+    private Boolean zValHasYn;
+    private Boolean
mvalValHasYn;
+    private Boolean positionNulYn;
+    private Long positionStsId;
+    private String hrZone;
+    private Boolean daylgtSaveHr;
+    private Double maxDraft;
+    private Double maxWhlnth;
+    private Double maxBeam;
+    private Double maxDwt;
+    private Double maxSeaDraft;
+    private Double maxSeaWhlnth;
+    private Double maxSeaBcm;
+    private Double maxSeaDwt;
+    private Boolean baleCargoFacility;
+    private Boolean cntnrFacility;
+    private Boolean caseCargoFacility;
+    private Boolean liquidCargoFacility;
+    private Boolean roroFacility;
+    private Boolean paxfclty;
+    private Boolean drydkfclty;
+    private Long lpgFacility;
+    private Long lngFacility;
+    private Boolean lngBnkr;
+    private Boolean doBnkr;
+    private Boolean foBnkr;
+    private Boolean ispsComplianceYn;
+    private Boolean csiComplianceYn;
+    private Boolean freeTrdZone;
+    private Boolean ecfrdPort;
+    private Boolean emsnCtrlArea;
+    private Long wsPort;
+    private ZonedDateTime lastMdfcnDt;
+    private ZonedDateTime regYmd;
+
+    // Grouping key carried through the step; not persisted by the upsert SQL.
+    private Long jobExecutionId;
+
+    @Override
+    public Long getJobExecutionId() {
+        return this.jobExecutionId;
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/processor/FacilityPortProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/processor/FacilityPortProcessor.java
new file mode 100644
index 0000000..3c66032
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/processor/FacilityPortProcessor.java
@@ -0,0 +1,66 @@
+package com.snp.batch.jobs.datasync.batch.facility.processor;
+
+import com.snp.batch.common.batch.processor.BaseProcessor;
+import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
+import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
+import lombok.extern.slf4j.Slf4j;
+
+// One-to-one field copy from the source DTO to the target entity; no
+// transformation logic beyond the mapping itself.
+@Slf4j
+public class FacilityPortProcessor extends BaseProcessor {
+    @Override
+    protected FacilityPortEntity processItem(FacilityPortDto dto) throws Exception {
+        return FacilityPortEntity.builder()
+
.jobExecutionId(dto.getJobExecutionId())
+                .portId(dto.getPortId())
+                .bfrId(dto.getBfrId())
+                .status(dto.getStatus())
+                .portNm(dto.getPortNm())
+                .unPortCd(dto.getUnPortCd())
+                .countryCd(dto.getCountryCd())
+                .countryNm(dto.getCountryNm())
+                .areanm(dto.getAreanm())
+                .cntntnm(dto.getCntntnm())
+                .mstPortId(dto.getMstPortId())
+                .latDecml(dto.getLatDecml())
+                .lonDecml(dto.getLonDecml())
+                .positionLat(dto.getPositionLat())
+                .positionLon(dto.getPositionLon())
+                .positionZVal(dto.getPositionZVal())
+                .positionMvalVal(dto.getPositionMvalVal())
+                .zValHasYn(dto.getZValHasYn())
+                .mvalValHasYn(dto.getMvalValHasYn())
+                .positionNulYn(dto.getPositionNulYn())
+                .positionStsId(dto.getPositionStsId())
+                .hrZone(dto.getHrZone())
+                .daylgtSaveHr(dto.getDaylgtSaveHr())
+                .maxDraft(dto.getMaxDraft())
+                .maxWhlnth(dto.getMaxWhlnth())
+                .maxBeam(dto.getMaxBeam())
+                .maxDwt(dto.getMaxDwt())
+                .maxSeaDraft(dto.getMaxSeaDraft())
+                .maxSeaWhlnth(dto.getMaxSeaWhlnth())
+                .maxSeaBcm(dto.getMaxSeaBcm())
+                .maxSeaDwt(dto.getMaxSeaDwt())
+                .baleCargoFacility(dto.getBaleCargoFacility())
+                .cntnrFacility(dto.getCntnrFacility())
+                .caseCargoFacility(dto.getCaseCargoFacility())
+                .liquidCargoFacility(dto.getLiquidCargoFacility())
+                .roroFacility(dto.getRoroFacility())
+                .paxfclty(dto.getPaxfclty())
+                .drydkfclty(dto.getDrydkfclty())
+                .lpgFacility(dto.getLpgFacility())
+                .lngFacility(dto.getLngFacility())
+                .lngBnkr(dto.getLngBnkr())
+                .doBnkr(dto.getDoBnkr())
+                .foBnkr(dto.getFoBnkr())
+                .ispsComplianceYn(dto.getIspsComplianceYn())
+                .csiComplianceYn(dto.getCsiComplianceYn())
+                .freeTrdZone(dto.getFreeTrdZone())
+                .ecfrdPort(dto.getEcfrdPort())
+                .emsnCtrlArea(dto.getEmsnCtrlArea())
+                .wsPort(dto.getWsPort())
+                .lastMdfcnDt(dto.getLastMdfcnDt())
+                .regYmd(dto.getRegYmd())
+                .build();
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/reader/FacilityPortReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/reader/FacilityPortReader.java
new file
mode 100644
index 0000000..67aa60d
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/reader/FacilityPortReader.java
@@ -0,0 +1,117 @@
+package com.snp.batch.jobs.datasync.batch.facility.reader;
+
+import com.snp.batch.common.util.CommonSql;
+import com.snp.batch.common.util.TableMetaInfo;
+import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.jdbc.core.JdbcTemplate;
+
+import javax.sql.DataSource;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.List;
+
+// Reader that loads one "group" of source rows at a time (selected by the
+// next-target query from CommonSql) into an in-memory buffer and hands the
+// rows out one by one to the step.
+@Slf4j
+public class FacilityPortReader implements ItemReader {
+    private final TableMetaInfo tableMetaInfo;
+    private final JdbcTemplate businessJdbcTemplate;
+    private List allDataBuffer = new ArrayList<>();
+
+    public FacilityPortReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
+        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
+        this.tableMetaInfo = tableMetaInfo;
+    }
+
+    @Override
+    public FacilityPortDto read() throws Exception {
+        // Refill the buffer when drained; a still-empty buffer afterwards
+        // means no more groups remain and the step ends (null = end of input).
+        if (allDataBuffer.isEmpty()) {
+            fetchNextGroup();
+        }
+        if (allDataBuffer.isEmpty()) {
+            return null;
+        }
+        return allDataBuffer.remove(0);
+    }
+
+    private void fetchNextGroup() {
+        Long nextTargetId = null;
+        try {
+            nextTargetId = businessJdbcTemplate.queryForObject(
+                    CommonSql.getNextTargetQuery(tableMetaInfo.sourceFacilityPort), Long.class);
+        } catch (Exception e) {
+            // NOTE(review): any failure to find a next target (including
+            // EmptyResultDataAccessException) is swallowed and treated as
+            // "nothing left to read" — confirm this is intentional.
+            return;
+        }
+
+        if (nextTargetId != null) {
+            log.info("[FacilityPortReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
+            String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFacilityPort);
+            final Long targetId = nextTargetId;
+            this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
+                Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
+                Timestamp regYmdTs = rs.getTimestamp("reg_ymd");
+
+                // getObject-null guards keep SQL NULLs as Java nulls instead of
+                // the primitive defaults getLong/getDouble/getBoolean would return.
+                return FacilityPortDto.builder()
+                        .jobExecutionId(targetId)
+                        .portId(rs.getObject("port_id") != null ? rs.getLong("port_id") : null)
+                        .bfrId(rs.getString("bfr_id"))
+                        .status(rs.getString("status"))
+                        .portNm(rs.getString("port_nm"))
+                        .unPortCd(rs.getString("un_port_cd"))
+                        .countryCd(rs.getString("country_cd"))
+                        .countryNm(rs.getString("country_nm"))
+                        .areanm(rs.getString("areanm"))
+                        .cntntnm(rs.getString("cntntnm"))
+                        .mstPortId(rs.getString("mst_port_id"))
+                        .latDecml(rs.getObject("lat_decml") != null ? rs.getDouble("lat_decml") : null)
+                        .lonDecml(rs.getObject("lon_decml") != null ? rs.getDouble("lon_decml") : null)
+                        .positionLat(rs.getObject("position_lat") != null ? rs.getDouble("position_lat") : null)
+                        .positionLon(rs.getObject("position_lon") != null ? rs.getDouble("position_lon") : null)
+                        .positionZVal(rs.getObject("position_z_val") != null ? rs.getDouble("position_z_val") : null)
+                        .positionMvalVal(rs.getObject("position_mval_val") != null ? rs.getDouble("position_mval_val") : null)
+                        .zValHasYn(rs.getObject("z_val_has_yn") != null ? rs.getBoolean("z_val_has_yn") : null)
+                        .mvalValHasYn(rs.getObject("mval_val_has_yn") != null ? rs.getBoolean("mval_val_has_yn") : null)
+                        .positionNulYn(rs.getObject("position_nul_yn") != null ? rs.getBoolean("position_nul_yn") : null)
+                        .positionStsId(rs.getObject("position_sts_id") != null ? rs.getLong("position_sts_id") : null)
+                        .hrZone(rs.getString("hr_zone"))
+                        .daylgtSaveHr(rs.getObject("daylgt_save_hr") != null ? rs.getBoolean("daylgt_save_hr") : null)
+                        .maxDraft(rs.getObject("max_draft") != null ? rs.getDouble("max_draft") : null)
+                        .maxWhlnth(rs.getObject("max_whlnth") != null ? rs.getDouble("max_whlnth") : null)
+                        .maxBeam(rs.getObject("max_beam") != null ? rs.getDouble("max_beam") : null)
+                        .maxDwt(rs.getObject("max_dwt") != null ? rs.getDouble("max_dwt") : null)
+                        .maxSeaDraft(rs.getObject("max_sea_draft") != null ? rs.getDouble("max_sea_draft") : null)
+                        .maxSeaWhlnth(rs.getObject("max_sea_whlnth") != null ? rs.getDouble("max_sea_whlnth") : null)
+                        .maxSeaBcm(rs.getObject("max_sea_bcm") != null ? rs.getDouble("max_sea_bcm") : null)
+                        .maxSeaDwt(rs.getObject("max_sea_dwt") != null ? rs.getDouble("max_sea_dwt") : null)
+                        .baleCargoFacility(rs.getObject("bale_cargo_facility") != null ? rs.getBoolean("bale_cargo_facility") : null)
+                        .cntnrFacility(rs.getObject("cntnr_facility") != null ? rs.getBoolean("cntnr_facility") : null)
+                        .caseCargoFacility(rs.getObject("case_cargo_facility") != null ? rs.getBoolean("case_cargo_facility") : null)
+                        .liquidCargoFacility(rs.getObject("liquid_cargo_facility") != null ? rs.getBoolean("liquid_cargo_facility") : null)
+                        .roroFacility(rs.getObject("roro_facility") != null ? rs.getBoolean("roro_facility") : null)
+                        .paxfclty(rs.getObject("paxfclty") != null ? rs.getBoolean("paxfclty") : null)
+                        .drydkfclty(rs.getObject("drydkfclty") != null ? rs.getBoolean("drydkfclty") : null)
+                        .lpgFacility(rs.getObject("lpg_facility") != null ? rs.getLong("lpg_facility") : null)
+                        .lngFacility(rs.getObject("lng_facility") != null ? rs.getLong("lng_facility") : null)
+                        .lngBnkr(rs.getObject("lng_bnkr") != null ? rs.getBoolean("lng_bnkr") : null)
+                        .doBnkr(rs.getObject("do_bnkr") != null ? rs.getBoolean("do_bnkr") : null)
+                        .foBnkr(rs.getObject("fo_bnkr") != null ? rs.getBoolean("fo_bnkr") : null)
+                        .ispsComplianceYn(rs.getObject("isps_compliance_yn") != null ? rs.getBoolean("isps_compliance_yn") : null)
+                        .csiComplianceYn(rs.getObject("csi_compliance_yn") != null ? rs.getBoolean("csi_compliance_yn") : null)
+                        .freeTrdZone(rs.getObject("free_trd_zone") != null ? rs.getBoolean("free_trd_zone") : null)
+                        .ecfrdPort(rs.getObject("ecfrd_port") != null ? rs.getBoolean("ecfrd_port") : null)
+                        .emsnCtrlArea(rs.getObject("emsn_ctrl_area") != null ? rs.getBoolean("emsn_ctrl_area") : null)
+                        .wsPort(rs.getObject("ws_port") != null ? rs.getLong("ws_port") : null)
+                        .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
+                        .regYmd(regYmdTs != null ? regYmdTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
+                        .build();
+            }, nextTargetId);
+            // Mark the group as in-progress so it is not picked up again.
+            updateBatchProcessing(nextTargetId);
+        }
+    }
+
+    private void updateBatchProcessing(Long targetExecutionId) {
+        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFacilityPort);
+        businessJdbcTemplate.update(sql, targetExecutionId);
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilityRepository.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilityRepository.java
new file mode 100644
index 0000000..cd1554b
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilityRepository.java
@@ -0,0 +1,13 @@
+package com.snp.batch.jobs.datasync.batch.facility.repository;
+
+import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
+
+import java.util.List;
+
+/**
+ * FacilityEntity repository interface.
+ * Implementation: FacilityRepositoryImpl (JdbcTemplate based).
+ */
+public interface FacilityRepository {
+    void saveFacilityPort(List facilityPortEntityList);
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilityRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilityRepositoryImpl.java
new file mode 100644
index 0000000..67292e4
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilityRepositoryImpl.java
@@ -0,0 +1,153 @@
+package com.snp.batch.jobs.datasync.batch.facility.repository;
+
+import com.snp.batch.common.batch.repository.MultiDataSourceJdbcRepository;
+import com.snp.batch.common.util.TableMetaInfo;
+import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
+import lombok.extern.slf4j.Slf4j;
+import
org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.RowMapper;
+import org.springframework.stereotype.Repository;
+
+import javax.sql.DataSource;
+import java.sql.PreparedStatement;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.List;
+
+/**
+ * FacilityEntity repository (JdbcTemplate based).
+ */
+@Slf4j
+@Repository("facilityRepository")
+public class FacilityRepositoryImpl extends MultiDataSourceJdbcRepository implements FacilityRepository {
+
+    private DataSource batchDataSource;
+    private DataSource businessDataSource;
+    private final TableMetaInfo tableMetaInfo;
+
+    public FacilityRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource,
+                                  @Qualifier("businessDataSource") DataSource businessDataSource,
+                                  TableMetaInfo tableMetaInfo) {
+
+        super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource));
+
+        this.batchDataSource = batchDataSource;
+        this.businessDataSource = businessDataSource;
+        this.tableMetaInfo = tableMetaInfo;
+    }
+
+    // The base-class template hooks below are not used by this repository;
+    // only the bulk upsert path (saveFacilityPort) is implemented, so every
+    // hook returns null / is a no-op.
+    @Override
+    protected String getTableName() {
+        return null;
+    }
+
+    @Override
+    protected RowMapper getRowMapper() {
+        return null;
+    }
+
+    @Override
+    protected Long extractId(FacilityPortEntity entity) {
+        return null;
+    }
+
+    @Override
+    protected String getInsertSql() {
+        return null;
+    }
+
+    @Override
+    protected String getUpdateSql() {
+        return null;
+    }
+
+    @Override
+    protected void setInsertParameters(PreparedStatement ps, FacilityPortEntity entity) throws Exception {
+    }
+
+    @Override
+    protected void setUpdateParameters(PreparedStatement ps, FacilityPortEntity entity) throws Exception {
+    }
+
+    @Override
+    protected String getEntityName() {
+        return null;
+    }
+
+    @Override
+    public void saveFacilityPort(List facilityPortEntityList) {
+        String sql = FacilitySql.getFacilityPortUpsertSql(tableMetaInfo.targetTbPortFacilityInfo);
+        if (facilityPortEntityList == null || facilityPortEntityList.isEmpty()) {
+            return;
+        }
+        log.debug("{} 배치 삽입 시작: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
+
+        batchJdbcTemplate.batchUpdate(sql, facilityPortEntityList, facilityPortEntityList.size(),
+                (ps, entity) -> {
+                    try {
+                        bindFacilityPort(ps, entity);
+                    } catch (Exception e) {
+                        log.error("배치 삽입 파라미터 설정 실패", e);
+                        throw new RuntimeException(e);
+                    }
+                });
+
+        log.debug("{} 배치 삽입 완료: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
+    }
+
+    // Binds the 51 upsert placeholders; ordinals must match the column list
+    // in FacilitySql.getFacilityPortUpsertSql (cntntnm is bound last).
+    public void bindFacilityPort(PreparedStatement pstmt, FacilityPortEntity entity) throws Exception {
+        int idx = 1;
+        pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id
+        pstmt.setObject(idx++, entity.getPortId(), Types.BIGINT); // 2. port_id
+        pstmt.setString(idx++, entity.getBfrId()); // 3. bfr_id
+        pstmt.setString(idx++, entity.getStatus()); // 4. status
+        pstmt.setString(idx++, entity.getPortNm()); // 5. port_nm
+        pstmt.setString(idx++, entity.getUnPortCd()); // 6. un_port_cd
+        pstmt.setString(idx++, entity.getCountryCd()); // 7. country_cd
+        pstmt.setString(idx++, entity.getCountryNm()); // 8. country_nm
+        pstmt.setString(idx++, entity.getAreanm()); // 9. areanm
+        pstmt.setString(idx++, entity.getMstPortId()); // 10. mst_port_id
+        pstmt.setObject(idx++, entity.getLatDecml(), Types.DOUBLE); // 11. lat_decml
+        pstmt.setObject(idx++, entity.getLonDecml(), Types.DOUBLE); // 12. lon_decml
+        pstmt.setObject(idx++, entity.getPositionLat(), Types.DOUBLE); // 13. position_lat
+        pstmt.setObject(idx++, entity.getPositionLon(), Types.DOUBLE); // 14. position_lon
+        pstmt.setObject(idx++, entity.getPositionZVal(), Types.DOUBLE); // 15. position_z_val
+        pstmt.setObject(idx++, entity.getPositionMvalVal(), Types.DOUBLE); // 16. position_mval_val
+        pstmt.setObject(idx++, entity.getZValHasYn(), Types.BOOLEAN); // 17. z_val_has_yn
+        pstmt.setObject(idx++, entity.getMvalValHasYn(), Types.BOOLEAN); // 18. mval_val_has_yn
+        pstmt.setObject(idx++, entity.getPositionNulYn(), Types.BOOLEAN); // 19. position_nul_yn
+        pstmt.setObject(idx++, entity.getPositionStsId(), Types.BIGINT); // 20. position_sts_id
+        pstmt.setString(idx++, entity.getHrZone()); // 21. hr_zone
+        pstmt.setObject(idx++, entity.getDaylgtSaveHr(), Types.BOOLEAN); // 22. daylgt_save_hr
+        pstmt.setObject(idx++, entity.getMaxDraft(), Types.DOUBLE); // 23. max_draft
+        pstmt.setObject(idx++, entity.getMaxWhlnth(), Types.DOUBLE); // 24. max_whlnth
+        pstmt.setObject(idx++, entity.getMaxBeam(), Types.DOUBLE); // 25. max_beam
+        pstmt.setObject(idx++, entity.getMaxDwt(), Types.DOUBLE); // 26. max_dwt
+        pstmt.setObject(idx++, entity.getMaxSeaDraft(), Types.DOUBLE); // 27. max_sea_draft
+        pstmt.setObject(idx++, entity.getMaxSeaWhlnth(), Types.DOUBLE); // 28. max_sea_whlnth
+        pstmt.setObject(idx++, entity.getMaxSeaBcm(), Types.DOUBLE); // 29. max_sea_bcm
+        pstmt.setObject(idx++, entity.getMaxSeaDwt(), Types.DOUBLE); // 30. max_sea_dwt
+        pstmt.setObject(idx++, entity.getBaleCargoFacility(), Types.BOOLEAN); // 31. bale_cargo_facility
+        pstmt.setObject(idx++, entity.getCntnrFacility(), Types.BOOLEAN); // 32. cntnr_facility
+        pstmt.setObject(idx++, entity.getCaseCargoFacility(), Types.BOOLEAN); // 33. case_cargo_facility
+        pstmt.setObject(idx++, entity.getLiquidCargoFacility(), Types.BOOLEAN); // 34. liquid_cargo_facility
+        pstmt.setObject(idx++, entity.getRoroFacility(), Types.BOOLEAN); // 35. roro_facility
+        pstmt.setObject(idx++, entity.getPaxfclty(), Types.BOOLEAN); // 36. paxfclty
+        pstmt.setObject(idx++, entity.getDrydkfclty(), Types.BOOLEAN); // 37. drydkfclty
+        pstmt.setObject(idx++, entity.getLpgFacility(), Types.BIGINT); // 38. lpg_facility
+        pstmt.setObject(idx++, entity.getLngFacility(), Types.BIGINT); // 39. lng_facility
+        pstmt.setObject(idx++, entity.getLngBnkr(), Types.BOOLEAN); // 40. lng_bnkr
+        pstmt.setObject(idx++, entity.getDoBnkr(), Types.BOOLEAN); // 41. do_bnkr
+        pstmt.setObject(idx++, entity.getFoBnkr(), Types.BOOLEAN); // 42. fo_bnkr
+        pstmt.setObject(idx++, entity.getIspsComplianceYn(), Types.BOOLEAN); // 43. isps_compliance_yn
+        pstmt.setObject(idx++, entity.getCsiComplianceYn(), Types.BOOLEAN); // 44. csi_compliance_yn
+        pstmt.setObject(idx++, entity.getFreeTrdZone(), Types.BOOLEAN); // 45. free_trd_zone
+        pstmt.setObject(idx++, entity.getEcfrdPort(), Types.BOOLEAN); // 46. ecfrd_port
+        pstmt.setObject(idx++, entity.getEmsnCtrlArea(), Types.BOOLEAN); // 47. emsn_ctrl_area
+        pstmt.setObject(idx++, entity.getWsPort(), Types.BIGINT); // 48. ws_port
+        pstmt.setTimestamp(idx++, entity.getLastMdfcnDt() != null ? Timestamp.from(entity.getLastMdfcnDt().toInstant()) : null); // 49. last_mdfcn_dt
+        pstmt.setTimestamp(idx++, entity.getRegYmd() != null ? Timestamp.from(entity.getRegYmd().toInstant()) : null); // 50. reg_ymd
+        pstmt.setString(idx++, entity.getCntntnm()); // 51. cntntnm
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilitySql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilitySql.java
new file mode 100644
index 0000000..159d37d
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/repository/FacilitySql.java
@@ -0,0 +1,102 @@
+package com.snp.batch.jobs.datasync.batch.facility.repository;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+@Component
+public class FacilitySql {
+    // NOTE(review): static field mutated from an instance constructor so the
+    // static SQL builder can see the configured schema — works only because
+    // Spring instantiates this @Component once; consider a non-static design.
+    private static String TARGET_SCHEMA;
+    public FacilitySql(@Value("${app.batch.target-schema.name}") String targetSchema) {
+        TARGET_SCHEMA = targetSchema;
+    }
+    // PostgreSQL upsert keyed on port_id; column order must match
+    // FacilityRepositoryImpl.bindFacilityPort.
+    public static String getFacilityPortUpsertSql(String targetTable) {
+        return """
+            INSERT INTO %s.%s (
+                crt_dt, creatr_id,
+                port_id, bfr_id, status, port_nm, un_port_cd, country_cd,
+                country_nm, areanm, mst_port_id,
+                lat_decml, lon_decml, position_lat, position_lon, position_z_val,
+                position_mval_val, z_val_has_yn, mval_val_has_yn, position_nul_yn,
+                position_sts_id, hr_zone,
daylgt_save_hr, + max_draft, max_whlnth, max_beam, max_dwt, + max_sea_draft, max_sea_whlnth, max_sea_bcm, + max_sea_dwt, bale_cargo_facility, cntnr_facility, + case_cargo_facility, liquid_cargo_facility, roro_facility, + paxfclty, drydkfclty, lpg_facility, + lng_facility, lng_bnkr, do_bnkr, fo_bnkr, + isps_compliance_yn, csi_compliance_yn, free_trd_zone, ecfrd_port, + emsn_ctrl_area, ws_port, last_mdfcn_dt, reg_ymd, cntntnm + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, ? + ) + ON CONFLICT (port_id) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + bfr_id = EXCLUDED.bfr_id, + status = EXCLUDED.status, + port_nm = EXCLUDED.port_nm, + un_port_cd = EXCLUDED.un_port_cd, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + areanm = EXCLUDED.areanm, + cntntnm = EXCLUDED.cntntnm, + mst_port_id = EXCLUDED.mst_port_id, + lat_decml = EXCLUDED.lat_decml, + lon_decml = EXCLUDED.lon_decml, + position_lat = EXCLUDED.position_lat, + position_lon = EXCLUDED.position_lon, + position_z_val = EXCLUDED.position_z_val, + position_mval_val = EXCLUDED.position_mval_val, + z_val_has_yn = EXCLUDED.z_val_has_yn, + mval_val_has_yn = EXCLUDED.mval_val_has_yn, + position_nul_yn = EXCLUDED.position_nul_yn, + position_sts_id = EXCLUDED.position_sts_id, + hr_zone = EXCLUDED.hr_zone, + daylgt_save_hr = EXCLUDED.daylgt_save_hr, + max_draft = EXCLUDED.max_draft, + max_whlnth = EXCLUDED.max_whlnth, + max_beam = EXCLUDED.max_beam, + max_dwt = EXCLUDED.max_dwt, + max_sea_draft = EXCLUDED.max_sea_draft, + max_sea_whlnth = EXCLUDED.max_sea_whlnth, + max_sea_bcm = EXCLUDED.max_sea_bcm, + max_sea_dwt = EXCLUDED.max_sea_dwt, + bale_cargo_facility = EXCLUDED.bale_cargo_facility, + cntnr_facility = EXCLUDED.cntnr_facility, + case_cargo_facility = EXCLUDED.case_cargo_facility, + liquid_cargo_facility = 
EXCLUDED.liquid_cargo_facility, + roro_facility = EXCLUDED.roro_facility, + paxfclty = EXCLUDED.paxfclty, + drydkfclty = EXCLUDED.drydkfclty, + lpg_facility = EXCLUDED.lpg_facility, + lng_facility = EXCLUDED.lng_facility, + lng_bnkr = EXCLUDED.lng_bnkr, + do_bnkr = EXCLUDED.do_bnkr, + fo_bnkr = EXCLUDED.fo_bnkr, + isps_compliance_yn = EXCLUDED.isps_compliance_yn, + csi_compliance_yn = EXCLUDED.csi_compliance_yn, + free_trd_zone = EXCLUDED.free_trd_zone, + ecfrd_port = EXCLUDED.ecfrd_port, + emsn_ctrl_area = EXCLUDED.emsn_ctrl_area, + ws_port = EXCLUDED.ws_port, + last_mdfcn_dt = EXCLUDED.last_mdfcn_dt, + reg_ymd = EXCLUDED.reg_ymd; + ; + """.formatted(TARGET_SCHEMA, targetTable); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/facility/writer/FacilityPortWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/writer/FacilityPortWriter.java new file mode 100644 index 0000000..aa78dc4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/facility/writer/FacilityPortWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.facility.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity; +import com.snp.batch.jobs.datasync.batch.facility.repository.FacilityRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class FacilityPortWriter extends BaseChunkedWriter { + private final FacilityRepository facilityRepository; + + public FacilityPortWriter(FacilityRepository facilityRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("FacilityPortEntity", transactionManager, subChunkSize); + this.facilityRepository = facilityRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + 
facilityRepository.saveFacilityPort(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/AnchorageCallSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/AnchorageCallSyncJobConfig.java new file mode 100644 index 0000000..eede226 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/AnchorageCallSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.AnchorageCallProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.AnchorageCallReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.AnchorageCallWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import 
org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class AnchorageCallSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public AnchorageCallSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + MovementRepository movementRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "anchorageCallDataSyncJob"; + } + + @Override + protected String getStepName() { + return "anchorageCallSyncStep"; + } + + @Override + protected ItemReader createReader() { + return anchorageCallReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new AnchorageCallProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new AnchorageCallWriter(movementRepository, transactionManager, subChunkSize); + } + + @Bean + @StepScope + public ItemReader 
anchorageCallReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new AnchorageCallReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener anchorageCallWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTAnchorageCall); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "anchorageCallSyncStep") + public Step anchorageCallSyncStep() { + log.info("Step 생성: anchorageCallSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(anchorageCallWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(anchorageCallSyncStep()) + .build(); + } + + @Bean(name = "anchorageCallDataSyncJob") + public Job anchorageCallDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/BerthCallSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/BerthCallSyncJobConfig.java new file mode 100644 index 0000000..c7bc8a1 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/BerthCallSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import 
com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.BerthCallProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.BerthCallReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.BerthCallWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class BerthCallSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public BerthCallSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + MovementRepository movementRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + 
@Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "berthCallDataSyncJob"; + } + + @Override + protected String getStepName() { + return "berthCallSyncStep"; + } + + @Override + protected ItemReader createReader() { + return berthCallReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new BerthCallProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new BerthCallWriter(movementRepository, transactionManager, subChunkSize); + } + + @Bean + @StepScope + public ItemReader berthCallReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new BerthCallReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener berthCallWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTBerthCall); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "berthCallSyncStep") + public Step berthCallSyncStep() { + log.info("Step 생성: berthCallSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(berthCallWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + 
.start(berthCallSyncStep()) + .build(); + } + + @Bean(name = "berthCallDataSyncJob") + public Job berthCallDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/CurrentlyAtSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/CurrentlyAtSyncJobConfig.java new file mode 100644 index 0000000..42562d4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/CurrentlyAtSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.CurrentlyAtProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.CurrentlyAtReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.CurrentlyAtWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; 
+import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class CurrentlyAtSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public CurrentlyAtSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + MovementRepository movementRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "currentlyAtDataSyncJob"; + } + + @Override + protected String getStepName() { + return "currentlyAtSyncStep"; + } + + @Override + protected ItemReader createReader() { + return currentlyAtReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new CurrentlyAtProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new CurrentlyAtWriter(movementRepository, transactionManager, 
subChunkSize); + } + + @Bean + @StepScope + public ItemReader currentlyAtReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new CurrentlyAtReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener currentlyAtWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTCurrentlyAt); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "currentlyAtSyncStep") + public Step currentlyAtSyncStep() { + log.info("Step 생성: currentlyAtSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(currentlyAtWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(currentlyAtSyncStep()) + .build(); + } + + @Bean(name = "currentlyAtDataSyncJob") + public Job currentlyAtDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/DestinationSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/DestinationSyncJobConfig.java new file mode 100644 index 0000000..43f42fc --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/DestinationSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import 
com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.DestinationProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.DestinationReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.DestinationWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class DestinationSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public DestinationSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + MovementRepository movementRepository, + TableMetaInfo 
tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "destinationDataSyncJob"; + } + + @Override + protected String getStepName() { + return "destinationSyncStep"; + } + + @Override + protected ItemReader createReader() { + return destinationReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new DestinationProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new DestinationWriter(movementRepository, transactionManager, subChunkSize); + } + + @Bean + @StepScope + public ItemReader destinationReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new DestinationReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener destinationWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTDestination); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "destinationSyncStep") + public Step destinationSyncStep() { + log.info("Step 생성: destinationSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(destinationWriteListener()) + .build(); + 
} + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(destinationSyncStep()) + .build(); + } + + @Bean(name = "destinationDataSyncJob") + public Job destinationDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/PortCallSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/PortCallSyncJobConfig.java new file mode 100644 index 0000000..4b50c19 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/PortCallSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.PortCallProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.PortCallReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.PortCallWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import 
org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class PortCallSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public PortCallSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + MovementRepository movementRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "portCallDataSyncJob"; + } + + @Override + protected String getStepName() { + return "portCallSyncStep"; + } + + @Override + protected ItemReader createReader() { + return portCallReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new PortCallProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return 
new PortCallWriter(movementRepository, transactionManager, subChunkSize); + } + + @Bean + @StepScope + public ItemReader portCallReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new PortCallReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener portCallWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTShipStpovInfo); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "portCallSyncStep") + public Step portCallSyncStep() { + log.info("Step 생성: portCallSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(portCallWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(portCallSyncStep()) + .build(); + } + + @Bean(name = "portCallDataSyncJob") + public Job portCallDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/StsOperationSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/StsOperationSyncJobConfig.java new file mode 100644 index 0000000..aa81346 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/StsOperationSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import 
com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.StsOperationProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.StsOperationReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.StsOperationWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class StsOperationSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public StsOperationSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager 
transactionManager, + MovementRepository movementRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "stsOperationDataSyncJob"; + } + + @Override + protected String getStepName() { + return "stsOperationSyncStep"; + } + + @Override + protected ItemReader createReader() { + return stsOperationReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new StsOperationProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new StsOperationWriter(movementRepository, transactionManager, subChunkSize); + } + + @Bean + @StepScope + public ItemReader stsOperationReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new StsOperationReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener stsOperationWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTStsOperation); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "stsOperationSyncStep") + public Step stsOperationSyncStep() { + log.info("Step 생성: stsOperationSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new 
GroupByExecutionIdChunkListener()) + .listener(stsOperationWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(stsOperationSyncStep()) + .build(); + } + + @Bean(name = "stsOperationDataSyncJob") + public Job stsOperationDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/TerminalCallSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/TerminalCallSyncJobConfig.java new file mode 100644 index 0000000..6cc2a03 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/TerminalCallSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.TerminalCallProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.TerminalCallReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.TerminalCallWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import 
org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class TerminalCallSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public TerminalCallSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + MovementRepository movementRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "terminalCallDataSyncJob"; + } + + @Override + protected String getStepName() { + return "terminalCallSyncStep"; + } + + @Override + protected ItemReader createReader() { + return terminalCallReader(businessDataSource, tableMetaInfo); + } + + @Override + 
protected ItemProcessor createProcessor() { + return new TerminalCallProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new TerminalCallWriter(movementRepository, transactionManager, subChunkSize); + } + + @Bean + @StepScope + public ItemReader terminalCallReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new TerminalCallReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener terminalCallWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTTerminalCall); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "terminalCallSyncStep") + public Step terminalCallSyncStep() { + log.info("Step 생성: terminalCallSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(terminalCallWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(terminalCallSyncStep()) + .build(); + } + + @Bean(name = "terminalCallDataSyncJob") + public Job terminalCallDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/TransitSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/TransitSyncJobConfig.java new file mode 100644 index 0000000..8753268 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/config/TransitSyncJobConfig.java @@ -0,0 +1,127 @@ +package com.snp.batch.jobs.datasync.batch.movement.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; 
+import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity; +import com.snp.batch.jobs.datasync.batch.movement.processor.TransitProcessor; +import com.snp.batch.jobs.datasync.batch.movement.reader.TransitReader; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import com.snp.batch.jobs.datasync.batch.movement.writer.TransitWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class TransitSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final MovementRepository movementRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + public 
TransitSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + MovementRepository movementRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.movementRepository = movementRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "transitDataSyncJob"; + } + + @Override + protected String getStepName() { + return "transitSyncStep"; + } + + @Override + protected ItemReader createReader() { + return transitReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new TransitProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new TransitWriter(movementRepository, transactionManager, subChunkSize); + } + + @Bean + @StepScope + public ItemReader transitReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new TransitReader(businessDataSource, tableMetaInfo); + } + + @Bean + public BatchWriteListener transitWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTTransit); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean(name = "transitSyncStep") + public Step transitSyncStep() { + log.info("Step 생성: transitSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new 
GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(transitWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(transitSyncStep()) + .build(); + } + + @Bean(name = "transitDataSyncJob") + public Job transitDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/AnchorageCallDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/AnchorageCallDto.java new file mode 100644 index 0000000..18c37e1 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/AnchorageCallDto.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.movement.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class AnchorageCallDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer prtcllId; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private String dest; + private String isoTwoCountryCd; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/BerthCallDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/BerthCallDto.java new file mode 100644 index 0000000..21d4b25 --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/BerthCallDto.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.movement.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class BerthCallDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private Long upClotId; + private String isoTwoCountryCd; + private LocalDateTime eventStaDt; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/CurrentlyAtDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/CurrentlyAtDto.java new file mode 100644 index 0000000..668c53b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/CurrentlyAtDto.java @@ -0,0 +1,41 @@ +package com.snp.batch.jobs.datasync.batch.movement.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class CurrentlyAtDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer prtcllId; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer 
lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String dest; + private String countryIsoTwoCd; + private String positionInfo; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/DestinationDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/DestinationDto.java new file mode 100644 index 0000000..9ae0f24 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/DestinationDto.java @@ -0,0 +1,32 @@ +package com.snp.batch.jobs.datasync.batch.movement.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class DestinationDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private String countryCd; + private String countryNm; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private String countryIsoTwoCd; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/PortCallDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/PortCallDto.java new file mode 100644 index 0000000..5b5b8cc --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/PortCallDto.java @@ -0,0 +1,41 @@ +package 
com.snp.batch.jobs.datasync.batch.movement.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class PortCallDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer prtcllId; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String dest; + private String countryIsoTwoCd; + private String positionInfo; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/StsOperationDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/StsOperationDto.java new file mode 100644 index 0000000..f7a919a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/StsOperationDto.java @@ -0,0 +1,39 @@ +package com.snp.batch.jobs.datasync.batch.movement.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class StsOperationDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer upFacilityId; + private String 
upFacilityNm; + private String upFacilityType; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private Long upPrtcllId; + private String countryCd; + private String countryNm; + private String stsPosition; + private String stsType; + private LocalDateTime eventStaDt; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/TerminalCallDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/TerminalCallDto.java new file mode 100644 index 0000000..3af3d5a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/TerminalCallDto.java @@ -0,0 +1,41 @@ +package com.snp.batch.jobs.datasync.batch.movement.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class TerminalCallDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private Long upPrtcllId; + private String countryIsoTwoCd; + private LocalDateTime eventStaDt; + private Integer lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/TransitDto.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/TransitDto.java new file mode 100644 index 0000000..88e2824 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/dto/TransitDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.movement.dto; +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class TransitDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private String facilityNm; + private String facilityType; + private BigDecimal draft; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/AnchorageCallEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/AnchorageCallEntity.java new file mode 100644 index 0000000..53bd93d --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/AnchorageCallEntity.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class AnchorageCallEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer prtcllId; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private 
BigDecimal lon; + private String positionInfo; + private String dest; + private String isoTwoCountryCd; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/BerthCallEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/BerthCallEntity.java new file mode 100644 index 0000000..825a6ac --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/BerthCallEntity.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class BerthCallEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private Long upClotId; + private String isoTwoCountryCd; + private LocalDateTime eventStaDt; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/CurrentlyAtEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/CurrentlyAtEntity.java new file mode 100644 index 0000000..796a911 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/CurrentlyAtEntity.java @@ -0,0 +1,41 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; + 
+import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class CurrentlyAtEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer prtcllId; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String dest; + private String countryIsoTwoCd; + private String positionInfo; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/DestinationEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/DestinationEntity.java new file mode 100644 index 0000000..e93da27 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/DestinationEntity.java @@ -0,0 +1,32 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class DestinationEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private String countryCd; + private String countryNm; + private 
BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private String countryIsoTwoCd; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/PortCallEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/PortCallEntity.java new file mode 100644 index 0000000..36d4341 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/PortCallEntity.java @@ -0,0 +1,41 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class PortCallEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer prtcllId; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String dest; + private String countryIsoTwoCd; + private String positionInfo; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/StsOperationEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/StsOperationEntity.java new file mode 100644 index 0000000..a25aebb --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/StsOperationEntity.java @@ -0,0 +1,39 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class StsOperationEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + private String facilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private Long upPrtcllId; + private String countryCd; + private String countryNm; + private String stsPosition; + private String stsType; + private LocalDateTime eventStaDt; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/TerminalCallEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/TerminalCallEntity.java new file mode 100644 index 0000000..c6205bf --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/TerminalCallEntity.java @@ -0,0 +1,40 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class TerminalCallEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private Integer facilityId; + private String facilityNm; + 
private String facilityType; + private Integer upFacilityId; + private String upFacilityNm; + private String upFacilityType; + private String countryCd; + private String countryNm; + private BigDecimal draft; + private BigDecimal lat; + private BigDecimal lon; + private String positionInfo; + private Long upPrtcllId; + private String countryIsoTwoCd; + private LocalDateTime eventStaDt; + private Integer lwrnkFacilityId; + private String lwrnkFacilityDesc; + private String lwrnkFacilityType; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/TransitEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/TransitEntity.java new file mode 100644 index 0000000..5f33ac9 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/entity/TransitEntity.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.movement.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class TransitEntity implements JobExecutionGroupable { + private String imoNo; + private String mvmnType; + private LocalDateTime mvmnDt; + private String facilityNm; + private String facilityType; + private BigDecimal draft; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/AnchorageCallProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/AnchorageCallProcessor.java new file mode 100644 index 0000000..1c7bfd3 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/AnchorageCallProcessor.java @@ -0,0 +1,35 @@ +package 
com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class AnchorageCallProcessor implements ItemProcessor { + + @Override + public AnchorageCallEntity process(AnchorageCallDto item) throws Exception { + return AnchorageCallEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .prtcllId(item.getPrtcllId()) + .facilityId(item.getFacilityId()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .lwrnkFacilityId(item.getLwrnkFacilityId()) + .lwrnkFacilityDesc(item.getLwrnkFacilityDesc()) + .lwrnkFacilityType(item.getLwrnkFacilityType()) + .countryCd(item.getCountryCd()) + .countryNm(item.getCountryNm()) + .draft(item.getDraft()) + .lat(item.getLat()) + .lon(item.getLon()) + .positionInfo(item.getPositionInfo()) + .dest(item.getDest()) + .isoTwoCountryCd(item.getIsoTwoCountryCd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/BerthCallProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/BerthCallProcessor.java new file mode 100644 index 0000000..07d3284 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/BerthCallProcessor.java @@ -0,0 +1,35 @@ +package com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class BerthCallProcessor implements ItemProcessor { + + @Override + public BerthCallEntity process(BerthCallDto item) throws 
Exception { + return BerthCallEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .facilityId(item.getFacilityId()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .upFacilityId(item.getUpFacilityId()) + .upFacilityNm(item.getUpFacilityNm()) + .upFacilityType(item.getUpFacilityType()) + .countryCd(item.getCountryCd()) + .countryNm(item.getCountryNm()) + .draft(item.getDraft()) + .lat(item.getLat()) + .lon(item.getLon()) + .positionInfo(item.getPositionInfo()) + .upClotId(item.getUpClotId()) + .isoTwoCountryCd(item.getIsoTwoCountryCd()) + .eventStaDt(item.getEventStaDt()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/CurrentlyAtProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/CurrentlyAtProcessor.java new file mode 100644 index 0000000..45f8d61 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/CurrentlyAtProcessor.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class CurrentlyAtProcessor implements ItemProcessor { + + @Override + public CurrentlyAtEntity process(CurrentlyAtDto item) throws Exception { + return CurrentlyAtEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .prtcllId(item.getPrtcllId()) + .facilityId(item.getFacilityId()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .lwrnkFacilityId(item.getLwrnkFacilityId()) + .lwrnkFacilityDesc(item.getLwrnkFacilityDesc()) + 
.lwrnkFacilityType(item.getLwrnkFacilityType()) + .upFacilityId(item.getUpFacilityId()) + .upFacilityNm(item.getUpFacilityNm()) + .upFacilityType(item.getUpFacilityType()) + .countryCd(item.getCountryCd()) + .countryNm(item.getCountryNm()) + .draft(item.getDraft()) + .lat(item.getLat()) + .lon(item.getLon()) + .dest(item.getDest()) + .countryIsoTwoCd(item.getCountryIsoTwoCd()) + .positionInfo(item.getPositionInfo()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/DestinationProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/DestinationProcessor.java new file mode 100644 index 0000000..08917cb --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/DestinationProcessor.java @@ -0,0 +1,29 @@ +package com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class DestinationProcessor implements ItemProcessor { + + @Override + public DestinationEntity process(DestinationDto item) throws Exception { + return DestinationEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .facilityId(item.getFacilityId()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .countryCd(item.getCountryCd()) + .countryNm(item.getCountryNm()) + .lat(item.getLat()) + .lon(item.getLon()) + .positionInfo(item.getPositionInfo()) + .countryIsoTwoCd(item.getCountryIsoTwoCd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/PortCallProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/PortCallProcessor.java new file mode 100644 index 
0000000..1e521ad --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/PortCallProcessor.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class PortCallProcessor implements ItemProcessor { + + @Override + public PortCallEntity process(PortCallDto item) throws Exception { + return PortCallEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .prtcllId(item.getPrtcllId()) + .facilityId(item.getFacilityId()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .lwrnkFacilityId(item.getLwrnkFacilityId()) + .lwrnkFacilityDesc(item.getLwrnkFacilityDesc()) + .lwrnkFacilityType(item.getLwrnkFacilityType()) + .upFacilityId(item.getUpFacilityId()) + .upFacilityNm(item.getUpFacilityNm()) + .upFacilityType(item.getUpFacilityType()) + .countryCd(item.getCountryCd()) + .countryNm(item.getCountryNm()) + .draft(item.getDraft()) + .lat(item.getLat()) + .lon(item.getLon()) + .dest(item.getDest()) + .countryIsoTwoCd(item.getCountryIsoTwoCd()) + .positionInfo(item.getPositionInfo()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/StsOperationProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/StsOperationProcessor.java new file mode 100644 index 0000000..aeb2bc7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/StsOperationProcessor.java @@ -0,0 +1,36 @@ +package com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto; +import 
com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class StsOperationProcessor implements ItemProcessor { + + @Override + public StsOperationEntity process(StsOperationDto item) throws Exception { + return StsOperationEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .facilityId(item.getFacilityId()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .upFacilityId(item.getUpFacilityId()) + .upFacilityNm(item.getUpFacilityNm()) + .upFacilityType(item.getUpFacilityType()) + .draft(item.getDraft()) + .lat(item.getLat()) + .lon(item.getLon()) + .positionInfo(item.getPositionInfo()) + .upPrtcllId(item.getUpPrtcllId()) + .countryCd(item.getCountryCd()) + .countryNm(item.getCountryNm()) + .stsPosition(item.getStsPosition()) + .stsType(item.getStsType()) + .eventStaDt(item.getEventStaDt()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/TerminalCallProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/TerminalCallProcessor.java new file mode 100644 index 0000000..7cc6239 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/TerminalCallProcessor.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class TerminalCallProcessor implements ItemProcessor { + + @Override + public TerminalCallEntity process(TerminalCallDto item) throws Exception { + return TerminalCallEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + 
.imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .facilityId(item.getFacilityId()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .upFacilityId(item.getUpFacilityId()) + .upFacilityNm(item.getUpFacilityNm()) + .upFacilityType(item.getUpFacilityType()) + .countryCd(item.getCountryCd()) + .countryNm(item.getCountryNm()) + .draft(item.getDraft()) + .lat(item.getLat()) + .lon(item.getLon()) + .positionInfo(item.getPositionInfo()) + .upPrtcllId(item.getUpPrtcllId()) + .countryIsoTwoCd(item.getCountryIsoTwoCd()) + .eventStaDt(item.getEventStaDt()) + .lwrnkFacilityId(item.getLwrnkFacilityId()) + .lwrnkFacilityDesc(item.getLwrnkFacilityDesc()) + .lwrnkFacilityType(item.getLwrnkFacilityType()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/TransitProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/TransitProcessor.java new file mode 100644 index 0000000..233ca97 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/processor/TransitProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.movement.processor; + +import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto; +import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemProcessor; + +@Slf4j +public class TransitProcessor implements ItemProcessor { + + @Override + public TransitEntity process(TransitDto item) throws Exception { + return TransitEntity.builder() + .jobExecutionId(item.getJobExecutionId()) + .imoNo(item.getImoNo()) + .mvmnType(item.getMvmnType()) + .mvmnDt(item.getMvmnDt()) + .facilityNm(item.getFacilityNm()) + .facilityType(item.getFacilityType()) + .draft(item.getDraft()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/AnchorageCallReader.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/AnchorageCallReader.java new file mode 100644 index 0000000..0990776 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/AnchorageCallReader.java @@ -0,0 +1,85 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.math.BigDecimal; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class AnchorageCallReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public AnchorageCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public AnchorageCallDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTAnchorageCall), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[AnchorageCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTAnchorageCall); + final Long targetId = nextTargetId; + this.allDataBuffer = 
businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + + return AnchorageCallDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null) + .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null) + .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc")) + .lwrnkFacilityType(rs.getString("lwrnk_facility_type")) + .countryCd(rs.getString("country_cd")) + .countryNm(rs.getString("country_nm")) + .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) + .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) + .lon(rs.getObject("lon") != null ? 
rs.getBigDecimal("lon") : null) + .positionInfo(rs.getString("position_info")) + .dest(rs.getString("dest")) + .isoTwoCountryCd(rs.getString("iso_two_country_cd")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTAnchorageCall); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/BerthCallReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/BerthCallReader.java new file mode 100644 index 0000000..2d79011 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/BerthCallReader.java @@ -0,0 +1,85 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class BerthCallReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public BerthCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public BerthCallDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + 
private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTBerthCall), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[BerthCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTBerthCall); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt"); + + return BerthCallDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) + .upFacilityNm(rs.getString("up_facility_nm")) + .upFacilityType(rs.getString("up_facility_type")) + .countryCd(rs.getString("country_cd")) + .countryNm(rs.getString("country_nm")) + .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) + .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) + .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null) + .positionInfo(rs.getString("position_info")) + .upClotId(rs.getObject("up_clot_id") != null ? rs.getLong("up_clot_id") : null) + .isoTwoCountryCd(rs.getString("iso_two_country_cd")) + .eventStaDt(eventStaDtTs != null ? 
eventStaDtTs.toLocalDateTime() : null) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTBerthCall); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/CurrentlyAtReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/CurrentlyAtReader.java new file mode 100644 index 0000000..b62784e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/CurrentlyAtReader.java @@ -0,0 +1,87 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class CurrentlyAtReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public CurrentlyAtReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public CurrentlyAtDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = 
businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTCurrentlyAt), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[CurrentlyAtReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTCurrentlyAt); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + + return CurrentlyAtDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null) + .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null) + .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc")) + .lwrnkFacilityType(rs.getString("lwrnk_facility_type")) + .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) + .upFacilityNm(rs.getString("up_facility_nm")) + .upFacilityType(rs.getString("up_facility_type")) + .countryCd(rs.getString("country_cd")) + .countryNm(rs.getString("country_nm")) + .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) + .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) + .lon(rs.getObject("lon") != null ? 
rs.getBigDecimal("lon") : null) + .dest(rs.getString("dest")) + .countryIsoTwoCd(rs.getString("country_iso_two_cd")) + .positionInfo(rs.getString("position_info")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTCurrentlyAt); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/DestinationReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/DestinationReader.java new file mode 100644 index 0000000..6064409 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/DestinationReader.java @@ -0,0 +1,78 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class DestinationReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public DestinationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public DestinationDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return 
allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTDestination), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[DestinationReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTDestination); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + + return DestinationDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .countryCd(rs.getString("country_cd")) + .countryNm(rs.getString("country_nm")) + .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) + .lon(rs.getObject("lon") != null ? 
rs.getBigDecimal("lon") : null) + .positionInfo(rs.getString("position_info")) + .countryIsoTwoCd(rs.getString("country_iso_two_cd")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTDestination); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/PortCallReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/PortCallReader.java new file mode 100644 index 0000000..910159f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/PortCallReader.java @@ -0,0 +1,87 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class PortCallReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public PortCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public PortCallDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long 
nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTShipStpovInfo), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[PortCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTShipStpovInfo); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + + return PortCallDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null) + .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null) + .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc")) + .lwrnkFacilityType(rs.getString("lwrnk_facility_type")) + .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) + .upFacilityNm(rs.getString("up_facility_nm")) + .upFacilityType(rs.getString("up_facility_type")) + .countryCd(rs.getString("country_cd")) + .countryNm(rs.getString("country_nm")) + .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) + .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) + .lon(rs.getObject("lon") != null ? 
rs.getBigDecimal("lon") : null) + .dest(rs.getString("dest")) + .countryIsoTwoCd(rs.getString("country_iso_two_cd")) + .positionInfo(rs.getString("position_info")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTShipStpovInfo); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/StsOperationReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/StsOperationReader.java new file mode 100644 index 0000000..580f78c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/StsOperationReader.java @@ -0,0 +1,86 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class StsOperationReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public StsOperationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public StsOperationDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return 
allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTStsOperation), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[StsOperationReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTStsOperation); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt"); + + return StsOperationDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) + .upFacilityNm(rs.getString("up_facility_nm")) + .upFacilityType(rs.getString("up_facility_type")) + .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) + .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) + .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null) + .positionInfo(rs.getString("position_info")) + .upPrtcllId(rs.getObject("up_prtcll_id") != null ? rs.getLong("up_prtcll_id") : null) + .countryCd(rs.getString("country_cd")) + .countryNm(rs.getString("country_nm")) + .stsPosition(rs.getString("sts_position")) + .stsType(rs.getString("sts_type")) + .eventStaDt(eventStaDtTs != null ? 
eventStaDtTs.toLocalDateTime() : null) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTStsOperation); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/TerminalCallReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/TerminalCallReader.java new file mode 100644 index 0000000..a5a9f52 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/TerminalCallReader.java @@ -0,0 +1,88 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class TerminalCallReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public TerminalCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public TerminalCallDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = 
businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTerminalCall), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[TerminalCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTerminalCall); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt"); + + return TerminalCallDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) + .upFacilityNm(rs.getString("up_facility_nm")) + .upFacilityType(rs.getString("up_facility_type")) + .countryCd(rs.getString("country_cd")) + .countryNm(rs.getString("country_nm")) + .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) + .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) + .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null) + .positionInfo(rs.getString("position_info")) + .upPrtcllId(rs.getObject("up_prtcll_id") != null ? rs.getLong("up_prtcll_id") : null) + .countryIsoTwoCd(rs.getString("country_iso_two_cd")) + .eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null) + .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? 
rs.getInt("lwrnk_facility_id") : null) + .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc")) + .lwrnkFacilityType(rs.getString("lwrnk_facility_type")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTerminalCall); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/TransitReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/TransitReader.java new file mode 100644 index 0000000..bbe235d --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/reader/TransitReader.java @@ -0,0 +1,72 @@ +package com.snp.batch.jobs.datasync.batch.movement.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class TransitReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public TransitReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public TransitDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void 
fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTransit), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[TransitReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTransit); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); + + return TransitDto.builder() + .jobExecutionId(targetId) + .imoNo(rs.getString("imo_no")) + .mvmnType(rs.getString("mvmn_type")) + .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) + .facilityNm(rs.getString("facility_nm")) + .facilityType(rs.getString("facility_type")) + .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTransit); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementRepository.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementRepository.java new file mode 100644 index 0000000..dc6ea02 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementRepository.java @@ -0,0 +1,16 @@ +package com.snp.batch.jobs.datasync.batch.movement.repository; + +import com.snp.batch.jobs.datasync.batch.movement.entity.*; + +import java.util.List; + +public interface MovementRepository { + void saveAnchorageCall(List anchorageCallEntityList); + void saveBerthCall(List berthCallEntityList); + void saveCurrentlyAt(List currentlyAtEntityList); + void saveDestination(List destinationEntityList); + void savePortCall(List 
portCallEntityList); + void saveStsOperation(List stsOperationEntityList); + void saveTerminalCall(List terminalCallEntityList); + void saveTransit(List transitEntityList); +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementRepositoryImpl.java new file mode 100644 index 0000000..8a6b01f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementRepositoryImpl.java @@ -0,0 +1,452 @@ +package com.snp.batch.jobs.datasync.batch.movement.repository; + +import com.snp.batch.common.batch.repository.MultiDataSourceJdbcRepository; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.movement.entity.*; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +import javax.sql.DataSource; +import java.math.BigDecimal; +import java.sql.PreparedStatement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.List; + +@Slf4j +@Repository("movementRepository") +public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository implements MovementRepository { + + private DataSource batchDataSource; + private DataSource businessDataSource; + private final TableMetaInfo tableMetaInfo; + + public MovementRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + + super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource)); + + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.tableMetaInfo = tableMetaInfo; + } + + @Override + protected String getTableName() { + return null; + } + + 
@Override + protected RowMapper getRowMapper() { + return null; + } + + @Override + protected Long extractId(AnchorageCallEntity entity) { + return null; + } + + @Override + protected String getInsertSql() { + return null; + } + + @Override + protected String getUpdateSql() { + return null; + } + + @Override + protected void setInsertParameters(PreparedStatement ps, AnchorageCallEntity entity) throws Exception { + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, AnchorageCallEntity entity) throws Exception { + } + + @Override + protected String getEntityName() { + return null; + } + + // ========== AnchorageCall ========== + @Override + public void saveAnchorageCall(List anchorageCallEntityList) { + String sql = MovementSql.getAnchorageCallUpsertSql(tableMetaInfo.targetTbShipAnchrgcallHstry); + if (anchorageCallEntityList == null || anchorageCallEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, anchorageCallEntityList, anchorageCallEntityList.size(), + (ps, entity) -> { + try { + bindAnchorageCall(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size()); + } + + public void bindAnchorageCall(PreparedStatement pstmt, AnchorageCallEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + pstmt.setString(idx++, entity.getMvmnType()); + pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? 
Timestamp.valueOf(entity.getMvmnDt()) : null); + setIntOrNull(pstmt, idx++, entity.getPrtcllId()); + setIntOrNull(pstmt, idx++, entity.getFacilityId()); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + setIntOrNull(pstmt, idx++, entity.getLwrnkFacilityId()); + pstmt.setString(idx++, entity.getLwrnkFacilityDesc()); + pstmt.setString(idx++, entity.getLwrnkFacilityType()); + pstmt.setString(idx++, entity.getCountryCd()); + pstmt.setString(idx++, entity.getCountryNm()); + setBigDecimalOrNull(pstmt, idx++, entity.getDraft()); + setBigDecimalOrNull(pstmt, idx++, entity.getLat()); + setBigDecimalOrNull(pstmt, idx++, entity.getLon()); + pstmt.setString(idx++, entity.getPositionInfo()); + pstmt.setString(idx++, entity.getDest()); + pstmt.setString(idx++, entity.getIsoTwoCountryCd()); + } + + // ========== BerthCall ========== + @Override + public void saveBerthCall(List berthCallEntityList) { + String sql = MovementSql.getBerthCallUpsertSql(tableMetaInfo.targetTbShipBerthcallHstry); + if (berthCallEntityList == null || berthCallEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "BerthCallEntity", berthCallEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, berthCallEntityList, berthCallEntityList.size(), + (ps, entity) -> { + try { + bindBerthCall(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "BerthCallEntity", berthCallEntityList.size()); + } + + public void bindBerthCall(PreparedStatement pstmt, BerthCallEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + pstmt.setString(idx++, entity.getMvmnType()); + pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? 
Timestamp.valueOf(entity.getMvmnDt()) : null); + setIntOrNull(pstmt, idx++, entity.getFacilityId()); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + setIntOrNull(pstmt, idx++, entity.getUpFacilityId()); + pstmt.setString(idx++, entity.getUpFacilityNm()); + pstmt.setString(idx++, entity.getUpFacilityType()); + pstmt.setString(idx++, entity.getCountryCd()); + pstmt.setString(idx++, entity.getCountryNm()); + setBigDecimalOrNull(pstmt, idx++, entity.getDraft()); + setBigDecimalOrNull(pstmt, idx++, entity.getLat()); + setBigDecimalOrNull(pstmt, idx++, entity.getLon()); + pstmt.setString(idx++, entity.getPositionInfo()); + setLongOrNull(pstmt, idx++, entity.getUpClotId()); + pstmt.setString(idx++, entity.getIsoTwoCountryCd()); + pstmt.setTimestamp(idx++, entity.getEventStaDt() != null ? Timestamp.valueOf(entity.getEventStaDt()) : null); + } + + // ========== CurrentlyAt ========== + @Override + public void saveCurrentlyAt(List currentlyAtEntityList) { + String sql = MovementSql.getCurrentlyAtUpsertSql(tableMetaInfo.targetTbShipNowStatusHstry); + if (currentlyAtEntityList == null || currentlyAtEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, currentlyAtEntityList, currentlyAtEntityList.size(), + (ps, entity) -> { + try { + bindCurrentlyAt(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size()); + } + + public void bindCurrentlyAt(PreparedStatement pstmt, CurrentlyAtEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + pstmt.setString(idx++, entity.getMvmnType()); + pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? 
Timestamp.valueOf(entity.getMvmnDt()) : null); + setIntOrNull(pstmt, idx++, entity.getPrtcllId()); + setIntOrNull(pstmt, idx++, entity.getFacilityId()); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + setIntOrNull(pstmt, idx++, entity.getLwrnkFacilityId()); + pstmt.setString(idx++, entity.getLwrnkFacilityDesc()); + pstmt.setString(idx++, entity.getLwrnkFacilityType()); + setIntOrNull(pstmt, idx++, entity.getUpFacilityId()); + pstmt.setString(idx++, entity.getUpFacilityNm()); + pstmt.setString(idx++, entity.getUpFacilityType()); + pstmt.setString(idx++, entity.getCountryCd()); + pstmt.setString(idx++, entity.getCountryNm()); + setBigDecimalOrNull(pstmt, idx++, entity.getDraft()); + setBigDecimalOrNull(pstmt, idx++, entity.getLat()); + setBigDecimalOrNull(pstmt, idx++, entity.getLon()); + pstmt.setString(idx++, entity.getDest()); + pstmt.setString(idx++, entity.getCountryIsoTwoCd()); + pstmt.setString(idx++, entity.getPositionInfo()); + } + + // ========== Destination ========== + @Override + public void saveDestination(List destinationEntityList) { + String sql = MovementSql.getDestinationUpsertSql(tableMetaInfo.targetTbShipDestHstry); + if (destinationEntityList == null || destinationEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "DestinationEntity", destinationEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, destinationEntityList, destinationEntityList.size(), + (ps, entity) -> { + try { + bindDestination(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "DestinationEntity", destinationEntityList.size()); + } + + public void bindDestination(PreparedStatement pstmt, DestinationEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + pstmt.setString(idx++, entity.getMvmnType()); + 
pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? Timestamp.valueOf(entity.getMvmnDt()) : null); + setIntOrNull(pstmt, idx++, entity.getFacilityId()); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + pstmt.setString(idx++, entity.getCountryCd()); + pstmt.setString(idx++, entity.getCountryNm()); + setBigDecimalOrNull(pstmt, idx++, entity.getLat()); + setBigDecimalOrNull(pstmt, idx++, entity.getLon()); + pstmt.setString(idx++, entity.getPositionInfo()); + pstmt.setString(idx++, entity.getCountryIsoTwoCd()); + } + + // ========== PortCall ========== + @Override + public void savePortCall(List portCallEntityList) { + String sql = MovementSql.getPortCallUpsertSql(tableMetaInfo.targetTbShipPrtcllHstry); + if (portCallEntityList == null || portCallEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "PortCallEntity", portCallEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, portCallEntityList, portCallEntityList.size(), + (ps, entity) -> { + try { + bindPortCall(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "PortCallEntity", portCallEntityList.size()); + } + + public void bindPortCall(PreparedStatement pstmt, PortCallEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + pstmt.setString(idx++, entity.getMvmnType()); + pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? 
Timestamp.valueOf(entity.getMvmnDt()) : null); + setIntOrNull(pstmt, idx++, entity.getPrtcllId()); + setIntOrNull(pstmt, idx++, entity.getFacilityId()); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + setIntOrNull(pstmt, idx++, entity.getLwrnkFacilityId()); + pstmt.setString(idx++, entity.getLwrnkFacilityDesc()); + pstmt.setString(idx++, entity.getLwrnkFacilityType()); + setIntOrNull(pstmt, idx++, entity.getUpFacilityId()); + pstmt.setString(idx++, entity.getUpFacilityNm()); + pstmt.setString(idx++, entity.getUpFacilityType()); + pstmt.setString(idx++, entity.getCountryCd()); + pstmt.setString(idx++, entity.getCountryNm()); + setBigDecimalOrNull(pstmt, idx++, entity.getDraft()); + setBigDecimalOrNull(pstmt, idx++, entity.getLat()); + setBigDecimalOrNull(pstmt, idx++, entity.getLon()); + pstmt.setString(idx++, entity.getDest()); + pstmt.setString(idx++, entity.getCountryIsoTwoCd()); + pstmt.setString(idx++, entity.getPositionInfo()); + } + + // ========== StsOperation ========== + @Override + public void saveStsOperation(List stsOperationEntityList) { + String sql = MovementSql.getStsOperationUpsertSql(tableMetaInfo.targetTbShipStsOpertHstry); + if (stsOperationEntityList == null || stsOperationEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "StsOperationEntity", stsOperationEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, stsOperationEntityList, stsOperationEntityList.size(), + (ps, entity) -> { + try { + bindStsOperation(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "StsOperationEntity", stsOperationEntityList.size()); + } + + public void bindStsOperation(PreparedStatement pstmt, StsOperationEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + pstmt.setString(idx++, entity.getMvmnType()); + 
pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? Timestamp.valueOf(entity.getMvmnDt()) : null); + setIntOrNull(pstmt, idx++, entity.getFacilityId()); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + setIntOrNull(pstmt, idx++, entity.getUpFacilityId()); + pstmt.setString(idx++, entity.getUpFacilityNm()); + pstmt.setString(idx++, entity.getUpFacilityType()); + setBigDecimalOrNull(pstmt, idx++, entity.getDraft()); + setBigDecimalOrNull(pstmt, idx++, entity.getLat()); + setBigDecimalOrNull(pstmt, idx++, entity.getLon()); + pstmt.setString(idx++, entity.getPositionInfo()); + setLongOrNull(pstmt, idx++, entity.getUpPrtcllId()); + pstmt.setString(idx++, entity.getCountryCd()); + pstmt.setString(idx++, entity.getCountryNm()); + pstmt.setString(idx++, entity.getStsPosition()); + pstmt.setString(idx++, entity.getStsType()); + pstmt.setTimestamp(idx++, entity.getEventStaDt() != null ? Timestamp.valueOf(entity.getEventStaDt()) : null); + } + + // ========== TerminalCall ========== + @Override + public void saveTerminalCall(List terminalCallEntityList) { + String sql = MovementSql.getTerminalCallUpsertSql(tableMetaInfo.targetTbShipTeminalcallHstry); + if (terminalCallEntityList == null || terminalCallEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "TerminalCallEntity", terminalCallEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, terminalCallEntityList, terminalCallEntityList.size(), + (ps, entity) -> { + try { + bindTerminalCall(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "TerminalCallEntity", terminalCallEntityList.size()); + } + + public void bindTerminalCall(PreparedStatement pstmt, TerminalCallEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + pstmt.setString(idx++, entity.getMvmnType()); + 
pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? Timestamp.valueOf(entity.getMvmnDt()) : null); + setIntOrNull(pstmt, idx++, entity.getFacilityId()); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + setIntOrNull(pstmt, idx++, entity.getUpFacilityId()); + pstmt.setString(idx++, entity.getUpFacilityNm()); + pstmt.setString(idx++, entity.getUpFacilityType()); + pstmt.setString(idx++, entity.getCountryCd()); + pstmt.setString(idx++, entity.getCountryNm()); + setBigDecimalOrNull(pstmt, idx++, entity.getDraft()); + setBigDecimalOrNull(pstmt, idx++, entity.getLat()); + setBigDecimalOrNull(pstmt, idx++, entity.getLon()); + pstmt.setString(idx++, entity.getPositionInfo()); + setLongOrNull(pstmt, idx++, entity.getUpPrtcllId()); + pstmt.setString(idx++, entity.getCountryIsoTwoCd()); + pstmt.setTimestamp(idx++, entity.getEventStaDt() != null ? Timestamp.valueOf(entity.getEventStaDt()) : null); + setIntOrNull(pstmt, idx++, entity.getLwrnkFacilityId()); + pstmt.setString(idx++, entity.getLwrnkFacilityDesc()); + pstmt.setString(idx++, entity.getLwrnkFacilityType()); + } + + // ========== Transit ========== + @Override + public void saveTransit(List transitEntityList) { + String sql = MovementSql.getTransitUpsertSql(tableMetaInfo.targetTbShipTrnstHstry); + if (transitEntityList == null || transitEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "TransitEntity", transitEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, transitEntityList, transitEntityList.size(), + (ps, entity) -> { + try { + bindTransit(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "TransitEntity", transitEntityList.size()); + } + + public void bindTransit(PreparedStatement pstmt, TransitEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); + pstmt.setString(idx++, entity.getImoNo()); + 
pstmt.setString(idx++, entity.getMvmnType()); + pstmt.setTimestamp(idx++, entity.getMvmnDt() != null ? Timestamp.valueOf(entity.getMvmnDt()) : null); + pstmt.setString(idx++, entity.getFacilityNm()); + pstmt.setString(idx++, entity.getFacilityType()); + setBigDecimalOrNull(pstmt, idx++, entity.getDraft()); + } + + // ========== Helper Methods ========== + private void setIntOrNull(PreparedStatement pstmt, int idx, Integer value) throws Exception { + if (value != null) { + pstmt.setInt(idx, value); + } else { + pstmt.setNull(idx, Types.INTEGER); + } + } + + private void setLongOrNull(PreparedStatement pstmt, int idx, Long value) throws Exception { + if (value != null) { + pstmt.setLong(idx, value); + } else { + pstmt.setNull(idx, Types.BIGINT); + } + } + + private void setBigDecimalOrNull(PreparedStatement pstmt, int idx, BigDecimal value) throws Exception { + if (value != null) { + pstmt.setBigDecimal(idx, value); + } else { + pstmt.setNull(idx, Types.DECIMAL); + } + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementSql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementSql.java new file mode 100644 index 0000000..33dda2a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/repository/MovementSql.java @@ -0,0 +1,291 @@ +package com.snp.batch.jobs.datasync.batch.movement.repository; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class MovementSql { + private static String TARGET_SCHEMA; + public MovementSql(@Value("${app.batch.target-schema.name}") String targetSchema) { + TARGET_SCHEMA = targetSchema; + } + + public static String getAnchorageCallUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, prtcll_id, facility_id, facility_nm, + facility_type, lwrnk_facility_id, lwrnk_facility_desc, lwrnk_facility_type, + 
country_cd, country_nm, draft, lat, lon, position_info, dest, iso_two_country_cd + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?::jsonb, ?, ? + ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + prtcll_id = EXCLUDED.prtcll_id, + facility_id = EXCLUDED.facility_id, + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + lwrnk_facility_id = EXCLUDED.lwrnk_facility_id, + lwrnk_facility_desc = EXCLUDED.lwrnk_facility_desc, + lwrnk_facility_type = EXCLUDED.lwrnk_facility_type, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + draft = EXCLUDED.draft, + lat = EXCLUDED.lat, + lon = EXCLUDED.lon, + position_info = EXCLUDED.position_info, + dest = EXCLUDED.dest, + iso_two_country_cd = EXCLUDED.iso_two_country_cd; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getBerthCallUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, facility_id, facility_nm, facility_type, + up_facility_id, up_facility_nm, up_facility_type, country_cd, country_nm, + draft, lat, lon, position_info, up_clot_id, iso_two_country_cd, event_sta_dt + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, + ?, ?, ?, ?::jsonb, ?, ?, ? 
+ ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + facility_id = EXCLUDED.facility_id, + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + up_facility_id = EXCLUDED.up_facility_id, + up_facility_nm = EXCLUDED.up_facility_nm, + up_facility_type = EXCLUDED.up_facility_type, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + draft = EXCLUDED.draft, + lat = EXCLUDED.lat, + lon = EXCLUDED.lon, + position_info = EXCLUDED.position_info, + up_clot_id = EXCLUDED.up_clot_id, + iso_two_country_cd = EXCLUDED.iso_two_country_cd, + event_sta_dt = EXCLUDED.event_sta_dt; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getCurrentlyAtUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, prtcll_id, facility_id, facility_nm, + facility_type, lwrnk_facility_id, lwrnk_facility_desc, lwrnk_facility_type, + up_facility_id, up_facility_nm, up_facility_type, country_cd, country_nm, + draft, lat, lon, dest, country_iso_two_cd, position_info + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?::jsonb + ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + prtcll_id = EXCLUDED.prtcll_id, + facility_id = EXCLUDED.facility_id, + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + lwrnk_facility_id = EXCLUDED.lwrnk_facility_id, + lwrnk_facility_desc = EXCLUDED.lwrnk_facility_desc, + lwrnk_facility_type = EXCLUDED.lwrnk_facility_type, + up_facility_id = EXCLUDED.up_facility_id, + up_facility_nm = EXCLUDED.up_facility_nm, + up_facility_type = EXCLUDED.up_facility_type, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + draft = EXCLUDED.draft, + lat = EXCLUDED.lat, + lon = EXCLUDED.lon, + dest = EXCLUDED.dest, + 
country_iso_two_cd = EXCLUDED.country_iso_two_cd, + position_info = EXCLUDED.position_info; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getDestinationUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, facility_id, facility_nm, + facility_type, country_cd, country_nm, lat, lon, position_info, country_iso_two_cd + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?::jsonb, ? + ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + facility_id = EXCLUDED.facility_id, + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + lat = EXCLUDED.lat, + lon = EXCLUDED.lon, + position_info = EXCLUDED.position_info, + country_iso_two_cd = EXCLUDED.country_iso_two_cd; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getPortCallUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, prtcll_id, facility_id, facility_nm, + facility_type, lwrnk_facility_id, lwrnk_facility_desc, lwrnk_facility_type, + up_facility_id, up_facility_nm, up_facility_type, country_cd, country_nm, + draft, lat, lon, dest, country_iso_two_cd, position_info + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?::jsonb + ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + prtcll_id = EXCLUDED.prtcll_id, + facility_id = EXCLUDED.facility_id, + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + lwrnk_facility_id = EXCLUDED.lwrnk_facility_id, + lwrnk_facility_desc = EXCLUDED.lwrnk_facility_desc, + lwrnk_facility_type = EXCLUDED.lwrnk_facility_type, + up_facility_id = EXCLUDED.up_facility_id, + up_facility_nm = 
EXCLUDED.up_facility_nm, + up_facility_type = EXCLUDED.up_facility_type, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + draft = EXCLUDED.draft, + lat = EXCLUDED.lat, + lon = EXCLUDED.lon, + dest = EXCLUDED.dest, + country_iso_two_cd = EXCLUDED.country_iso_two_cd, + position_info = EXCLUDED.position_info; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getStsOperationUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, facility_id, facility_nm, facility_type, + up_facility_id, up_facility_nm, up_facility_type, draft, lat, lon, + position_info, up_prtcll_id, country_cd, country_nm, sts_position, sts_type, event_sta_dt + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?, + ?::jsonb, ?, ?, ?, ?, ?, ? + ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt, facility_id) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + up_facility_id = EXCLUDED.up_facility_id, + up_facility_nm = EXCLUDED.up_facility_nm, + up_facility_type = EXCLUDED.up_facility_type, + draft = EXCLUDED.draft, + lat = EXCLUDED.lat, + lon = EXCLUDED.lon, + position_info = EXCLUDED.position_info, + up_prtcll_id = EXCLUDED.up_prtcll_id, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + sts_position = EXCLUDED.sts_position, + sts_type = EXCLUDED.sts_type, + event_sta_dt = EXCLUDED.event_sta_dt; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getTerminalCallUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, facility_id, facility_nm, facility_type, + up_facility_id, up_facility_nm, up_facility_type, country_cd, country_nm, + draft, lat, lon, position_info, up_prtcll_id, country_iso_two_cd, + event_sta_dt, lwrnk_facility_id, lwrnk_facility_desc, lwrnk_facility_type 
+ ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, + ?, ?, ?, ?::jsonb, ?, ?, + ?, ?, ?, ? + ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + facility_id = EXCLUDED.facility_id, + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + up_facility_id = EXCLUDED.up_facility_id, + up_facility_nm = EXCLUDED.up_facility_nm, + up_facility_type = EXCLUDED.up_facility_type, + country_cd = EXCLUDED.country_cd, + country_nm = EXCLUDED.country_nm, + draft = EXCLUDED.draft, + lat = EXCLUDED.lat, + lon = EXCLUDED.lon, + position_info = EXCLUDED.position_info, + up_prtcll_id = EXCLUDED.up_prtcll_id, + country_iso_two_cd = EXCLUDED.country_iso_two_cd, + event_sta_dt = EXCLUDED.event_sta_dt, + lwrnk_facility_id = EXCLUDED.lwrnk_facility_id, + lwrnk_facility_desc = EXCLUDED.lwrnk_facility_desc, + lwrnk_facility_type = EXCLUDED.lwrnk_facility_type; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getTransitUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, mvmn_type, mvmn_dt, facility_nm, facility_type, draft + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ? 
+ ) + ON CONFLICT (imo_no, mvmn_type, mvmn_dt) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + facility_nm = EXCLUDED.facility_nm, + facility_type = EXCLUDED.facility_type, + draft = EXCLUDED.draft; + """.formatted(TARGET_SCHEMA, targetTable); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/AnchorageCallWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/AnchorageCallWriter.java new file mode 100644 index 0000000..1e879c7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/AnchorageCallWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.movement.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class AnchorageCallWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public AnchorageCallWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("AnchorageCallEntity", transactionManager, subChunkSize); + this.movementRepository = movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.saveAnchorageCall(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/BerthCallWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/BerthCallWriter.java new file mode 100644 index 0000000..665a11e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/BerthCallWriter.java @@ -0,0 +1,27 @@ +package 
com.snp.batch.jobs.datasync.batch.movement.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class BerthCallWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public BerthCallWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("BerthCallEntity", transactionManager, subChunkSize); + this.movementRepository = movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.saveBerthCall(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/CurrentlyAtWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/CurrentlyAtWriter.java new file mode 100644 index 0000000..a36da5f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/CurrentlyAtWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.movement.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class CurrentlyAtWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public CurrentlyAtWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("CurrentlyAtEntity", 
transactionManager, subChunkSize); + this.movementRepository = movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.saveCurrentlyAt(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/DestinationWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/DestinationWriter.java new file mode 100644 index 0000000..45ee6e1 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/DestinationWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.movement.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class DestinationWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public DestinationWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("DestinationEntity", transactionManager, subChunkSize); + this.movementRepository = movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.saveDestination(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/PortCallWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/PortCallWriter.java new file mode 100644 index 0000000..d088045 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/PortCallWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.movement.writer; + +import 
com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class PortCallWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public PortCallWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("PortCallEntity", transactionManager, subChunkSize); + this.movementRepository = movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.savePortCall(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/StsOperationWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/StsOperationWriter.java new file mode 100644 index 0000000..b10fe50 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/StsOperationWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.movement.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class StsOperationWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public StsOperationWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("StsOperationEntity", transactionManager, subChunkSize); + this.movementRepository = 
movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.saveStsOperation(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/TerminalCallWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/TerminalCallWriter.java new file mode 100644 index 0000000..7b028ab --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/TerminalCallWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.movement.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class TerminalCallWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public TerminalCallWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("TerminalCallEntity", transactionManager, subChunkSize); + this.movementRepository = movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.saveTerminalCall(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/TransitWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/TransitWriter.java new file mode 100644 index 0000000..aa4173d --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/movement/writer/TransitWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.movement.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import 
com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity; +import com.snp.batch.jobs.datasync.batch.movement.repository.MovementRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class TransitWriter extends BaseChunkedWriter { + private final MovementRepository movementRepository; + + public TransitWriter(MovementRepository movementRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("TransitEntity", transactionManager, subChunkSize); + this.movementRepository = movementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + movementRepository.saveTransit(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/psc/config/PscSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/config/PscSyncJobConfig.java new file mode 100644 index 0000000..b8b7a5e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/config/PscSyncJobConfig.java @@ -0,0 +1,208 @@ +package com.snp.batch.jobs.datasync.batch.psc.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto; +import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto; +import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto; +import com.snp.batch.jobs.datasync.batch.psc.entity.PscAllCertificateEntity; +import com.snp.batch.jobs.datasync.batch.psc.entity.PscDefectEntity; +import 
com.snp.batch.jobs.datasync.batch.psc.entity.PscDetailEntity; +import com.snp.batch.jobs.datasync.batch.psc.processor.PscAllCertificateProcessor; +import com.snp.batch.jobs.datasync.batch.psc.processor.PscDefectProcessor; +import com.snp.batch.jobs.datasync.batch.psc.processor.PscDetailProcessor; +import com.snp.batch.jobs.datasync.batch.psc.reader.PscAllCertificateReader; +import com.snp.batch.jobs.datasync.batch.psc.reader.PscDefectReader; +import com.snp.batch.jobs.datasync.batch.psc.reader.PscDetailReader; +import com.snp.batch.jobs.datasync.batch.psc.repository.PscRepository; +import com.snp.batch.jobs.datasync.batch.psc.writer.PscAllCertificateWriter; +import com.snp.batch.jobs.datasync.batch.psc.writer.PscDefectWriter; +import com.snp.batch.jobs.datasync.batch.psc.writer.PscDetailWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class PscSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final PscRepository pscRepository; + private final DataSource batchDataSource; + private final DataSource 
businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + /** + * 생성자 주입 + */ + public PscSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + PscRepository pscRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.pscRepository = pscRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "pscDataSyncJob"; + } + + @Override + protected String getStepName() { + return "pscDetailSyncStep"; + } + + @Override + protected ItemReader createReader() { + return pscDetailReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new PscDetailProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new PscDetailWriter(pscRepository, transactionManager, subChunkSize); + } + + // --- PscDetail Reader --- + + @Bean + @StepScope + public ItemReader pscDetailReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new PscDetailReader(businessDataSource, tableMetaInfo); + } + + // --- PscDefect Reader --- + + @Bean + @StepScope + public ItemReader pscDefectReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new PscDefectReader(businessDataSource, tableMetaInfo); + } + + // --- PscAllCertificate Reader --- + + @Bean + @StepScope + public ItemReader pscAllCertificateReader( + @Qualifier("businessDataSource") 
DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new PscAllCertificateReader(businessDataSource, tableMetaInfo); + } + + // --- Listeners --- + + @Bean + public BatchWriteListener pscDetailWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscDetail); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener pscDefectWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscDefect); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener pscAllCertificateWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscAllCertificate); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + // --- Steps --- + + @Bean(name = "pscDetailSyncStep") + public Step pscDetailSyncStep() { + log.info("Step 생성: pscDetailSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(pscDetailWriteListener()) + .build(); + } + + @Bean(name = "pscDefectSyncStep") + public Step pscDefectSyncStep() { + log.info("Step 생성: pscDefectSyncStep"); + return new StepBuilder("pscDefectSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(pscDefectReader(businessDataSource, tableMetaInfo)) + .processor(new PscDefectProcessor()) + .writer(new PscDefectWriter(pscRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(pscDefectWriteListener()) + .build(); + } + + @Bean(name = "pscAllCertificateSyncStep") + public Step pscAllCertificateSyncStep() { + log.info("Step 생성: 
pscAllCertificateSyncStep"); + return new StepBuilder("pscAllCertificateSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(pscAllCertificateReader(businessDataSource, tableMetaInfo)) + .processor(new PscAllCertificateProcessor()) + .writer(new PscAllCertificateWriter(pscRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(pscAllCertificateWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(pscDetailSyncStep()) + .next(pscDefectSyncStep()) + .next(pscAllCertificateSyncStep()) + .build(); + } + + @Bean(name = "pscDataSyncJob") + public Job pscDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/psc/dto/PscAllCertificateDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/dto/PscAllCertificateDto.java new file mode 100644 index 0000000..0d41d5f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/dto/PscAllCertificateDto.java @@ -0,0 +1,39 @@ +package com.snp.batch.jobs.datasync.batch.psc.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class PscAllCertificateDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String certId; + private String inspectionId; + private String imoNo; + private String certfNmCd; + private String certfNm; + private String issueEnginesCd; + private String issueEngines; + private String etcIssueEngines; + private String issueYmd; + private LocalDateTime expryYmd; + private LocalDateTime lastInspectionYmd; + private String inspectionEnginesCd; + private String inspectionEngines; + private String etcInspectionEngines; + private String 
// ===========================================================================
// Reconstructed from a collapsed git-diff dump. This span contains several
// complete new Java files; each is reproduced below under its path comment.
// (The truncated tail of PscAllCertificateDto that preceded the first
// complete file lies outside this window and is not reproduced here.)
// ===========================================================================

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/dto/PscDefectDto.java
package com.snp.batch.jobs.datasync.batch.psc.dto;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;

/**
 * PSC (Port State Control) inspection-defect row as read from the staging
 * source table (see PscDefectReader). One instance per defect record; rows
 * are grouped per source batch run via {@link #getJobExecutionId()}.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class PscDefectDto implements JobExecutionGroupable {
    // Source batch-execution id; all rows fetched for one target run share it.
    private Long jobExecutionId;
    private String datasetVer;           // dataset version of the source snapshot
    private String defectId;             // defect identifier
    private String inspectionId;         // owning inspection identifier
    // Up to three corrective actions: free-text columns plus matching codes.
    private String actnOne;
    private String actnTwo;
    private String actnThr;
    private String actnCdOne;
    private String actnCdTwo;
    private String actnCdThr;
    private String clficRespsbYn;        // Y/N flag — presumably "classification society responsible"; TODO confirm
    private String defectCd;
    private String defectCn;             // defect description text
    private String defectIemCd;
    private String detainedReasonDefect;
    private String mainDefectCd;
    private String mainDefectCn;
    private String defectTypeCd;
    private String defectTypeNm;
    private String etcActn;
    private String etcPubcEnginesRespsb;
    private String pubcEnginesRespsb;
    private String pubcEnginesRespsbCd;
    private String pubcEnginesRespsbYn;
    private String acdntDamgYn;          // Y/N flag — looks like "accident damage"; verify against source schema

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/dto/PscDetailDto.java
package com.snp.batch.jobs.datasync.batch.psc.dto;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;
import java.math.BigDecimal;
import java.time.LocalDateTime;

/**
 * PSC inspection detail (header) row as read from the staging source table
 * (see PscDetailReader). Keyed in the business sense by inspectionId.
 *
 * NOTE(review): several numeric-looking fields (dwt, gt, defectCnt, buildYy)
 * are carried as String — the source columns are presumably text; confirm
 * before adding arithmetic on them.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class PscDetailDto implements JobExecutionGroupable {
    // Source batch-execution id; all rows fetched for one target run share it.
    private Long jobExecutionId;
    private String datasetVer;           // dataset version of the source snapshot
    private String imoNo;                // IMO ship number
    private String inspectionId;
    private String typeId;
    private String clsgnNo;              // call sign number
    private String chrter;               // charterer
    private String clfic;                // classification society
    private String country;
    private LocalDateTime inspectionYmd; // inspection date (timestamp in source)
    private LocalDateTime tkoffPrmtYmd;  // release/permit date after detention — TODO confirm
    private String shipDetainedYn;       // Y/N detention flag
    private String dwt;                  // deadweight tonnage (text in source)
    private String expndInspectionYn;    // Y/N expanded-inspection flag
    private String flg;                  // flag state
    private String folwInspectionYn;     // Y/N follow-up-inspection flag
    private String gt;                   // gross tonnage (text in source)
    private String inspectionPortNm;
    private LocalDateTime lastMdfcnDt;   // last modification timestamp in source
    private String shipMngr;             // ship manager
    private Integer detainedDays;
    private String defectCnt;
    private BigDecimal defectCntDays;
    private String etcInspectionType;
    private String shponr;               // ship owner
    private String shipNm;
    private String shipTypeCd;
    private String shipTypeNm;
    private String dataSrc;
    private String unPortCd;             // UN/LOCODE port code
    private String buildYy;              // build year (text in source)

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/entity/PscAllCertificateEntity.java
package com.snp.batch.jobs.datasync.batch.psc.entity;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;
import lombok.experimental.SuperBuilder;
import java.time.LocalDateTime;

/**
 * Target-side entity for a PSC certificate row; produced from
 * PscAllCertificateDto by PscAllCertificateProcessor and persisted by
 * PscRepository.
 *
 * NOTE(review): @Data already generates getJobExecutionId(); the explicit
 * override below is redundant but harmless. Also, @SuperBuilder +
 * @AllArgsConstructor without @NoArgsConstructor is fine for the JDBC
 * batch-insert path used here, which never reflectively instantiates this
 * class.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class PscAllCertificateEntity implements JobExecutionGroupable {
    private String datasetVer;
    private String certId;
    private String inspectionId;
    private String imoNo;
    private String certfNmCd;               // certificate name code
    private String certfNm;                 // certificate name
    private String issueEnginesCd;          // issuing authority code
    private String issueEngines;            // issuing authority
    private String etcIssueEngines;
    private String issueYmd;                // issue date — String here, unlike expryYmd; source column is presumably text
    private LocalDateTime expryYmd;         // expiry date
    private LocalDateTime lastInspectionYmd;
    private String inspectionEnginesCd;
    private String inspectionEngines;
    private String etcInspectionEngines;
    private String recentInspectionPlc;     // most recent inspection place
    private String recentInspectionPlcCd;
    private String inspectionEnginesType;
    private String checkYmd;
    private String insptr;                  // inspector

    // Source batch-execution id carried through from the DTO.
    private Long jobExecutionId;

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/entity/PscDefectEntity.java
package com.snp.batch.jobs.datasync.batch.psc.entity;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;
import lombok.experimental.SuperBuilder;

/**
 * Target-side entity for a PSC defect row; field-for-field mirror of
 * PscDefectDto (jobExecutionId moved to the end), produced by
 * PscDefectProcessor and persisted by PscRepository.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class PscDefectEntity implements JobExecutionGroupable {
    private String datasetVer;
    private String defectId;
    private String inspectionId;
    private String actnOne;
    private String actnTwo;
    private String actnThr;
    private String actnCdOne;
    private String actnCdTwo;
    private String actnCdThr;
    private String clficRespsbYn;
    private String defectCd;
    private String defectCn;
    private String defectIemCd;
    private String detainedReasonDefect;
    private String mainDefectCd;
    private String mainDefectCn;
    private String defectTypeCd;
    private String defectTypeNm;
    private String etcActn;
    private String etcPubcEnginesRespsb;
    private String pubcEnginesRespsb;
    private String pubcEnginesRespsbCd;
    private String pubcEnginesRespsbYn;
    private String acdntDamgYn;

    // Source batch-execution id carried through from the DTO.
    private Long jobExecutionId;

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/entity/PscDetailEntity.java
package com.snp.batch.jobs.datasync.batch.psc.entity;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;
import lombok.experimental.SuperBuilder;
import java.math.BigDecimal;
import java.time.LocalDateTime;

/**
 * Target-side entity for a PSC inspection detail row; field-for-field mirror
 * of PscDetailDto (jobExecutionId moved to the end), produced by
 * PscDetailProcessor and persisted via PscRepository.savePscDetail.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class PscDetailEntity implements JobExecutionGroupable {
    private String datasetVer;
    private String imoNo;
    private String inspectionId;
    private String typeId;
    private String clsgnNo;
    private String chrter;
    private String clfic;
    private String country;
    private LocalDateTime inspectionYmd;
    private LocalDateTime tkoffPrmtYmd;
    private String shipDetainedYn;
    private String dwt;
    private String expndInspectionYn;
    private String flg;
    private String folwInspectionYn;
    private String gt;
    private String inspectionPortNm;
    private LocalDateTime lastMdfcnDt;
    private String shipMngr;
    private Integer detainedDays;
    private String defectCnt;
    private BigDecimal defectCntDays;
    private String etcInspectionType;
    private String shponr;
    private String shipNm;
    private String shipTypeCd;
    private String shipTypeNm;
    private String dataSrc;
    private String unPortCd;
    private String buildYy;

    // Source batch-execution id carried through from the DTO.
    private Long jobExecutionId;

    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}
// ===========================================================================
// Reconstructed from a collapsed git-diff dump. This span contains several
// complete new Java files; each is reproduced below under its path comment.
// Fixes applied to all three readers:
//   1. The catch-all around queryForObject no longer swallows exceptions
//      invisibly — the (expected) empty-result case and any real DB failure
//      are now logged before the reader signals end-of-input.
//   2. The item buffer is an ArrayDeque instead of an ArrayList, so handing
//      out items is O(1) instead of O(n) per item (remove(0) shifted the
//      whole list on every read).
// ===========================================================================

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/processor/PscAllCertificateProcessor.java
package com.snp.batch.jobs.datasync.batch.psc.processor;

import com.snp.batch.common.batch.processor.BaseProcessor;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
import com.snp.batch.jobs.datasync.batch.psc.entity.PscAllCertificateEntity;
import lombok.extern.slf4j.Slf4j;

/**
 * Maps a PscAllCertificateDto (staging row) 1:1 onto a
 * PscAllCertificateEntity (target row). Pure field copy — no transformation,
 * no filtering (never returns null, so no items are skipped).
 */
@Slf4j
public class PscAllCertificateProcessor extends BaseProcessor<PscAllCertificateDto, PscAllCertificateEntity> {
    @Override
    protected PscAllCertificateEntity processItem(PscAllCertificateDto dto) throws Exception {
        return PscAllCertificateEntity.builder()
                .jobExecutionId(dto.getJobExecutionId())
                .datasetVer(dto.getDatasetVer())
                .certId(dto.getCertId())
                .inspectionId(dto.getInspectionId())
                .imoNo(dto.getImoNo())
                .certfNmCd(dto.getCertfNmCd())
                .certfNm(dto.getCertfNm())
                .issueEnginesCd(dto.getIssueEnginesCd())
                .issueEngines(dto.getIssueEngines())
                .etcIssueEngines(dto.getEtcIssueEngines())
                .issueYmd(dto.getIssueYmd())
                .expryYmd(dto.getExpryYmd())
                .lastInspectionYmd(dto.getLastInspectionYmd())
                .inspectionEnginesCd(dto.getInspectionEnginesCd())
                .inspectionEngines(dto.getInspectionEngines())
                .etcInspectionEngines(dto.getEtcInspectionEngines())
                .recentInspectionPlc(dto.getRecentInspectionPlc())
                .recentInspectionPlcCd(dto.getRecentInspectionPlcCd())
                .inspectionEnginesType(dto.getInspectionEnginesType())
                .checkYmd(dto.getCheckYmd())
                .insptr(dto.getInsptr())
                .build();
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/processor/PscDefectProcessor.java
package com.snp.batch.jobs.datasync.batch.psc.processor;

import com.snp.batch.common.batch.processor.BaseProcessor;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
import com.snp.batch.jobs.datasync.batch.psc.entity.PscDefectEntity;
import lombok.extern.slf4j.Slf4j;

/**
 * Maps a PscDefectDto (staging row) 1:1 onto a PscDefectEntity (target row).
 * Pure field copy — no transformation, no filtering.
 */
@Slf4j
public class PscDefectProcessor extends BaseProcessor<PscDefectDto, PscDefectEntity> {
    @Override
    protected PscDefectEntity processItem(PscDefectDto dto) throws Exception {
        return PscDefectEntity.builder()
                .jobExecutionId(dto.getJobExecutionId())
                .datasetVer(dto.getDatasetVer())
                .defectId(dto.getDefectId())
                .inspectionId(dto.getInspectionId())
                .actnOne(dto.getActnOne())
                .actnTwo(dto.getActnTwo())
                .actnThr(dto.getActnThr())
                .actnCdOne(dto.getActnCdOne())
                .actnCdTwo(dto.getActnCdTwo())
                .actnCdThr(dto.getActnCdThr())
                .clficRespsbYn(dto.getClficRespsbYn())
                .defectCd(dto.getDefectCd())
                .defectCn(dto.getDefectCn())
                .defectIemCd(dto.getDefectIemCd())
                .detainedReasonDefect(dto.getDetainedReasonDefect())
                .mainDefectCd(dto.getMainDefectCd())
                .mainDefectCn(dto.getMainDefectCn())
                .defectTypeCd(dto.getDefectTypeCd())
                .defectTypeNm(dto.getDefectTypeNm())
                .etcActn(dto.getEtcActn())
                .etcPubcEnginesRespsb(dto.getEtcPubcEnginesRespsb())
                .pubcEnginesRespsb(dto.getPubcEnginesRespsb())
                .pubcEnginesRespsbCd(dto.getPubcEnginesRespsbCd())
                .pubcEnginesRespsbYn(dto.getPubcEnginesRespsbYn())
                .acdntDamgYn(dto.getAcdntDamgYn())
                .build();
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/processor/PscDetailProcessor.java
package com.snp.batch.jobs.datasync.batch.psc.processor;

import com.snp.batch.common.batch.processor.BaseProcessor;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto;
import com.snp.batch.jobs.datasync.batch.psc.entity.PscDetailEntity;
import lombok.extern.slf4j.Slf4j;

/**
 * Maps a PscDetailDto (staging row) 1:1 onto a PscDetailEntity (target row).
 * Pure field copy — no transformation, no filtering.
 */
@Slf4j
public class PscDetailProcessor extends BaseProcessor<PscDetailDto, PscDetailEntity> {
    @Override
    protected PscDetailEntity processItem(PscDetailDto dto) throws Exception {
        return PscDetailEntity.builder()
                .jobExecutionId(dto.getJobExecutionId())
                .datasetVer(dto.getDatasetVer())
                .imoNo(dto.getImoNo())
                .inspectionId(dto.getInspectionId())
                .typeId(dto.getTypeId())
                .clsgnNo(dto.getClsgnNo())
                .chrter(dto.getChrter())
                .clfic(dto.getClfic())
                .country(dto.getCountry())
                .inspectionYmd(dto.getInspectionYmd())
                .tkoffPrmtYmd(dto.getTkoffPrmtYmd())
                .shipDetainedYn(dto.getShipDetainedYn())
                .dwt(dto.getDwt())
                .expndInspectionYn(dto.getExpndInspectionYn())
                .flg(dto.getFlg())
                .folwInspectionYn(dto.getFolwInspectionYn())
                .gt(dto.getGt())
                .inspectionPortNm(dto.getInspectionPortNm())
                .lastMdfcnDt(dto.getLastMdfcnDt())
                .shipMngr(dto.getShipMngr())
                .detainedDays(dto.getDetainedDays())
                .defectCnt(dto.getDefectCnt())
                .defectCntDays(dto.getDefectCntDays())
                .etcInspectionType(dto.getEtcInspectionType())
                .shponr(dto.getShponr())
                .shipNm(dto.getShipNm())
                .shipTypeCd(dto.getShipTypeCd())
                .shipTypeNm(dto.getShipTypeNm())
                .dataSrc(dto.getDataSrc())
                .unPortCd(dto.getUnPortCd())
                .buildYy(dto.getBuildYy())
                .build();
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/reader/PscAllCertificateReader.java
package com.snp.batch.jobs.datasync.batch.psc.reader;

import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Deque;

/**
 * Reads PSC certificate rows from the business (staging) datasource, one
 * source batch-execution group at a time. Each group is identified by the id
 * returned by CommonSql.getNextTargetQuery; after buffering its rows the
 * group is marked as processing via CommonSql.getProcessBatchQuery so it is
 * not picked up again. Returning null from read() signals end of input to
 * Spring Batch.
 */
@Slf4j
public class PscAllCertificateReader implements ItemReader<PscAllCertificateDto> {
    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate businessJdbcTemplate;
    // ArrayDeque: poll() is O(1); ArrayList.remove(0) was O(n) per item.
    private final Deque<PscAllCertificateDto> allDataBuffer = new ArrayDeque<>();

    public PscAllCertificateReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.tableMetaInfo = tableMetaInfo;
    }

    @Override
    public PscAllCertificateDto read() throws Exception {
        if (allDataBuffer.isEmpty()) {
            fetchNextGroup();
        }
        // poll() returns null on an empty buffer, which ends the step.
        return allDataBuffer.poll();
    }

    /** Buffers all rows of the next unprocessed execution group, if any. */
    private void fetchNextGroup() {
        Long nextTargetId = null;
        try {
            nextTargetId = businessJdbcTemplate.queryForObject(
                    CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscAllCertificate), Long.class);
        } catch (Exception e) {
            // queryForObject throws on an empty result, which is the normal
            // "no pending work" signal — but this also catches real DB
            // failures, so log before ending the step.
            // NOTE(review): consider catching EmptyResultDataAccessException
            // specifically and letting other exceptions propagate.
            log.debug("[PscAllCertificateReader] 처리 대상 없음 또는 조회 실패: {}", e.getMessage());
            return;
        }

        if (nextTargetId != null) {
            log.info("[PscAllCertificateReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
            String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscAllCertificate);
            final Long targetId = nextTargetId;
            allDataBuffer.addAll(businessJdbcTemplate.query(sql, (rs, rowNum) -> {
                Timestamp expryYmdTs = rs.getTimestamp("expry_ymd");
                Timestamp lastInspectionYmdTs = rs.getTimestamp("last_inspection_ymd");

                return PscAllCertificateDto.builder()
                        .jobExecutionId(targetId)
                        .datasetVer(rs.getString("dataset_ver"))
                        .certId(rs.getString("cert_id"))
                        .inspectionId(rs.getString("inspection_id"))
                        .imoNo(rs.getString("imo_no"))
                        .certfNmCd(rs.getString("certf_nm_cd"))
                        .certfNm(rs.getString("certf_nm"))
                        .issueEnginesCd(rs.getString("issue_engines_cd"))
                        .issueEngines(rs.getString("issue_engines"))
                        .etcIssueEngines(rs.getString("etc_issue_engines"))
                        .issueYmd(rs.getString("issue_ymd"))
                        .expryYmd(expryYmdTs != null ? expryYmdTs.toLocalDateTime() : null)
                        .lastInspectionYmd(lastInspectionYmdTs != null ? lastInspectionYmdTs.toLocalDateTime() : null)
                        .inspectionEnginesCd(rs.getString("inspection_engines_cd"))
                        .inspectionEngines(rs.getString("inspection_engines"))
                        .etcInspectionEngines(rs.getString("etc_inspection_engines"))
                        .recentInspectionPlc(rs.getString("recent_inspection_plc"))
                        .recentInspectionPlcCd(rs.getString("recent_inspection_plc_cd"))
                        .inspectionEnginesType(rs.getString("inspection_engines_type"))
                        .checkYmd(rs.getString("check_ymd"))
                        .insptr(rs.getString("insptr"))
                        .build();
            }, nextTargetId));
            updateBatchProcessing(nextTargetId);
        }
    }

    /** Marks the group as picked up so it is not re-read by the next run. */
    private void updateBatchProcessing(Long targetExecutionId) {
        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscAllCertificate);
        businessJdbcTemplate.update(sql, targetExecutionId);
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/reader/PscDefectReader.java
package com.snp.batch.jobs.datasync.batch.psc.reader;

import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.util.ArrayDeque;
import java.util.Deque;

/**
 * Reads PSC defect rows from the business (staging) datasource, one source
 * batch-execution group at a time. Same group-buffering protocol as
 * PscAllCertificateReader; null from read() ends the step.
 */
@Slf4j
public class PscDefectReader implements ItemReader<PscDefectDto> {
    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate businessJdbcTemplate;
    // ArrayDeque: poll() is O(1); ArrayList.remove(0) was O(n) per item.
    private final Deque<PscDefectDto> allDataBuffer = new ArrayDeque<>();

    public PscDefectReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.tableMetaInfo = tableMetaInfo;
    }

    @Override
    public PscDefectDto read() throws Exception {
        if (allDataBuffer.isEmpty()) {
            fetchNextGroup();
        }
        // poll() returns null on an empty buffer, which ends the step.
        return allDataBuffer.poll();
    }

    /** Buffers all rows of the next unprocessed execution group, if any. */
    private void fetchNextGroup() {
        Long nextTargetId = null;
        try {
            nextTargetId = businessJdbcTemplate.queryForObject(
                    CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDefect), Long.class);
        } catch (Exception e) {
            // Empty result = no pending work (normal); log so that a real DB
            // failure is not silently indistinguishable from end-of-input.
            log.debug("[PscDefectReader] 처리 대상 없음 또는 조회 실패: {}", e.getMessage());
            return;
        }

        if (nextTargetId != null) {
            log.info("[PscDefectReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
            String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDefect);
            final Long targetId = nextTargetId;
            allDataBuffer.addAll(businessJdbcTemplate.query(sql, (rs, rowNum) -> {
                return PscDefectDto.builder()
                        .jobExecutionId(targetId)
                        .datasetVer(rs.getString("dataset_ver"))
                        .defectId(rs.getString("defect_id"))
                        .inspectionId(rs.getString("inspection_id"))
                        .actnOne(rs.getString("actn_one"))
                        .actnTwo(rs.getString("actn_two"))
                        .actnThr(rs.getString("actn_thr"))
                        .actnCdOne(rs.getString("actn_cd_one"))
                        .actnCdTwo(rs.getString("actn_cd_two"))
                        .actnCdThr(rs.getString("actn_cd_thr"))
                        .clficRespsbYn(rs.getString("clfic_respsb_yn"))
                        .defectCd(rs.getString("defect_cd"))
                        .defectCn(rs.getString("defect_cn"))
                        .defectIemCd(rs.getString("defect_iem_cd"))
                        .detainedReasonDefect(rs.getString("detained_reason_defect"))
                        .mainDefectCd(rs.getString("main_defect_cd"))
                        .mainDefectCn(rs.getString("main_defect_cn"))
                        .defectTypeCd(rs.getString("defect_type_cd"))
                        .defectTypeNm(rs.getString("defect_type_nm"))
                        .etcActn(rs.getString("etc_actn"))
                        .etcPubcEnginesRespsb(rs.getString("etc_pubc_engines_respsb"))
                        .pubcEnginesRespsb(rs.getString("pubc_engines_respsb"))
                        .pubcEnginesRespsbCd(rs.getString("pubc_engines_respsb_cd"))
                        .pubcEnginesRespsbYn(rs.getString("pubc_engines_respsb_yn"))
                        .acdntDamgYn(rs.getString("acdnt_damg_yn"))
                        .build();
            }, nextTargetId));
            updateBatchProcessing(nextTargetId);
        }
    }

    /** Marks the group as picked up so it is not re-read by the next run. */
    private void updateBatchProcessing(Long targetExecutionId) {
        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDefect);
        businessJdbcTemplate.update(sql, targetExecutionId);
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/reader/PscDetailReader.java
package com.snp.batch.jobs.datasync.batch.psc.reader;

import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Deque;

/**
 * Reads PSC inspection-detail rows from the business (staging) datasource,
 * one source batch-execution group at a time. Same group-buffering protocol
 * as PscAllCertificateReader; null from read() ends the step.
 */
@Slf4j
public class PscDetailReader implements ItemReader<PscDetailDto> {
    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate businessJdbcTemplate;
    // ArrayDeque: poll() is O(1); ArrayList.remove(0) was O(n) per item.
    private final Deque<PscDetailDto> allDataBuffer = new ArrayDeque<>();

    public PscDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.tableMetaInfo = tableMetaInfo;
    }

    @Override
    public PscDetailDto read() throws Exception {
        if (allDataBuffer.isEmpty()) {
            fetchNextGroup();
        }
        // poll() returns null on an empty buffer, which ends the step.
        return allDataBuffer.poll();
    }

    /** Buffers all rows of the next unprocessed execution group, if any. */
    private void fetchNextGroup() {
        Long nextTargetId = null;
        try {
            nextTargetId = businessJdbcTemplate.queryForObject(
                    CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDetail), Long.class);
        } catch (Exception e) {
            // Empty result = no pending work (normal); log so that a real DB
            // failure is not silently indistinguishable from end-of-input.
            log.debug("[PscDetailReader] 처리 대상 없음 또는 조회 실패: {}", e.getMessage());
            return;
        }

        if (nextTargetId != null) {
            log.info("[PscDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
            String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDetail);
            final Long targetId = nextTargetId;
            allDataBuffer.addAll(businessJdbcTemplate.query(sql, (rs, rowNum) -> {
                Timestamp inspectionYmdTs = rs.getTimestamp("inspection_ymd");
                Timestamp tkoffPrmtYmdTs = rs.getTimestamp("tkoff_prmt_ymd");
                Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");

                return PscDetailDto.builder()
                        .jobExecutionId(targetId)
                        .datasetVer(rs.getString("dataset_ver"))
                        .imoNo(rs.getString("imo_no"))
                        .inspectionId(rs.getString("inspection_id"))
                        .typeId(rs.getString("type_id"))
                        .clsgnNo(rs.getString("clsgn_no"))
                        .chrter(rs.getString("chrter"))
                        .clfic(rs.getString("clfic"))
                        .country(rs.getString("country"))
                        .inspectionYmd(inspectionYmdTs != null ? inspectionYmdTs.toLocalDateTime() : null)
                        .tkoffPrmtYmd(tkoffPrmtYmdTs != null ? tkoffPrmtYmdTs.toLocalDateTime() : null)
                        .shipDetainedYn(rs.getString("ship_detained_yn"))
                        .dwt(rs.getString("dwt"))
                        .expndInspectionYn(rs.getString("expnd_inspection_yn"))
                        .flg(rs.getString("flg"))
                        .folwInspectionYn(rs.getString("folw_inspection_yn"))
                        .gt(rs.getString("gt"))
                        .inspectionPortNm(rs.getString("inspection_port_nm"))
                        .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null)
                        .shipMngr(rs.getString("ship_mngr"))
                        // getInt() would turn SQL NULL into 0; check first.
                        .detainedDays(rs.getObject("detained_days") != null ? rs.getInt("detained_days") : null)
                        .defectCnt(rs.getString("defect_cnt"))
                        .defectCntDays(rs.getBigDecimal("defect_cnt_days"))
                        .etcInspectionType(rs.getString("etc_inspection_type"))
                        .shponr(rs.getString("shponr"))
                        .shipNm(rs.getString("ship_nm"))
                        .shipTypeCd(rs.getString("ship_type_cd"))
                        .shipTypeNm(rs.getString("ship_type_nm"))
                        .dataSrc(rs.getString("data_src"))
                        .unPortCd(rs.getString("un_port_cd"))
                        .buildYy(rs.getString("build_yy"))
                        .build();
            }, nextTargetId));
            updateBatchProcessing(nextTargetId);
        }
    }

    /** Marks the group as picked up so it is not re-read by the next run. */
    private void updateBatchProcessing(Long targetExecutionId) {
        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDetail);
        businessJdbcTemplate.update(sql, targetExecutionId);
    }
}

// --- src/main/java/com/snp/batch/jobs/datasync/batch/psc/repository/PscRepository.java
package com.snp.batch.jobs.datasync.batch.psc.repository;

import com.snp.batch.jobs.datasync.batch.psc.entity.PscAllCertificateEntity;
import com.snp.batch.jobs.datasync.batch.psc.entity.PscDefectEntity;
import com.snp.batch.jobs.datasync.batch.psc.entity.PscDetailEntity;

import java.util.List;

/**
 * PscEntity Repository 인터페이스.
 * Batch-writes PSC detail / defect / certificate entities to the target
 * tables (upsert semantics in the implementation).
 * 구현체: PscRepositoryImpl (JdbcTemplate 기반)
 */
public interface PscRepository {
    /** Upserts a chunk of inspection-detail rows; no-op on null/empty list. */
    void savePscDetail(List<PscDetailEntity> pscDetailEntityList);

    /** Upserts a chunk of defect rows; no-op on null/empty list. */
    void savePscDefect(List<PscDefectEntity> pscDefectEntityList);

    /** Upserts a chunk of certificate rows; no-op on null/empty list. */
    void savePscAllCertificate(List<PscAllCertificateEntity> pscAllCertificateEntityList);
}
b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/repository/PscRepositoryImpl.java @@ -0,0 +1,231 @@ +package com.snp.batch.jobs.datasync.batch.psc.repository; + +import com.snp.batch.common.batch.repository.MultiDataSourceJdbcRepository; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.psc.entity.PscAllCertificateEntity; +import com.snp.batch.jobs.datasync.batch.psc.entity.PscDefectEntity; +import com.snp.batch.jobs.datasync.batch.psc.entity.PscDetailEntity; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +import javax.sql.DataSource; +import java.sql.PreparedStatement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.List; + +/** + * PscEntity Repository (JdbcTemplate 기반) + */ +@Slf4j +@Repository("pscRepository") +public class PscRepositoryImpl extends MultiDataSourceJdbcRepository implements PscRepository { + + private DataSource batchDataSource; + private DataSource businessDataSource; + private final TableMetaInfo tableMetaInfo; + + public PscRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + + super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource)); + + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.tableMetaInfo = tableMetaInfo; + } + + @Override + protected String getTableName() { + return null; + } + + @Override + protected RowMapper getRowMapper() { + return null; + } + + @Override + protected Long extractId(PscDetailEntity entity) { + return null; + } + + @Override + protected String getInsertSql() { + return null; + } + + @Override + protected String getUpdateSql() { + return null; + } + + 
@Override + protected void setInsertParameters(PreparedStatement ps, PscDetailEntity entity) throws Exception { + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, PscDetailEntity entity) throws Exception { + } + + @Override + protected String getEntityName() { + return null; + } + + @Override + public void savePscDetail(List pscDetailEntityList) { + String sql = PscSql.getPscDetailUpsertSql(tableMetaInfo.targetTbPscDtl); + if (pscDetailEntityList == null || pscDetailEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "PscDetailEntity", pscDetailEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, pscDetailEntityList, pscDetailEntityList.size(), + (ps, entity) -> { + try { + bindPscDetail(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "PscDetailEntity", pscDetailEntityList.size()); + } + + public void bindPscDetail(PreparedStatement pstmt, PscDetailEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getImoNo()); // 3. imo_no + pstmt.setString(idx++, entity.getInspectionId()); // 4. inspection_id + pstmt.setString(idx++, entity.getTypeId()); // 5. type_id + pstmt.setString(idx++, entity.getClsgnNo()); // 6. clsgn_no + pstmt.setString(idx++, entity.getChrter()); // 7. chrter + pstmt.setString(idx++, entity.getClfic()); // 8. clfic + pstmt.setString(idx++, entity.getCountry()); // 9. country + pstmt.setTimestamp(idx++, entity.getInspectionYmd() != null ? Timestamp.valueOf(entity.getInspectionYmd()) : null); // 10. inspection_ymd + pstmt.setTimestamp(idx++, entity.getTkoffPrmtYmd() != null ? Timestamp.valueOf(entity.getTkoffPrmtYmd()) : null); // 11. tkoff_prmt_ymd + pstmt.setString(idx++, entity.getShipDetainedYn()); // 12. 
ship_detained_yn + pstmt.setString(idx++, entity.getDwt()); // 13. dwt + pstmt.setString(idx++, entity.getExpndInspectionYn()); // 14. expnd_inspection_yn + pstmt.setString(idx++, entity.getFlg()); // 15. flg + pstmt.setString(idx++, entity.getFolwInspectionYn()); // 16. folw_inspection_yn + pstmt.setString(idx++, entity.getGt()); // 17. gt + pstmt.setString(idx++, entity.getInspectionPortNm()); // 18. inspection_port_nm + pstmt.setTimestamp(idx++, entity.getLastMdfcnDt() != null ? Timestamp.valueOf(entity.getLastMdfcnDt()) : null); // 19. last_mdfcn_dt + pstmt.setString(idx++, entity.getShipMngr()); // 20. ship_mngr + pstmt.setObject(idx++, entity.getDetainedDays(), Types.INTEGER); // 21. detained_days + pstmt.setString(idx++, entity.getDefectCnt()); // 22. defect_cnt + pstmt.setBigDecimal(idx++, entity.getDefectCntDays()); // 23. defect_cnt_days + pstmt.setString(idx++, entity.getEtcInspectionType()); // 24. etc_inspection_type + pstmt.setString(idx++, entity.getShponr()); // 25. shponr + pstmt.setString(idx++, entity.getShipNm()); // 26. ship_nm + pstmt.setString(idx++, entity.getShipTypeCd()); // 27. ship_type_cd + pstmt.setString(idx++, entity.getShipTypeNm()); // 28. ship_type_nm + pstmt.setString(idx++, entity.getDataSrc()); // 29. data_src + pstmt.setString(idx++, entity.getUnPortCd()); // 30. un_port_cd + pstmt.setString(idx++, entity.getBuildYy()); // 31. 
build_yy + } + + @Override + public void savePscDefect(List pscDefectEntityList) { + String sql = PscSql.getPscDefectUpsertSql(tableMetaInfo.targetTbPscDefect); + if (pscDefectEntityList == null || pscDefectEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "PscDefectEntity", pscDefectEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, pscDefectEntityList, pscDefectEntityList.size(), + (ps, entity) -> { + try { + bindPscDefect(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "PscDefectEntity", pscDefectEntityList.size()); + } + + public void bindPscDefect(PreparedStatement pstmt, PscDefectEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getDefectId()); // 3. defect_id + pstmt.setString(idx++, entity.getInspectionId()); // 4. inspection_id + pstmt.setString(idx++, entity.getActnOne()); // 5. actn_one + pstmt.setString(idx++, entity.getActnTwo()); // 6. actn_two + pstmt.setString(idx++, entity.getActnThr()); // 7. actn_thr + pstmt.setString(idx++, entity.getActnCdOne()); // 8. actn_cd_one + pstmt.setString(idx++, entity.getActnCdTwo()); // 9. actn_cd_two + pstmt.setString(idx++, entity.getActnCdThr()); // 10. actn_cd_thr + pstmt.setString(idx++, entity.getClficRespsbYn()); // 11. clfic_respsb_yn + pstmt.setString(idx++, entity.getDefectCd()); // 12. defect_cd + pstmt.setString(idx++, entity.getDefectCn()); // 13. defect_cn + pstmt.setString(idx++, entity.getDefectIemCd()); // 14. defect_iem_cd + pstmt.setString(idx++, entity.getDetainedReasonDefect()); // 15. detained_reason_defect + pstmt.setString(idx++, entity.getMainDefectCd()); // 16. main_defect_cd + pstmt.setString(idx++, entity.getMainDefectCn()); // 17. main_defect_cn + pstmt.setString(idx++, entity.getDefectTypeCd()); // 18. 
defect_type_cd + pstmt.setString(idx++, entity.getDefectTypeNm()); // 19. defect_type_nm + pstmt.setString(idx++, entity.getEtcActn()); // 20. etc_actn + pstmt.setString(idx++, entity.getEtcPubcEnginesRespsb()); // 21. etc_pubc_engines_respsb + pstmt.setString(idx++, entity.getPubcEnginesRespsb()); // 22. pubc_engines_respsb + pstmt.setString(idx++, entity.getPubcEnginesRespsbCd()); // 23. pubc_engines_respsb_cd + pstmt.setString(idx++, entity.getPubcEnginesRespsbYn()); // 24. pubc_engines_respsb_yn + pstmt.setString(idx++, entity.getAcdntDamgYn()); // 25. acdnt_damg_yn + } + + @Override + public void savePscAllCertificate(List pscAllCertificateEntityList) { + String sql = PscSql.getPscAllCertificateUpsertSql(tableMetaInfo.targetTbPscOaCertf); + if (pscAllCertificateEntityList == null || pscAllCertificateEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, pscAllCertificateEntityList, pscAllCertificateEntityList.size(), + (ps, entity) -> { + try { + bindPscAllCertificate(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size()); + } + + public void bindPscAllCertificate(PreparedStatement pstmt, PscAllCertificateEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getCertId()); // 3. cert_id + pstmt.setString(idx++, entity.getInspectionId()); // 4. inspection_id + pstmt.setString(idx++, entity.getImoNo()); // 5. imo_no + pstmt.setString(idx++, entity.getCertfNmCd()); // 6. certf_nm_cd + pstmt.setString(idx++, entity.getCertfNm()); // 7. certf_nm + pstmt.setString(idx++, entity.getIssueEnginesCd()); // 8. 
issue_engines_cd + pstmt.setString(idx++, entity.getIssueEngines()); // 9. issue_engines + pstmt.setString(idx++, entity.getEtcIssueEngines()); // 10. etc_issue_engines + pstmt.setString(idx++, entity.getIssueYmd()); // 11. issue_ymd + pstmt.setTimestamp(idx++, entity.getExpryYmd() != null ? Timestamp.valueOf(entity.getExpryYmd()) : null); // 12. expry_ymd + pstmt.setTimestamp(idx++, entity.getLastInspectionYmd() != null ? Timestamp.valueOf(entity.getLastInspectionYmd()) : null); // 13. last_inspection_ymd + pstmt.setString(idx++, entity.getInspectionEnginesCd()); // 14. inspection_engines_cd + pstmt.setString(idx++, entity.getInspectionEngines()); // 15. inspection_engines + pstmt.setString(idx++, entity.getEtcInspectionEngines()); // 16. etc_inspection_engines + pstmt.setString(idx++, entity.getRecentInspectionPlc()); // 17. recent_inspection_plc + pstmt.setString(idx++, entity.getRecentInspectionPlcCd()); // 18. recent_inspection_plc_cd + pstmt.setString(idx++, entity.getInspectionEnginesType()); // 19. inspection_engines_type + pstmt.setString(idx++, entity.getCheckYmd()); // 20. check_ymd + pstmt.setString(idx++, entity.getInsptr()); // 21. 
insptr + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/psc/repository/PscSql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/repository/PscSql.java new file mode 100644 index 0000000..2e0af2c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/repository/PscSql.java @@ -0,0 +1,169 @@ +package com.snp.batch.jobs.datasync.batch.psc.repository; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class PscSql { + private static String TARGET_SCHEMA; + public PscSql(@Value("${app.batch.target-schema.name}") String targetSchema) { + TARGET_SCHEMA = targetSchema; + } + public static String getPscDetailUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + dataset_ver, imo_no, inspection_id, type_id, + clsgn_no, chrter, clfic, country, inspection_ymd, tkoff_prmt_ymd, + ship_detained_yn, dwt, expnd_inspection_yn, flg, + folw_inspection_yn, gt, inspection_port_nm, + last_mdfcn_dt, ship_mngr, detained_days, defect_cnt, + defect_cnt_days, etc_inspection_type, shponr, + ship_nm, ship_type_cd, ship_type_nm, data_src, + un_port_cd, build_yy + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, ?, + ?, ? 
+ ) + ON CONFLICT (inspection_id) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + dataset_ver = EXCLUDED.dataset_ver, + imo_no = EXCLUDED.imo_no, + type_id = EXCLUDED.type_id, + clsgn_no = EXCLUDED.clsgn_no, + chrter = EXCLUDED.chrter, + clfic = EXCLUDED.clfic, + country = EXCLUDED.country, + inspection_ymd = EXCLUDED.inspection_ymd, + tkoff_prmt_ymd = EXCLUDED.tkoff_prmt_ymd, + ship_detained_yn = EXCLUDED.ship_detained_yn, + dwt = EXCLUDED.dwt, + expnd_inspection_yn = EXCLUDED.expnd_inspection_yn, + flg = EXCLUDED.flg, + folw_inspection_yn = EXCLUDED.folw_inspection_yn, + gt = EXCLUDED.gt, + inspection_port_nm = EXCLUDED.inspection_port_nm, + last_mdfcn_dt = EXCLUDED.last_mdfcn_dt, + ship_mngr = EXCLUDED.ship_mngr, + detained_days = EXCLUDED.detained_days, + defect_cnt = EXCLUDED.defect_cnt, + defect_cnt_days = EXCLUDED.defect_cnt_days, + etc_inspection_type = EXCLUDED.etc_inspection_type, + shponr = EXCLUDED.shponr, + ship_nm = EXCLUDED.ship_nm, + ship_type_cd = EXCLUDED.ship_type_cd, + ship_type_nm = EXCLUDED.ship_type_nm, + data_src = EXCLUDED.data_src, + un_port_cd = EXCLUDED.un_port_cd, + build_yy = EXCLUDED.build_yy; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getPscDefectUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + dataset_ver, defect_id, inspection_id, actn_one, actn_two, actn_thr, + actn_cd_one, actn_cd_two, actn_cd_thr, clfic_respsb_yn, + defect_cd, defect_cn, defect_iem_cd, detained_reason_defect, + main_defect_cd, main_defect_cn, defect_type_cd, + defect_type_nm, etc_actn, etc_pubc_engines_respsb, + pubc_engines_respsb, pubc_engines_respsb_cd, pubc_engines_respsb_yn, + acdnt_damg_yn + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ? 
+ ) + ON CONFLICT (defect_id, inspection_id) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + dataset_ver = EXCLUDED.dataset_ver, + actn_one = EXCLUDED.actn_one, + actn_two = EXCLUDED.actn_two, + actn_thr = EXCLUDED.actn_thr, + actn_cd_one = EXCLUDED.actn_cd_one, + actn_cd_two = EXCLUDED.actn_cd_two, + actn_cd_thr = EXCLUDED.actn_cd_thr, + clfic_respsb_yn = EXCLUDED.clfic_respsb_yn, + defect_cd = EXCLUDED.defect_cd, + defect_cn = EXCLUDED.defect_cn, + defect_iem_cd = EXCLUDED.defect_iem_cd, + detained_reason_defect = EXCLUDED.detained_reason_defect, + main_defect_cd = EXCLUDED.main_defect_cd, + main_defect_cn = EXCLUDED.main_defect_cn, + defect_type_cd = EXCLUDED.defect_type_cd, + defect_type_nm = EXCLUDED.defect_type_nm, + etc_actn = EXCLUDED.etc_actn, + etc_pubc_engines_respsb = EXCLUDED.etc_pubc_engines_respsb, + pubc_engines_respsb = EXCLUDED.pubc_engines_respsb, + pubc_engines_respsb_cd = EXCLUDED.pubc_engines_respsb_cd, + pubc_engines_respsb_yn = EXCLUDED.pubc_engines_respsb_yn, + acdnt_damg_yn = EXCLUDED.acdnt_damg_yn; + """.formatted(TARGET_SCHEMA, targetTable); + } + + public static String getPscAllCertificateUpsertSql(String targetTable) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + dataset_ver, cert_id, inspection_id, imo_no, + certf_nm_cd, certf_nm, issue_engines_cd, + issue_engines, etc_issue_engines, issue_ymd, + expry_ymd, last_inspection_ymd, inspection_engines_cd, + inspection_engines, etc_inspection_engines, recent_inspection_plc, + recent_inspection_plc_cd, inspection_engines_type, check_ymd, + insptr + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ?, ?, ?, + ? 
+ ) + ON CONFLICT (cert_id, inspection_id) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + dataset_ver = EXCLUDED.dataset_ver, + imo_no = EXCLUDED.imo_no, + certf_nm_cd = EXCLUDED.certf_nm_cd, + certf_nm = EXCLUDED.certf_nm, + issue_engines_cd = EXCLUDED.issue_engines_cd, + issue_engines = EXCLUDED.issue_engines, + etc_issue_engines = EXCLUDED.etc_issue_engines, + issue_ymd = EXCLUDED.issue_ymd, + expry_ymd = EXCLUDED.expry_ymd, + last_inspection_ymd = EXCLUDED.last_inspection_ymd, + inspection_engines_cd = EXCLUDED.inspection_engines_cd, + inspection_engines = EXCLUDED.inspection_engines, + etc_inspection_engines = EXCLUDED.etc_inspection_engines, + recent_inspection_plc = EXCLUDED.recent_inspection_plc, + recent_inspection_plc_cd = EXCLUDED.recent_inspection_plc_cd, + inspection_engines_type = EXCLUDED.inspection_engines_type, + check_ymd = EXCLUDED.check_ymd, + insptr = EXCLUDED.insptr; + """.formatted(TARGET_SCHEMA, targetTable); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscAllCertificateWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscAllCertificateWriter.java new file mode 100644 index 0000000..dd3fcf6 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscAllCertificateWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.psc.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.psc.entity.PscAllCertificateEntity; +import com.snp.batch.jobs.datasync.batch.psc.repository.PscRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class PscAllCertificateWriter extends BaseChunkedWriter { + private final PscRepository pscRepository; + + public PscAllCertificateWriter(PscRepository pscRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + 
super("PscAllCertificateEntity", transactionManager, subChunkSize); + this.pscRepository = pscRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + pscRepository.savePscAllCertificate(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscDefectWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscDefectWriter.java new file mode 100644 index 0000000..d92b8a4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscDefectWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.psc.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.psc.entity.PscDefectEntity; +import com.snp.batch.jobs.datasync.batch.psc.repository.PscRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class PscDefectWriter extends BaseChunkedWriter { + private final PscRepository pscRepository; + + public PscDefectWriter(PscRepository pscRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("PscDefectEntity", transactionManager, subChunkSize); + this.pscRepository = pscRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + pscRepository.savePscDefect(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscDetailWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscDetailWriter.java new file mode 100644 index 0000000..cffc29b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/psc/writer/PscDetailWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.psc.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import 
com.snp.batch.jobs.datasync.batch.psc.entity.PscDetailEntity; +import com.snp.batch.jobs.datasync.batch.psc.repository.PscRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class PscDetailWriter extends BaseChunkedWriter { + private final PscRepository pscRepository; + + public PscDetailWriter(PscRepository pscRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("PscDetailEntity", transactionManager, subChunkSize); + this.pscRepository = pscRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + pscRepository.savePscDetail(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/risk/config/RiskSyncJobConfig.java b/src/main/java/com/snp/batch/jobs/datasync/batch/risk/config/RiskSyncJobConfig.java new file mode 100644 index 0000000..90c5d57 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/risk/config/RiskSyncJobConfig.java @@ -0,0 +1,136 @@ +package com.snp.batch.jobs.datasync.batch.risk.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.common.util.BatchWriteListener; +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.GroupByExecutionIdChunkListener; +import com.snp.batch.common.util.GroupByExecutionIdPolicy; +import com.snp.batch.common.util.GroupByExecutionIdReadListener; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto; +import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity; +import com.snp.batch.jobs.datasync.batch.risk.processor.RiskProcessor; +import com.snp.batch.jobs.datasync.batch.risk.reader.RiskReader; +import com.snp.batch.jobs.datasync.batch.risk.repository.RiskRepository; +import com.snp.batch.jobs.datasync.batch.risk.writer.RiskWriter; +import 
lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class RiskSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final RiskRepository riskRepository; + private final DataSource batchDataSource; + private final DataSource businessDataSource; + private final JdbcTemplate businessJdbcTemplate; + private final int subChunkSize; + + /** + * 생성자 주입 + */ + public RiskSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + RiskRepository riskRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.riskRepository = riskRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.subChunkSize = subChunkSize; + } + + 
@Override + protected String getJobName() { + return "riskDataSyncJob"; + } + + @Override + protected String getStepName() { + return "riskSyncStep"; + } + + @Override + protected ItemReader createReader() { + return riskReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new RiskProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new RiskWriter(riskRepository, transactionManager, subChunkSize); + } + + // --- Risk Reader --- + + @Bean + @StepScope + public ItemReader riskReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new RiskReader(businessDataSource, tableMetaInfo); + } + + // --- Listeners --- + + @Bean + public BatchWriteListener riskWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceRisk); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + // --- Steps --- + + @Bean(name = "riskSyncStep") + public Step riskSyncStep() { + log.info("Step 생성: riskSyncStep"); + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) + .listener(new GroupByExecutionIdChunkListener()) + .listener(riskWriteListener()) + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(riskSyncStep()) + .build(); + } + + @Bean(name = "riskDataSyncJob") + public Job riskDataSyncJob() { + return job(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/risk/dto/RiskDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/risk/dto/RiskDto.java new file mode 100644 index 0000000..5479872 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/risk/dto/RiskDto.java @@ -0,0 +1,60 @@ +package 
package com.snp.batch.jobs.datasync.batch.risk.dto;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;

import java.time.LocalDateTime;

/**
 * Row-level DTO for ship risk indicator data read from the source risk table.
 *
 * Field declaration order is part of the contract: it drives the
 * {@code @AllArgsConstructor} parameter order, so do not reorder fields.
 * All indicator values are kept as raw strings exactly as stored at the source.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class RiskDto implements JobExecutionGroupable {

    // Source batch-execution grouping key (drives chunk boundaries).
    private Long jobExecutionId;

    // Vessel identity / modification timestamp.
    private String imoNo;
    private LocalDateTime lastMdfcnDt;
    private String riskDataMaint;

    // AIS-related indicators.
    private String aisNotrcvElpsDays;
    private String aisLwrnkDays;
    private String aisUpImoDesc;
    private String othrShipNmVoyYn;
    private String mmsiAnomMessage;
    private String recentDarkActv;

    // Port / voyage indicators.
    private String portPrtcll;
    private String portRisk;
    private String stsJob;
    private String driftChg;
    private String riskEvent;

    // Flag-state (nationality) and MOU performance indicators.
    private String ntnltyChg;
    private String ntnltyPrsMouPerf;
    private String ntnltyTkyMouPerf;
    private String ntnltyUscgMouPerf;
    private String uscgExclShipCert;

    // PSC inspection indicators.
    private String pscInspectionElpsHr;
    private String pscInspection;
    private String pscDefect;
    private String pscDetained;

    // Ownership / class / certification indicators.
    private String nowSmgrcEvdc;
    private String doccChg;
    private String nowClfic;
    private String clficStatusChg;
    private String pniInsrnc;
    private String shipNmChg;
    private String gboChg;
    private String vslage;
    private String ilglFshrViol;
    private String draftChg;
    private String recentSanctionPrtcll;
    private String snglShipVoy;
    private String fltsfty;
    private String fltPsc;
    private String spcInspectionOvdue;
    private String ownrUnk;
    private String rssPortCall;
    private String rssOwnrReg;
    private String rssSts;

    /** Grouping key required by {@link JobExecutionGroupable}. */
    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}
package com.snp.batch.jobs.datasync.batch.risk.entity;

import com.snp.batch.common.util.JobExecutionGroupable;
import lombok.*;
import lombok.experimental.SuperBuilder;

import java.time.LocalDateTime;

/**
 * Target-side entity for ship risk indicator data, written to both the risk
 * master table (keyed by imo_no) and the risk history table
 * (keyed by imo_no + last_mdfcn_dt).
 *
 * Field declaration order drives the {@code @AllArgsConstructor} parameter
 * order — keep {@code jobExecutionId} last and do not reorder.
 */
@Data
@SuperBuilder
@AllArgsConstructor
public class RiskEntity implements JobExecutionGroupable {

    // Vessel identity / modification timestamp.
    private String imoNo;
    private LocalDateTime lastMdfcnDt;
    private String riskDataMaint;

    // AIS-related indicators.
    private String aisNotrcvElpsDays;
    private String aisLwrnkDays;
    private String aisUpImoDesc;
    private String othrShipNmVoyYn;
    private String mmsiAnomMessage;
    private String recentDarkActv;

    // Port / voyage indicators.
    private String portPrtcll;
    private String portRisk;
    private String stsJob;
    private String driftChg;
    private String riskEvent;

    // Flag-state (nationality) and MOU performance indicators.
    private String ntnltyChg;
    private String ntnltyPrsMouPerf;
    private String ntnltyTkyMouPerf;
    private String ntnltyUscgMouPerf;
    private String uscgExclShipCert;

    // PSC inspection indicators.
    private String pscInspectionElpsHr;
    private String pscInspection;
    private String pscDefect;
    private String pscDetained;

    // Ownership / class / certification indicators.
    private String nowSmgrcEvdc;
    private String doccChg;
    private String nowClfic;
    private String clficStatusChg;
    private String pniInsrnc;
    private String shipNmChg;
    private String gboChg;
    private String vslage;
    private String ilglFshrViol;
    private String draftChg;
    private String recentSanctionPrtcll;
    private String snglShipVoy;
    private String fltsfty;
    private String fltPsc;
    private String spcInspectionOvdue;
    private String ownrUnk;
    private String rssPortCall;
    private String rssOwnrReg;
    private String rssSts;

    // Source batch-execution grouping key (drives chunk boundaries).
    private Long jobExecutionId;

    /** Grouping key required by {@link JobExecutionGroupable}. */
    @Override
    public Long getJobExecutionId() {
        return this.jobExecutionId;
    }
}
b/src/main/java/com/snp/batch/jobs/datasync/batch/risk/processor/RiskProcessor.java @@ -0,0 +1,58 @@ +package com.snp.batch.jobs.datasync.batch.risk.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto; +import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class RiskProcessor extends BaseProcessor { + @Override + protected RiskEntity processItem(RiskDto dto) throws Exception { + return RiskEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .imoNo(dto.getImoNo()) + .lastMdfcnDt(dto.getLastMdfcnDt()) + .riskDataMaint(dto.getRiskDataMaint()) + .aisNotrcvElpsDays(dto.getAisNotrcvElpsDays()) + .aisLwrnkDays(dto.getAisLwrnkDays()) + .aisUpImoDesc(dto.getAisUpImoDesc()) + .othrShipNmVoyYn(dto.getOthrShipNmVoyYn()) + .mmsiAnomMessage(dto.getMmsiAnomMessage()) + .recentDarkActv(dto.getRecentDarkActv()) + .portPrtcll(dto.getPortPrtcll()) + .portRisk(dto.getPortRisk()) + .stsJob(dto.getStsJob()) + .driftChg(dto.getDriftChg()) + .riskEvent(dto.getRiskEvent()) + .ntnltyChg(dto.getNtnltyChg()) + .ntnltyPrsMouPerf(dto.getNtnltyPrsMouPerf()) + .ntnltyTkyMouPerf(dto.getNtnltyTkyMouPerf()) + .ntnltyUscgMouPerf(dto.getNtnltyUscgMouPerf()) + .uscgExclShipCert(dto.getUscgExclShipCert()) + .pscInspectionElpsHr(dto.getPscInspectionElpsHr()) + .pscInspection(dto.getPscInspection()) + .pscDefect(dto.getPscDefect()) + .pscDetained(dto.getPscDetained()) + .nowSmgrcEvdc(dto.getNowSmgrcEvdc()) + .doccChg(dto.getDoccChg()) + .nowClfic(dto.getNowClfic()) + .clficStatusChg(dto.getClficStatusChg()) + .pniInsrnc(dto.getPniInsrnc()) + .shipNmChg(dto.getShipNmChg()) + .gboChg(dto.getGboChg()) + .vslage(dto.getVslage()) + .ilglFshrViol(dto.getIlglFshrViol()) + .draftChg(dto.getDraftChg()) + .recentSanctionPrtcll(dto.getRecentSanctionPrtcll()) + .snglShipVoy(dto.getSnglShipVoy()) + .fltsfty(dto.getFltsfty()) + .fltPsc(dto.getFltPsc()) + 
package com.snp.batch.jobs.datasync.batch.risk.reader;

import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

/**
 * Reads risk rows from the business (source) database one job-execution group
 * at a time: finds the next pending execution id, loads all of its rows into a
 * buffer, marks the group as in-progress, then drains the buffer row by row.
 * Returning {@code null} from {@link #read()} ends the step.
 */
@Slf4j
public class RiskReader implements ItemReader<RiskDto> { // NOTE(review): type argument restored — raw type in pasted source

    private final TableMetaInfo tableMetaInfo;
    private final JdbcTemplate businessJdbcTemplate;
    // Rows of the current execution group. ArrayDeque gives O(1) removal from
    // the head; the original ArrayList.remove(0) was O(n) per read, i.e. O(n^2)
    // to drain a group of n rows.
    private final Deque<RiskDto> buffer = new ArrayDeque<>();

    public RiskReader(@Qualifier("businessDataSource") DataSource businessDataSource,
                      TableMetaInfo tableMetaInfo) {
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.tableMetaInfo = tableMetaInfo;
    }

    @Override
    public RiskDto read() throws Exception {
        if (buffer.isEmpty()) {
            fetchNextGroup();
        }
        // pollFirst() returns null when no group remains, which ends the step.
        return buffer.pollFirst();
    }

    /**
     * Loads the next pending execution group into the buffer and flags it as
     * in-progress at the source. No-op when there is no pending group.
     */
    private void fetchNextGroup() {
        final Long nextTargetId;
        try {
            nextTargetId = businessJdbcTemplate.queryForObject(
                    CommonSql.getNextTargetQuery(tableMetaInfo.sourceRisk), Long.class);
        } catch (EmptyResultDataAccessException e) {
            // Expected when no pending group exists. Other DataAccessExceptions
            // now propagate instead of being silently swallowed (the original
            // caught Exception broadly, which hid genuine DB failures).
            return;
        }

        if (nextTargetId == null) {
            return;
        }

        log.info("[RiskReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
        String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceRisk);
        final Long targetId = nextTargetId;
        List<RiskDto> rows = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
            Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");

            return RiskDto.builder()
                    .jobExecutionId(targetId)
                    .imoNo(rs.getString("imo_no"))
                    .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null)
                    .riskDataMaint(rs.getString("risk_data_maint"))
                    .aisNotrcvElpsDays(rs.getString("ais_notrcv_elps_days"))
                    .aisLwrnkDays(rs.getString("ais_lwrnk_days"))
                    .aisUpImoDesc(rs.getString("ais_up_imo_desc"))
                    .othrShipNmVoyYn(rs.getString("othr_ship_nm_voy_yn"))
                    .mmsiAnomMessage(rs.getString("mmsi_anom_message"))
                    .recentDarkActv(rs.getString("recent_dark_actv"))
                    .portPrtcll(rs.getString("port_prtcll"))
                    .portRisk(rs.getString("port_risk"))
                    .stsJob(rs.getString("sts_job"))
                    .driftChg(rs.getString("drift_chg"))
                    .riskEvent(rs.getString("risk_event"))
                    .ntnltyChg(rs.getString("ntnlty_chg"))
                    .ntnltyPrsMouPerf(rs.getString("ntnlty_prs_mou_perf"))
                    .ntnltyTkyMouPerf(rs.getString("ntnlty_tky_mou_perf"))
                    .ntnltyUscgMouPerf(rs.getString("ntnlty_uscg_mou_perf"))
                    .uscgExclShipCert(rs.getString("uscg_excl_ship_cert"))
                    .pscInspectionElpsHr(rs.getString("psc_inspection_elps_hr"))
                    .pscInspection(rs.getString("psc_inspection"))
                    .pscDefect(rs.getString("psc_defect"))
                    .pscDetained(rs.getString("psc_detained"))
                    .nowSmgrcEvdc(rs.getString("now_smgrc_evdc"))
                    .doccChg(rs.getString("docc_chg"))
                    .nowClfic(rs.getString("now_clfic"))
                    .clficStatusChg(rs.getString("clfic_status_chg"))
                    .pniInsrnc(rs.getString("pni_insrnc"))
                    .shipNmChg(rs.getString("ship_nm_chg"))
                    .gboChg(rs.getString("gbo_chg"))
                    .vslage(rs.getString("vslage"))
                    .ilglFshrViol(rs.getString("ilgl_fshr_viol"))
                    .draftChg(rs.getString("draft_chg"))
                    .recentSanctionPrtcll(rs.getString("recent_sanction_prtcll"))
                    .snglShipVoy(rs.getString("sngl_ship_voy"))
                    .fltsfty(rs.getString("fltsfty"))
                    .fltPsc(rs.getString("flt_psc"))
                    .spcInspectionOvdue(rs.getString("spc_inspection_ovdue"))
                    .ownrUnk(rs.getString("ownr_unk"))
                    .rssPortCall(rs.getString("rss_port_call"))
                    .rssOwnrReg(rs.getString("rss_ownr_reg"))
                    .rssSts(rs.getString("rss_sts"))
                    .build();
        }, nextTargetId);
        buffer.addAll(rows);
        updateBatchProcessing(nextTargetId);
    }

    /** Marks the given execution group as in-progress at the source table. */
    private void updateBatchProcessing(Long targetExecutionId) {
        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceRisk);
        businessJdbcTemplate.update(sql, targetExecutionId);
    }
}
package com.snp.batch.jobs.datasync.batch.risk.repository;

import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity;

import java.util.List;

/**
 * Persistence contract for {@link RiskEntity}.
 *
 * Implemented by {@code RiskRepositoryImpl} (JdbcTemplate based).
 */
public interface RiskRepository {

    /** Upserts the given rows into the risk master table (keyed by imo_no). */
    void saveRisk(List<RiskEntity> riskEntityList);

    /** Upserts the given rows into the risk history table (keyed by imo_no + last_mdfcn_dt). */
    void saveRiskHistory(List<RiskEntity> riskEntityList);
}
+import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +import javax.sql.DataSource; +import java.sql.PreparedStatement; +import java.sql.Timestamp; +import java.util.List; + +/** + * RiskEntity Repository (JdbcTemplate 기반) + */ +@Slf4j +@Repository("riskRepository") +public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository implements RiskRepository { + + private DataSource batchDataSource; + private DataSource businessDataSource; + private final TableMetaInfo tableMetaInfo; + + public RiskRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + + super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource)); + + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.tableMetaInfo = tableMetaInfo; + } + + @Override + protected String getTableName() { + return null; + } + + @Override + protected RowMapper getRowMapper() { + return null; + } + + @Override + protected Long extractId(RiskEntity entity) { + return null; + } + + @Override + protected String getInsertSql() { + return null; + } + + @Override + protected String getUpdateSql() { + return null; + } + + @Override + protected void setInsertParameters(PreparedStatement ps, RiskEntity entity) throws Exception { + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, RiskEntity entity) throws Exception { + } + + @Override + protected String getEntityName() { + return null; + } + + @Override + public void saveRisk(List riskEntityList) { + String sql = RiskSql.getRiskUpsertSql(tableMetaInfo.targetTbShipRiskInfo, "imo_no"); + if (riskEntityList == null || riskEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, 
riskEntityList, riskEntityList.size(), + (ps, entity) -> { + try { + bindRisk(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size()); + } + + @Override + public void saveRiskHistory(List riskEntityList) { + String sql = RiskSql.getRiskUpsertSql(tableMetaInfo.targetTbShipRiskHstry, "imo_no, last_mdfcn_dt"); + if (riskEntityList == null || riskEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, riskEntityList, riskEntityList.size(), + (ps, entity) -> { + try { + bindRisk(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size()); + } + + public void bindRisk(PreparedStatement pstmt, RiskEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getImoNo()); // 2. imo_no + pstmt.setTimestamp(idx++, entity.getLastMdfcnDt() != null ? Timestamp.valueOf(entity.getLastMdfcnDt()) : null); // 3. last_mdfcn_dt + pstmt.setString(idx++, entity.getRiskDataMaint()); // 4. risk_data_maint + pstmt.setString(idx++, entity.getAisNotrcvElpsDays()); // 5. ais_notrcv_elps_days + pstmt.setString(idx++, entity.getAisLwrnkDays()); // 6. ais_lwrnk_days + pstmt.setString(idx++, entity.getAisUpImoDesc()); // 7. ais_up_imo_desc + pstmt.setString(idx++, entity.getOthrShipNmVoyYn()); // 8. othr_ship_nm_voy_yn + pstmt.setString(idx++, entity.getMmsiAnomMessage()); // 9. mmsi_anom_message + pstmt.setString(idx++, entity.getRecentDarkActv()); // 10. recent_dark_actv + pstmt.setString(idx++, entity.getPortPrtcll()); // 11. port_prtcll + pstmt.setString(idx++, entity.getPortRisk()); // 12. port_risk + pstmt.setString(idx++, entity.getStsJob()); // 13. 
sts_job + pstmt.setString(idx++, entity.getDriftChg()); // 14. drift_chg + pstmt.setString(idx++, entity.getRiskEvent()); // 15. risk_event + pstmt.setString(idx++, entity.getNtnltyChg()); // 16. ntnlty_chg + pstmt.setString(idx++, entity.getNtnltyPrsMouPerf()); // 17. ntnlty_prs_mou_perf + pstmt.setString(idx++, entity.getNtnltyTkyMouPerf()); // 18. ntnlty_tky_mou_perf + pstmt.setString(idx++, entity.getNtnltyUscgMouPerf()); // 19. ntnlty_uscg_mou_perf + pstmt.setString(idx++, entity.getUscgExclShipCert()); // 20. uscg_excl_ship_cert + pstmt.setString(idx++, entity.getPscInspectionElpsHr()); // 21. psc_inspection_elps_hr + pstmt.setString(idx++, entity.getPscInspection()); // 22. psc_inspection + pstmt.setString(idx++, entity.getPscDefect()); // 23. psc_defect + pstmt.setString(idx++, entity.getPscDetained()); // 24. psc_detained + pstmt.setString(idx++, entity.getNowSmgrcEvdc()); // 25. now_smgrc_evdc + pstmt.setString(idx++, entity.getDoccChg()); // 26. docc_chg + pstmt.setString(idx++, entity.getNowClfic()); // 27. now_clfic + pstmt.setString(idx++, entity.getClficStatusChg()); // 28. clfic_status_chg + pstmt.setString(idx++, entity.getPniInsrnc()); // 29. pni_insrnc + pstmt.setString(idx++, entity.getShipNmChg()); // 30. ship_nm_chg + pstmt.setString(idx++, entity.getGboChg()); // 31. gbo_chg + pstmt.setString(idx++, entity.getVslage()); // 32. vslage + pstmt.setString(idx++, entity.getIlglFshrViol()); // 33. ilgl_fshr_viol + pstmt.setString(idx++, entity.getDraftChg()); // 34. draft_chg + pstmt.setString(idx++, entity.getRecentSanctionPrtcll()); // 35. recent_sanction_prtcll + pstmt.setString(idx++, entity.getSnglShipVoy()); // 36. sngl_ship_voy + pstmt.setString(idx++, entity.getFltsfty()); // 37. fltsfty + pstmt.setString(idx++, entity.getFltPsc()); // 38. flt_psc + pstmt.setString(idx++, entity.getSpcInspectionOvdue()); // 39. spc_inspection_ovdue + pstmt.setString(idx++, entity.getOwnrUnk()); // 40. 
ownr_unk + pstmt.setString(idx++, entity.getRssPortCall()); // 41. rss_port_call + pstmt.setString(idx++, entity.getRssOwnrReg()); // 42. rss_ownr_reg + pstmt.setString(idx++, entity.getRssSts()); // 43. rss_sts + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/risk/repository/RiskSql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/risk/repository/RiskSql.java new file mode 100644 index 0000000..bf62205 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/risk/repository/RiskSql.java @@ -0,0 +1,90 @@ +package com.snp.batch.jobs.datasync.batch.risk.repository; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +@Component +public class RiskSql { + private static String TARGET_SCHEMA; + public RiskSql(@Value("${app.batch.target-schema.name}") String targetSchema) { + TARGET_SCHEMA = targetSchema; + } + + public static String getRiskUpsertSql(String targetTable, String targetIndex) { + return """ + INSERT INTO %s.%s ( + crt_dt, creatr_id, + imo_no, last_mdfcn_dt, risk_data_maint, ais_notrcv_elps_days, + ais_lwrnk_days, ais_up_imo_desc, othr_ship_nm_voy_yn, mmsi_anom_message, + recent_dark_actv, port_prtcll, port_risk, sts_job, + drift_chg, risk_event, ntnlty_chg, ntnlty_prs_mou_perf, + ntnlty_tky_mou_perf, ntnlty_uscg_mou_perf, uscg_excl_ship_cert, psc_inspection_elps_hr, + psc_inspection, psc_defect, psc_detained, now_smgrc_evdc, + docc_chg, now_clfic, clfic_status_chg, pni_insrnc, + ship_nm_chg, gbo_chg, vslage, ilgl_fshr_viol, + draft_chg, recent_sanction_prtcll, sngl_ship_voy, fltsfty, + flt_psc, spc_inspection_ovdue, ownr_unk, rss_port_call, + rss_ownr_reg, rss_sts + ) + VALUES ( + CURRENT_TIMESTAMP, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ?, ? 
+ ) + ON CONFLICT (%s) + DO UPDATE SET + mdfcn_dt = CURRENT_TIMESTAMP, + mdfr_id = 'SYSTEM', + last_mdfcn_dt = EXCLUDED.last_mdfcn_dt, + risk_data_maint = EXCLUDED.risk_data_maint, + ais_notrcv_elps_days = EXCLUDED.ais_notrcv_elps_days, + ais_lwrnk_days = EXCLUDED.ais_lwrnk_days, + ais_up_imo_desc = EXCLUDED.ais_up_imo_desc, + othr_ship_nm_voy_yn = EXCLUDED.othr_ship_nm_voy_yn, + mmsi_anom_message = EXCLUDED.mmsi_anom_message, + recent_dark_actv = EXCLUDED.recent_dark_actv, + port_prtcll = EXCLUDED.port_prtcll, + port_risk = EXCLUDED.port_risk, + sts_job = EXCLUDED.sts_job, + drift_chg = EXCLUDED.drift_chg, + risk_event = EXCLUDED.risk_event, + ntnlty_chg = EXCLUDED.ntnlty_chg, + ntnlty_prs_mou_perf = EXCLUDED.ntnlty_prs_mou_perf, + ntnlty_tky_mou_perf = EXCLUDED.ntnlty_tky_mou_perf, + ntnlty_uscg_mou_perf = EXCLUDED.ntnlty_uscg_mou_perf, + uscg_excl_ship_cert = EXCLUDED.uscg_excl_ship_cert, + psc_inspection_elps_hr = EXCLUDED.psc_inspection_elps_hr, + psc_inspection = EXCLUDED.psc_inspection, + psc_defect = EXCLUDED.psc_defect, + psc_detained = EXCLUDED.psc_detained, + now_smgrc_evdc = EXCLUDED.now_smgrc_evdc, + docc_chg = EXCLUDED.docc_chg, + now_clfic = EXCLUDED.now_clfic, + clfic_status_chg = EXCLUDED.clfic_status_chg, + pni_insrnc = EXCLUDED.pni_insrnc, + ship_nm_chg = EXCLUDED.ship_nm_chg, + gbo_chg = EXCLUDED.gbo_chg, + vslage = EXCLUDED.vslage, + ilgl_fshr_viol = EXCLUDED.ilgl_fshr_viol, + draft_chg = EXCLUDED.draft_chg, + recent_sanction_prtcll = EXCLUDED.recent_sanction_prtcll, + sngl_ship_voy = EXCLUDED.sngl_ship_voy, + fltsfty = EXCLUDED.fltsfty, + flt_psc = EXCLUDED.flt_psc, + spc_inspection_ovdue = EXCLUDED.spc_inspection_ovdue, + ownr_unk = EXCLUDED.ownr_unk, + rss_port_call = EXCLUDED.rss_port_call, + rss_ownr_reg = EXCLUDED.rss_ownr_reg, + rss_sts = EXCLUDED.rss_sts; + """.formatted(TARGET_SCHEMA, targetTable, targetIndex); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/risk/writer/RiskWriter.java 
package com.snp.batch.jobs.datasync.batch.risk.writer;

import com.snp.batch.common.batch.writer.BaseChunkedWriter;
import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity;
import com.snp.batch.jobs.datasync.batch.risk.repository.RiskRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.transaction.PlatformTransactionManager;

import java.util.List;

/**
 * Chunked writer for risk entities. Each sub-chunk is written twice: once to
 * the risk master table and once to the risk history table.
 */
@Slf4j
public class RiskWriter extends BaseChunkedWriter<RiskEntity> { // NOTE(review): generic parameter restored — raw type in pasted source

    private final RiskRepository riskRepository;

    /**
     * @param riskRepository     target repository performing the upserts
     * @param transactionManager transaction manager used by the chunked base writer
     * @param subChunkSize       number of items persisted per sub-chunk transaction
     */
    public RiskWriter(RiskRepository riskRepository,
                      PlatformTransactionManager transactionManager,
                      int subChunkSize) {
        super("RiskEntity", transactionManager, subChunkSize);
        this.riskRepository = riskRepository;
    }

    /**
     * Persists one sub-chunk to both the master and the history table.
     * Empty chunks are skipped.
     */
    @Override
    protected void writeItems(List<RiskEntity> items) throws Exception {
        if (items.isEmpty()) {
            return;
        }
        riskRepository.saveRisk(items);
        riskRepository.saveRiskHistory(items);
    }
}
com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.ClassHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.CompanyVesselRelationshipsDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.CrewListDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.DarkActivityConfirmedDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.FlagHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.GroupBeneficialOwnerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.IceClassDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.TechnicalManagerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.ThrustersDto; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.BareboatCharterHistoryEntity; +import 
com.snp.batch.jobs.datasync.batch.ship.entity.CallsignAndMmsiHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ClassHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.CompanyVesselRelationshipsEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.CrewListEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.DarkActivityConfirmedEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.FlagHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.GroupBeneficialOwnerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.IceClassEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.NameHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.OperatorHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.OwnerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.PandIHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SafetyManagementCertificateHistEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipAddInfoEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipManagerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SisterShipLinksEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SpecialFeatureEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.StatusHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.StowageCommodityEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesHistoryUniqueEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.TbCompanyDetailEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.TechnicalManagerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ThrustersEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipInfoMstEntity; +import 
com.snp.batch.jobs.datasync.batch.ship.processor.BareboatCharterHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.CallsignAndMmsiHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.ClassHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.CompanyVesselRelationshipsProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.CrewListProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.DarkActivityConfirmedProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.FlagHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.GroupBeneficialOwnerHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.IceClassProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.NameHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.OperatorHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.OwnerHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.PandIHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.SafetyManagementCertificateHistProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.ShipAddInfoProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.ShipManagerHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.SisterShipLinksProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.SpecialFeatureProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.StatusHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.StowageCommodityProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.SurveyDatesProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.SurveyDatesHistoryUniqueProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.TbCompanyDetailProcessor; +import 
com.snp.batch.jobs.datasync.batch.ship.processor.TechnicalManagerHistoryProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.ThrustersProcessor; +import com.snp.batch.jobs.datasync.batch.ship.processor.ShipDataProcessor; +import com.snp.batch.jobs.datasync.batch.ship.reader.BareboatCharterHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.CallsignAndMmsiHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.ClassHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.CompanyVesselRelationshipsReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.CrewListReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.DarkActivityConfirmedReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.FlagHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.GroupBeneficialOwnerHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.IceClassReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.NameHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.OperatorHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.OwnerHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.PandIHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.SafetyManagementCertificateHistReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.ShipAddInfoReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.ShipManagerHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.SisterShipLinksReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.SpecialFeatureReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.StatusHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.StowageCommodityReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.SurveyDatesReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.SurveyDatesHistoryUniqueReader; +import 
com.snp.batch.jobs.datasync.batch.ship.reader.TbCompanyDetailReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.TechnicalManagerHistoryReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.ThrustersReader; +import com.snp.batch.jobs.datasync.batch.ship.reader.ShipDataReader; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipDataSql; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import com.snp.batch.jobs.datasync.batch.ship.writer.BareboatCharterHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.CallsignAndMmsiHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.ClassHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.CompanyVesselRelationshipsWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.CrewListWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.DarkActivityConfirmedWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.FlagHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.GroupBeneficialOwnerHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.IceClassWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.NameHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.OperatorHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.OwnerHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.PandIHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.SafetyManagementCertificateHistWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.ShipAddInfoWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.ShipManagerHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.SisterShipLinksWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.SpecialFeatureWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.StatusHistoryWriter; +import 
com.snp.batch.jobs.datasync.batch.ship.writer.StowageCommodityWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.SurveyDatesWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.SurveyDatesHistoryUniqueWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.TbCompanyDetailWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.TechnicalManagerHistoryWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.ThrustersWriter; +import com.snp.batch.jobs.datasync.batch.ship.writer.ShipDataWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; + +import javax.sql.DataSource; + +@Slf4j +@Configuration +public class ShipDetailSyncJobConfig extends BaseJobConfig { + private final TableMetaInfo tableMetaInfo; + private final ShipRepository shipRepository; + private final DataSource batchDataSource; // ⭐ 1. 필드 선언 + private final DataSource businessDataSource; // ⭐ 1. 
필드 선언 + private final JdbcTemplate businessJdbcTemplate; // ⭐ 템플릿 필드 추가 + private final int subChunkSize; + + /** + * 생성자 주입 + */ + public ShipDetailSyncJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + ShipRepository shipRepository, + TableMetaInfo tableMetaInfo, + @Qualifier("batchDataSource") DataSource batchDataSource, // ⭐ 2. batchDataSource 주입 + @Qualifier("businessDataSource") DataSource businessDataSource, // ⭐ 2. businessDataSource 주입 + @Value("${app.batch.sub-chunk-size:5000}") int subChunkSize + ) { + super(jobRepository, transactionManager); + this.shipRepository = shipRepository; + this.tableMetaInfo = tableMetaInfo; + this.batchDataSource = batchDataSource; // ⭐ 3. 필드에 할당 + this.businessDataSource = businessDataSource; // ⭐ 3. 필드에 할당 + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); // ⭐ 초기화 + this.subChunkSize = subChunkSize; + } + + @Override + protected String getJobName() { + return "snpMdaDataSyncJob"; + } + + @Override + protected String getStepName() { + return "snpShipDetailSyncStep"; + } + + @Override + protected ItemReader createReader() { + return shipDataReader(businessDataSource, tableMetaInfo); + } + + @Bean + @StepScope + public ItemReader shipDataReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new ShipDataReader(businessDataSource, tableMetaInfo); + } + + @Override + protected ItemProcessor createProcessor() { + return new ShipDataProcessor(); + } + + @Override + protected ItemWriter createWriter() { + return new ShipDataWriter(shipRepository, transactionManager, subChunkSize); + } + + // --- ShipAddInfo Step --- + + @Bean + @StepScope + public ItemReader shipAddInfoReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new ShipAddInfoReader(businessDataSource, tableMetaInfo); + } + + // --- BareboatCharterHistory Step --- + + @Bean + @StepScope + 
public ItemReader bareboatCharterHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new BareboatCharterHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- CallsignAndMmsiHistory Step --- + + @Bean + @StepScope + public ItemReader callsignAndMmsiHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new CallsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- ClassHistory Step --- + + @Bean + @StepScope + public ItemReader classHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new ClassHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- CompanyVesselRelationships Step --- + + @Bean + @StepScope + public ItemReader companyVesselRelationshipsReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new CompanyVesselRelationshipsReader(businessDataSource, tableMetaInfo); + } + + // --- CrewList Step --- + + @Bean + @StepScope + public ItemReader crewListReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new CrewListReader(businessDataSource, tableMetaInfo); + } + + // --- DarkActivityConfirmed Step --- + + @Bean + @StepScope + public ItemReader darkActivityConfirmedReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new DarkActivityConfirmedReader(businessDataSource, tableMetaInfo); + } + + // --- FlagHistory Step --- + + @Bean + @StepScope + public ItemReader flagHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new FlagHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- GroupBeneficialOwnerHistory Step --- + + @Bean + @StepScope + public 
ItemReader groupBeneficialOwnerHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new GroupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- IceClass Step --- + + @Bean + @StepScope + public ItemReader iceClassReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new IceClassReader(businessDataSource, tableMetaInfo); + } + + // --- NameHistory Step --- + + @Bean + @StepScope + public ItemReader nameHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new NameHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- OperatorHistory Step --- + + @Bean + @StepScope + public ItemReader operatorHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new OperatorHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- OwnerHistory Step --- + + @Bean + @StepScope + public ItemReader ownerHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new OwnerHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- PandIHistory Step --- + + @Bean + @StepScope + public ItemReader pandIHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new PandIHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- SafetyManagementCertificateHist Step --- + + @Bean + @StepScope + public ItemReader safetyManagementCertificateHistReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new SafetyManagementCertificateHistReader(businessDataSource, tableMetaInfo); + } + + // --- ShipManagerHistory Step --- + + @Bean + @StepScope + public ItemReader shipManagerHistoryReader( + 
@Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new ShipManagerHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- SisterShipLinks Step --- + + @Bean + @StepScope + public ItemReader sisterShipLinksReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new SisterShipLinksReader(businessDataSource, tableMetaInfo); + } + + // --- SpecialFeature Step --- + + @Bean + @StepScope + public ItemReader specialFeatureReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new SpecialFeatureReader(businessDataSource, tableMetaInfo); + } + + // --- StatusHistory Step --- + + @Bean + @StepScope + public ItemReader statusHistoryReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new StatusHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- StowageCommodity Step --- + + @Bean + @StepScope + public ItemReader stowageCommodityReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new StowageCommodityReader(businessDataSource, tableMetaInfo); + } + + // --- SurveyDates Step --- + + @Bean + @StepScope + public ItemReader surveyDatesReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new SurveyDatesReader(businessDataSource, tableMetaInfo); + } + + // --- SurveyDatesHistoryUnique Step --- + + @Bean + @StepScope + public ItemReader surveyDatesHistoryUniqueReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new SurveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo); + } + + // --- TechnicalManagerHistory Step --- + + @Bean + @StepScope + public ItemReader technicalManagerHistoryReader( + @Qualifier("businessDataSource") 
DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new TechnicalManagerHistoryReader(businessDataSource, tableMetaInfo); + } + + // --- Thrusters Step --- + + @Bean + @StepScope + public ItemReader thrustersReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new ThrustersReader(businessDataSource, tableMetaInfo); + } + + // --- TbCompanyDetail Step --- + + @Bean + @StepScope + public ItemReader tbCompanyDetailReader( + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + return new TbCompanyDetailReader(businessDataSource, tableMetaInfo); + } + + // --- Listeners --- + @Bean + public BatchWriteListener shipWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceShipDetailData); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener ownerHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceOwnerHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener shipAddInfoWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceAdditionalShipsData); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener bareboatCharterHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceBareboatCharterHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener callsignAndMmsiHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCallsignAndMmsiHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener classHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceClassHistory); + return new 
BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener companyVesselRelationshipsWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCompanyVesselRelationships); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener crewListWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCrewList); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener darkActivityConfirmedWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceDarkActivityConfirmed); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener flagHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFlagHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener groupBeneficialOwnerHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener iceClassWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceIceClass); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener nameHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceNameHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener operatorHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceOperatorHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener pandIHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePandiHistory); + 
return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener safetyManagementCertificateHistWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSafetyManagementCertificateHist); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener shipManagerHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceShipManagerHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener sisterShipLinksWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSisterShipLinks); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener specialFeatureWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSpecialFeature); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener statusHistoryWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStatusHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener stowageCommodityWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStowageCommodity); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener surveyDatesWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSurveyDates); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener surveyDatesHistoryUniqueWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener technicalManagerHistoryWriteListener() { + String sql = 
CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTechnicalManagerHistory); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener thrustersWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceThrusters); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + @Bean + public BatchWriteListener tbCompanyDetailWriteListener() { + String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTbCompanyDetail); + return new BatchWriteListener<>(businessJdbcTemplate, sql); + } + + // --- Steps --- + + @Bean(name = "snpShipDetailSyncStep") + public Step snpShipDetailSyncStep() { + return new StepBuilder(getStepName(), jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(createReader()) + .processor(createProcessor()) + .writer(createWriter()) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 (ThreadLocal 설정) + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 (ThreadLocal 정리) + .listener(shipWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "ownerHistorySyncStep") + public Step ownerHistorySyncStep() { + log.info("Step 생성: ownerHistorySyncStep"); + return new StepBuilder("ownerHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(ownerHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new OwnerHistoryProcessor()) + .writer(new OwnerHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(ownerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "shipAddInfoSyncStep") + public Step shipAddInfoSyncStep() { + log.info("Step 생성: shipAddInfoSyncStep"); + return new StepBuilder("shipAddInfoSyncStep", jobRepository) + .chunk(new 
GroupByExecutionIdPolicy(), transactionManager) + .reader(shipAddInfoReader(businessDataSource, tableMetaInfo)) + .processor(new ShipAddInfoProcessor()) + .writer(new ShipAddInfoWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(shipAddInfoWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "bareboatCharterHistorySyncStep") + public Step bareboatCharterHistorySyncStep() { + log.info("Step 생성: bareboatCharterHistorySyncStep"); + return new StepBuilder("bareboatCharterHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(bareboatCharterHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new BareboatCharterHistoryProcessor()) + .writer(new BareboatCharterHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(bareboatCharterHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "callsignAndMmsiHistorySyncStep") + public Step callsignAndMmsiHistorySyncStep() { + log.info("Step 생성: callsignAndMmsiHistorySyncStep"); + return new StepBuilder("callsignAndMmsiHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(callsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new CallsignAndMmsiHistoryProcessor()) + .writer(new CallsignAndMmsiHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(callsignAndMmsiHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "classHistorySyncStep") + public Step classHistorySyncStep() 
{ + log.info("Step 생성: classHistorySyncStep"); + return new StepBuilder("classHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(classHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new ClassHistoryProcessor()) + .writer(new ClassHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(classHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "companyVesselRelationshipsSyncStep") + public Step companyVesselRelationshipsSyncStep() { + log.info("Step 생성: companyVesselRelationshipsSyncStep"); + return new StepBuilder("companyVesselRelationshipsSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(companyVesselRelationshipsReader(businessDataSource, tableMetaInfo)) + .processor(new CompanyVesselRelationshipsProcessor()) + .writer(new CompanyVesselRelationshipsWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(companyVesselRelationshipsWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "crewListSyncStep") + public Step crewListSyncStep() { + log.info("Step 생성: crewListSyncStep"); + return new StepBuilder("crewListSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(crewListReader(businessDataSource, tableMetaInfo)) + .processor(new CrewListProcessor()) + .writer(new CrewListWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(crewListWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = 
"darkActivityConfirmedSyncStep") + public Step darkActivityConfirmedSyncStep() { + log.info("Step 생성: darkActivityConfirmedSyncStep"); + return new StepBuilder("darkActivityConfirmedSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(darkActivityConfirmedReader(businessDataSource, tableMetaInfo)) + .processor(new DarkActivityConfirmedProcessor()) + .writer(new DarkActivityConfirmedWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(darkActivityConfirmedWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "flagHistorySyncStep") + public Step flagHistorySyncStep() { + log.info("Step 생성: flagHistorySyncStep"); + return new StepBuilder("flagHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(flagHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new FlagHistoryProcessor()) + .writer(new FlagHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(flagHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "groupBeneficialOwnerHistorySyncStep") + public Step groupBeneficialOwnerHistorySyncStep() { + log.info("Step 생성: groupBeneficialOwnerHistorySyncStep"); + return new StepBuilder("groupBeneficialOwnerHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(groupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new GroupBeneficialOwnerHistoryProcessor()) + .writer(new GroupBeneficialOwnerHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new 
GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(groupBeneficialOwnerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "iceClassSyncStep") + public Step iceClassSyncStep() { + log.info("Step 생성: iceClassSyncStep"); + return new StepBuilder("iceClassSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(iceClassReader(businessDataSource, tableMetaInfo)) + .processor(new IceClassProcessor()) + .writer(new IceClassWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(iceClassWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "nameHistorySyncStep") + public Step nameHistorySyncStep() { + log.info("Step 생성: nameHistorySyncStep"); + return new StepBuilder("nameHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(nameHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new NameHistoryProcessor()) + .writer(new NameHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(nameHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "operatorHistorySyncStep") + public Step operatorHistorySyncStep() { + log.info("Step 생성: operatorHistorySyncStep"); + return new StepBuilder("operatorHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(operatorHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new OperatorHistoryProcessor()) + .writer(new OperatorHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) 
// Chunk 리스너 + .listener(operatorHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "pandIHistorySyncStep") + public Step pandIHistorySyncStep() { + log.info("Step 생성: pandIHistorySyncStep"); + return new StepBuilder("pandIHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(pandIHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new PandIHistoryProcessor()) + .writer(new PandIHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(pandIHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "safetyManagementCertificateHistSyncStep") + public Step safetyManagementCertificateHistSyncStep() { + log.info("Step 생성: safetyManagementCertificateHistSyncStep"); + return new StepBuilder("safetyManagementCertificateHistSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(safetyManagementCertificateHistReader(businessDataSource, tableMetaInfo)) + .processor(new SafetyManagementCertificateHistProcessor()) + .writer(new SafetyManagementCertificateHistWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(safetyManagementCertificateHistWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "shipManagerHistorySyncStep") + public Step shipManagerHistorySyncStep() { + log.info("Step 생성: shipManagerHistorySyncStep"); + return new StepBuilder("shipManagerHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(shipManagerHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new ShipManagerHistoryProcessor()) + .writer(new 
ShipManagerHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(shipManagerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "sisterShipLinksSyncStep") + public Step sisterShipLinksSyncStep() { + log.info("Step 생성: sisterShipLinksSyncStep"); + return new StepBuilder("sisterShipLinksSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(sisterShipLinksReader(businessDataSource, tableMetaInfo)) + .processor(new SisterShipLinksProcessor()) + .writer(new SisterShipLinksWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(sisterShipLinksWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "specialFeatureSyncStep") + public Step specialFeatureSyncStep() { + log.info("Step 생성: specialFeatureSyncStep"); + return new StepBuilder("specialFeatureSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(specialFeatureReader(businessDataSource, tableMetaInfo)) + .processor(new SpecialFeatureProcessor()) + .writer(new SpecialFeatureWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(specialFeatureWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "statusHistorySyncStep") + public Step statusHistorySyncStep() { + log.info("Step 생성: statusHistorySyncStep"); + return new StepBuilder("statusHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(statusHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new 
StatusHistoryProcessor()) + .writer(new StatusHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(statusHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "stowageCommoditySyncStep") + public Step stowageCommoditySyncStep() { + log.info("Step 생성: stowageCommoditySyncStep"); + return new StepBuilder("stowageCommoditySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(stowageCommodityReader(businessDataSource, tableMetaInfo)) + .processor(new StowageCommodityProcessor()) + .writer(new StowageCommodityWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(stowageCommodityWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "surveyDatesSyncStep") + public Step surveyDatesSyncStep() { + log.info("Step 생성: surveyDatesSyncStep"); + return new StepBuilder("surveyDatesSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(surveyDatesReader(businessDataSource, tableMetaInfo)) + .processor(new SurveyDatesProcessor()) + .writer(new SurveyDatesWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(surveyDatesWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "surveyDatesHistoryUniqueSyncStep") + public Step surveyDatesHistoryUniqueSyncStep() { + log.info("Step 생성: surveyDatesHistoryUniqueSyncStep"); + return new StepBuilder("surveyDatesHistoryUniqueSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + 
.reader(surveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo)) + .processor(new SurveyDatesHistoryUniqueProcessor()) + .writer(new SurveyDatesHistoryUniqueWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(surveyDatesHistoryUniqueWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "technicalManagerHistorySyncStep") + public Step technicalManagerHistorySyncStep() { + log.info("Step 생성: technicalManagerHistorySyncStep"); + return new StepBuilder("technicalManagerHistorySyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(technicalManagerHistoryReader(businessDataSource, tableMetaInfo)) + .processor(new TechnicalManagerHistoryProcessor()) + .writer(new TechnicalManagerHistoryWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(technicalManagerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "thrustersSyncStep") + public Step thrustersSyncStep() { + log.info("Step 생성: thrustersSyncStep"); + return new StepBuilder("thrustersSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(thrustersReader(businessDataSource, tableMetaInfo)) + .processor(new ThrustersProcessor()) + .writer(new ThrustersWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(thrustersWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Bean(name = "tbCompanyDetailSyncStep") + public Step tbCompanyDetailSyncStep() { + log.info("Step 생성: tbCompanyDetailSyncStep"); + return new 
StepBuilder("tbCompanyDetailSyncStep", jobRepository) + .chunk(new GroupByExecutionIdPolicy(), transactionManager) + .reader(tbCompanyDetailReader(businessDataSource, tableMetaInfo)) + .processor(new TbCompanyDetailProcessor()) + .writer(new TbCompanyDetailWriter(shipRepository, transactionManager, subChunkSize)) + .listener(new GroupByExecutionIdReadListener()) // Reader 리스너 + .listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 + .listener(tbCompanyDetailWriteListener()) // Write 완료 후 batch_flag 업데이트 + .build(); + } + + @Override + protected Job createJobFlow(JobBuilder jobBuilder) { + return jobBuilder + .start(snpShipDetailSyncStep()) + .next(shipAddInfoSyncStep()) + .next(bareboatCharterHistorySyncStep()) + .next(callsignAndMmsiHistorySyncStep()) + .next(classHistorySyncStep()) + .next(companyVesselRelationshipsSyncStep()) + .next(crewListSyncStep()) + .next(darkActivityConfirmedSyncStep()) + .next(flagHistorySyncStep()) + .next(groupBeneficialOwnerHistorySyncStep()) + .next(iceClassSyncStep()) + .next(nameHistorySyncStep()) + .next(operatorHistorySyncStep()) + .next(ownerHistorySyncStep()) + .next(pandIHistorySyncStep()) + .next(safetyManagementCertificateHistSyncStep()) + .next(shipManagerHistorySyncStep()) + .next(sisterShipLinksSyncStep()) + .next(specialFeatureSyncStep()) + .next(statusHistorySyncStep()) + .next(stowageCommoditySyncStep()) + .next(surveyDatesSyncStep()) + .next(surveyDatesHistoryUniqueSyncStep()) + .next(technicalManagerHistorySyncStep()) + .next(thrustersSyncStep()) + .next(tbCompanyDetailSyncStep()) + .build(); + } + + @Bean(name = "snpMdaDataSyncJob") + public Job snpMdaDataSyncJob() { + return job(); + } + +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/BareboatCharterHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/BareboatCharterHistoryDto.java new file mode 100644 index 0000000..79263d6 --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/BareboatCharterHistoryDto.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class BareboatCharterHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String bbctrSeq; + private String efectStaDay; + private String bbctrCompanyCd; + private String bbctrCompany; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CallsignAndMmsiHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CallsignAndMmsiHistoryDto.java new file mode 100644 index 0000000..5c505ef --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CallsignAndMmsiHistoryDto.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class CallsignAndMmsiHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipIdntfSeq; + private String efectStaDay; + private String clsgnNo; + private String mmsiNo; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ClassHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ClassHistoryDto.java new file mode 100644 index 0000000..3d0751f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ClassHistoryDto.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import 
com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class ClassHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String clficHstrySeq; + private String efectStaDay; + private String clficCd; + private String clficId; + private String clficAstnNm; + private String clficHasYn; + private String nowYn; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CompanyVesselRelationshipsDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CompanyVesselRelationshipsDto.java new file mode 100644 index 0000000..8f6c33e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CompanyVesselRelationshipsDto.java @@ -0,0 +1,40 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class CompanyVesselRelationshipsDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String doccHasCompanyCd; + private String doccHasCompany; + private String groupActlOwnr; + private String groupActlOwnrCd; + private String shipOperator; + private String shipOperatorCd; + private String rgOwnr; + private String rgOwnrCd; + private String shipMngCompany; + private String shipMngCompanyCd; + private String techMngCompany; + private String techMngCompanyCd; + private String doccGroup; + private String doccGroupCd; + private String shipOperatorGroup; + private String shipOperatorGroupCd; + private String shipMngCompanyGroup; + private String shipMngCompanyGroupCd; + private String techMngCompanyGroup; + private String techMngCompanyGroupCd; + + @Override + 
public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CrewListDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CrewListDto.java new file mode 100644 index 0000000..2a83008 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/CrewListDto.java @@ -0,0 +1,32 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class CrewListDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String crewId; + private String shipNm; + private String ntnlty; + private String crewRstrYmd; + private BigDecimal oaCrewCnt; + private BigDecimal genCrewCnt; + private BigDecimal offcrCnt; + private BigDecimal apprOffcrCnt; + private BigDecimal trneCnt; + private BigDecimal embrkMntncCrewCnt; + private BigDecimal unrprtCnt; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/DarkActivityConfirmedDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/DarkActivityConfirmedDto.java new file mode 100644 index 0000000..8a18e19 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/DarkActivityConfirmedDto.java @@ -0,0 +1,46 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.time.LocalDateTime; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class DarkActivityConfirmedDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String mmsiNo; + private Long darkHr; + private 
Long darkActvStatus; + private String shipNm; + private String darkActv; + private Long zoneId; + private String zoneNm; + private String zoneCountry; + private LocalDateTime darkTmUtc; + private Double darkLat; + private Double darkLon; + private Double darkSpd; + private Double darkHeading; + private Double darkDraft; + private LocalDateTime nxtCptrTmUtc; + private Double nxtCptrSpd; + private Double nxtCptrDraft; + private Double nxtCptrHeading; + private String darkRptDestAis; + private String lastPrtcllPort; + private String lastPoccntryCd; + private String lastPoccntry; + private Double nxtCptrLat; + private Double nxtCptrLon; + private String nxtCptrRptDestAis; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/FlagHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/FlagHistoryDto.java new file mode 100644 index 0000000..b84b6d8 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/FlagHistoryDto.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class FlagHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipCountryHstrySeq; + private String efectStaDay; + private String countryCd; + private String country; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/GroupBeneficialOwnerHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/GroupBeneficialOwnerHistoryDto.java new file mode 100644 index 0000000..a197614 --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/GroupBeneficialOwnerHistoryDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class GroupBeneficialOwnerHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipGroupRevnOwnrHstrySeq; + private String efectStaDay; + private String groupActlOwnrCd; + private String groupActlOwnr; + private String companyStatus; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/IceClassDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/IceClassDto.java new file mode 100644 index 0000000..3662141 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/IceClassDto.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class IceClassDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String iceGrdCd; + private String iceGrd; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/NameHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/NameHistoryDto.java new file mode 100644 index 0000000..1267473 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/NameHistoryDto.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import 
com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class NameHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipNmChgHstrySeq; + private String efectStaDay; + private String shipNm; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/OperatorHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/OperatorHistoryDto.java new file mode 100644 index 0000000..518bf2c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/OperatorHistoryDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class OperatorHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipOperatorHstrySeq; + private String efectStaDay; + private String shipOperatorCd; + private String shipOperator; + private String companyStatus; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/OwnerHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/OwnerHistoryDto.java new file mode 100644 index 0000000..2983db4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/OwnerHistoryDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder 
+public class OwnerHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipOwnrHstrySeq; + private String efectStaDay; + private String ownrCd; + private String ownr; + private String companyStatus; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/PandIHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/PandIHistoryDto.java new file mode 100644 index 0000000..453743f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/PandIHistoryDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class PandIHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipPrtcRpnHstrySeq; + private String efectStaDay; + private String pniClubCd; + private String pniClubNm; + private String src; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SafetyManagementCertificateHistDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SafetyManagementCertificateHistDto.java new file mode 100644 index 0000000..e3fda53 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SafetyManagementCertificateHistDto.java @@ -0,0 +1,33 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class SafetyManagementCertificateHistDto implements 
JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipSftyMngEvdcSeq; + private String smgrcSrngEngines; + private String smgrcSysCatConvArbt; + private String smgrcExpryDay; + private String smgrcIssueDay; + private String smgrcDoccCompany; + private String smgrcNtnlty; + private String smgrcIssueEngines; + private String smgrcEtcDesc; + private String smgrcShipNm; + private String smgrcShipType; + private String smgrcSrc; + private String smgrcCompanyCd; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipAddInfoDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipAddInfoDto.java new file mode 100644 index 0000000..a8e9160 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipAddInfoDto.java @@ -0,0 +1,30 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class ShipAddInfoDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipEml; + private String maxDpwt; + private String maxDrillDepth; + private String drillBrg; + private String oceanProdFacility; + private String deckHeatExch; + private String dehtexMatral; + private String portblTwinDeck; + private String fixedTwinDeck; + private String shipSatlitCommId; + private String shipSatlitCmrspCd; + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipInfoMstDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipInfoMstDto.java new file mode 100644 index 0000000..e44d9d3 --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipInfoMstDto.java @@ -0,0 +1,102 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class ShipInfoMstDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String mmsiNo; + private String shipNm; + private String clsgnNo; + private String frmlaRegNo; + private String fshrPrmtNo; + private String shipNtnlty; + private String ntnltyCd; + private String loadPort; + private String clfic; + private String clficDesc; + private String shipStatus; + private String shipTypeGroup; + private String shipTypeLvTwo; + private String shipTypeLvThr; + private String shipTypeLvFour; + private String shipTypeLvFive; + private String shipTypeLvFiveDtldType; + private String shipTypeLvFiveHullType; + private String shipTypeLvFiveLwrnkGroup; + private String buildYy; + private String buildYmd; + private String shpyrd; + private String shpyrdOffclNm; + private String shpyrdBuildNo; + private String buildDesc; + private String modfHstryDesc; + private String whlnthLoa; + private String regLength; + private String lbp; + private String formnBreadth; + private String maxBreadth; + private String depth; + private String draft; + private String keelMastHg; + private String bulbBow; + private String gt; + private String ntTon; + private String dwt; + private String displacement; + private String lightDisplacementTon; + private String cgt; + private String fldngOneCmPerTonTpci; + private String tonEfectDay; + private String calcfrmDwt; + private String teuCnt; + private String teuCapacity; + private String grainCapacityM3; + private String baleCapacity; + private String liquidCapacity; + private String gasM3; + private String insulatedM3; + private String passengerCapacity; + private String 
bollardPull; + private String svcSpd; + private String mainEngineType; + private String fuelCnsmpSpdOne; + private String fuelCnsmpamtValOne; + private String fuelCnsmpSpdTwo; + private String fuelCnsmpamtValTwo; + private String totalFuelCapacityM3; + private String blrMftr; + private String proplrMftr; + private String cargoCapacityM3Desc; + private String eqpmntDesc; + private String hdn; + private String hatcheDesc; + private String laneDoorRampDesc; + private String spcTankDesc; + private String tankDesc; + private String prmovrDesc; + private String prmovrOvrvwDesc; + private String auxDesc; + private String asstGnrtrDesc; + private String fuelDesc; + private String docCompanyCd; + private String groupActlOwnrCompanyCd; + private String operator; + private String operatorCompanyCd; + private String shipMngrCompanyCd; + private String techMngrCd; + private String regShponrCd; + private String lastMdfcnDt; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipManagerHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipManagerHistoryDto.java new file mode 100644 index 0000000..31c5557 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ShipManagerHistoryDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class ShipManagerHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipMngCompanySeq; + private String efectStaDay; + private String shipMngrCd; + private String shipMngr; + private String companyStatus; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at 
end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SisterShipLinksDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SisterShipLinksDto.java new file mode 100644 index 0000000..a3d98e5 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SisterShipLinksDto.java @@ -0,0 +1,21 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class SisterShipLinksDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String linkImoNo; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SpecialFeatureDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SpecialFeatureDto.java new file mode 100644 index 0000000..34e86ae --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SpecialFeatureDto.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class SpecialFeatureDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipSpcFetrSeq; + private String spcMttrCd; + private String spcMttr; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/StatusHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/StatusHistoryDto.java new file mode 100644 index 0000000..8f41a9b --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/StatusHistoryDto.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class StatusHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipStatusHstrySeq; + private String statusCd; + private String statusChgYmd; + private String status; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/StowageCommodityDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/StowageCommodityDto.java new file mode 100644 index 0000000..f99f429 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/StowageCommodityDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class StowageCommodityDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String shipCargoCapacitySeq; + private String capacityCd; + private String capacityCdDesc; + private String cargoCd; + private String cargoNm; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SurveyDatesDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SurveyDatesDto.java new file mode 100644 index 0000000..7925511 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SurveyDatesDto.java @@ -0,0 +1,27 @@ +package 
com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class SurveyDatesDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String clficCd; + private String clfic; + private String dckngInspection; + private String fxtmInspection; + private String annualInspection; + private String mchnFxtmInspectionYmd; + private String tlsftInspectionYmd; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SurveyDatesHistoryUniqueDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SurveyDatesHistoryUniqueDto.java new file mode 100644 index 0000000..e2c4350 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/SurveyDatesHistoryUniqueDto.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class SurveyDatesHistoryUniqueDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String clficCd; + private String inspectionType; + private String inspectionYmd; + private String clfic; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/TbCompanyDetailDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/TbCompanyDetailDto.java new file mode 100644 index 0000000..ecc8cc2 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/TbCompanyDetailDto.java @@ -0,0 +1,48 @@ +package 
com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class TbCompanyDetailDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String companyCd; + private String lastUpdYmd; + private String careCd; + private String companyStatus; + private String fullNm; + private String companyNameAbbr; + private String companyFndnYmd; + private String prntCompanyCd; + private String countryNm; + private String ctyNm; + private String oaAddr; + private String emlAddr; + private String tel; + private String faxNo; + private String wbstUrl; + private String countryCtrl; + private String countryCtrlCd; + private String countryReg; + private String countryRegCd; + private String regionCd; + private String distNm; + private String distNo; + private String mailAddrRear; + private String mailAddrFrnt; + private String poBox; + private String dtlAddrOne; + private String dtlAddrTwo; + private String dtlAddrThr; + private String tlx; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/TechnicalManagerHistoryDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/TechnicalManagerHistoryDto.java new file mode 100644 index 0000000..8a2913e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/TechnicalManagerHistoryDto.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class TechnicalManagerHistoryDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String 
shipTechMngCompanySeq; + private String efectStaDay; + private String techMngrCd; + private String techMngr; + private String companyStatus; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ThrustersDto.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ThrustersDto.java new file mode 100644 index 0000000..0eac68a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/dto/ThrustersDto.java @@ -0,0 +1,29 @@ +package com.snp.batch.jobs.datasync.batch.ship.dto; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import java.math.BigDecimal; + +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +public class ThrustersDto implements JobExecutionGroupable { + private Long jobExecutionId; + private String datasetVer; + private String imoNo; + private String thrstrSeq; + private String thrstrTypeCd; + private String thrstrType; + private BigDecimal thrstrCnt; + private String thrstrPosition; + private BigDecimal thrstrPowerBhp; + private BigDecimal thrstrPowerKw; + private String instlMth; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/BareboatCharterHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/BareboatCharterHistoryEntity.java new file mode 100644 index 0000000..af1469a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/BareboatCharterHistoryEntity.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class BareboatCharterHistoryEntity implements JobExecutionGroupable { + private String datasetVer; 
+ private String imoNo; + private String bbctrSeq; + private String efectStaDay; + private String bbctrCompanyCd; + private String bbctrCompany; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CallsignAndMmsiHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CallsignAndMmsiHistoryEntity.java new file mode 100644 index 0000000..30364c2 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CallsignAndMmsiHistoryEntity.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class CallsignAndMmsiHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipIdntfSeq; + private String efectStaDay; + private String clsgnNo; + private String mmsiNo; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ClassHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ClassHistoryEntity.java new file mode 100644 index 0000000..c4ed691 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ClassHistoryEntity.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class ClassHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String clficHstrySeq; + private String efectStaDay; + private String 
clficCd; + private String clficId; + private String clficAstnNm; + private String clficHasYn; + private String nowYn; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CompanyVesselRelationshipsEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CompanyVesselRelationshipsEntity.java new file mode 100644 index 0000000..fb1a0c8 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CompanyVesselRelationshipsEntity.java @@ -0,0 +1,40 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class CompanyVesselRelationshipsEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String doccHasCompanyCd; + private String doccHasCompany; + private String groupActlOwnr; + private String groupActlOwnrCd; + private String shipOperator; + private String shipOperatorCd; + private String rgOwnr; + private String rgOwnrCd; + private String shipMngCompany; + private String shipMngCompanyCd; + private String techMngCompany; + private String techMngCompanyCd; + private String doccGroup; + private String doccGroupCd; + private String shipOperatorGroup; + private String shipOperatorGroupCd; + private String shipMngCompanyGroup; + private String shipMngCompanyGroupCd; + private String techMngCompanyGroup; + private String techMngCompanyGroupCd; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CrewListEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CrewListEntity.java new file mode 100644 index 0000000..a4d6343 --- 
/dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/CrewListEntity.java @@ -0,0 +1,32 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; + +@Data +@SuperBuilder +@AllArgsConstructor +public class CrewListEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String crewId; + private String shipNm; + private String ntnlty; + private String crewRstrYmd; + private BigDecimal oaCrewCnt; + private BigDecimal genCrewCnt; + private BigDecimal offcrCnt; + private BigDecimal apprOffcrCnt; + private BigDecimal trneCnt; + private BigDecimal embrkMntncCrewCnt; + private BigDecimal unrprtCnt; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/DarkActivityConfirmedEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/DarkActivityConfirmedEntity.java new file mode 100644 index 0000000..f126ac3 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/DarkActivityConfirmedEntity.java @@ -0,0 +1,47 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +@NoArgsConstructor +public class DarkActivityConfirmedEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String mmsiNo; + private Long darkHr; + private Long darkActvStatus; + private String shipNm; + private String darkActv; + private Long zoneId; + private String zoneNm; + private String zoneCountry; + private LocalDateTime darkTmUtc; + private Double darkLat; + private 
Double darkLon; + private Double darkSpd; + private Double darkHeading; + private Double darkDraft; + private LocalDateTime nxtCptrTmUtc; + private Double nxtCptrSpd; + private Double nxtCptrDraft; + private Double nxtCptrHeading; + private String darkRptDestAis; + private String lastPrtcllPort; + private String lastPoccntryCd; + private String lastPoccntry; + private Double nxtCptrLat; + private Double nxtCptrLon; + private String nxtCptrRptDestAis; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/FlagHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/FlagHistoryEntity.java new file mode 100644 index 0000000..7251faf --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/FlagHistoryEntity.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class FlagHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipCountryHstrySeq; + private String efectStaDay; + private String countryCd; + private String country; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/GroupBeneficialOwnerHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/GroupBeneficialOwnerHistoryEntity.java new file mode 100644 index 0000000..68b966e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/GroupBeneficialOwnerHistoryEntity.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import 
com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class GroupBeneficialOwnerHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipGroupRevnOwnrHstrySeq; + private String efectStaDay; + private String groupActlOwnrCd; + private String groupActlOwnr; + private String companyStatus; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/IceClassEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/IceClassEntity.java new file mode 100644 index 0000000..dce616c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/IceClassEntity.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class IceClassEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String iceGrdCd; + private String iceGrd; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/NameHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/NameHistoryEntity.java new file mode 100644 index 0000000..c376ced --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/NameHistoryEntity.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public 
class NameHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipNmChgHstrySeq; + private String efectStaDay; + private String shipNm; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/OperatorHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/OperatorHistoryEntity.java new file mode 100644 index 0000000..fcb653c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/OperatorHistoryEntity.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class OperatorHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipOperatorHstrySeq; + private String efectStaDay; + private String shipOperatorCd; + private String shipOperator; + private String companyStatus; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/OwnerHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/OwnerHistoryEntity.java new file mode 100644 index 0000000..1d64706 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/OwnerHistoryEntity.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class 
OwnerHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipOwnrHstrySeq; + private String efectStaDay; + private String ownrCd; + private String ownr; + private String companyStatus; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/PandIHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/PandIHistoryEntity.java new file mode 100644 index 0000000..1782cf5 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/PandIHistoryEntity.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class PandIHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipPrtcRpnHstrySeq; + private String efectStaDay; + private String pniClubCd; + private String pniClubNm; + private String src; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SafetyManagementCertificateHistEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SafetyManagementCertificateHistEntity.java new file mode 100644 index 0000000..af8344c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SafetyManagementCertificateHistEntity.java @@ -0,0 +1,33 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder 
+@AllArgsConstructor +public class SafetyManagementCertificateHistEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipSftyMngEvdcSeq; + private String smgrcSrngEngines; + private String smgrcSysCatConvArbt; + private String smgrcExpryDay; + private String smgrcIssueDay; + private String smgrcDoccCompany; + private String smgrcNtnlty; + private String smgrcIssueEngines; + private String smgrcEtcDesc; + private String smgrcShipNm; + private String smgrcShipType; + private String smgrcSrc; + private String smgrcCompanyCd; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipAddInfoEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipAddInfoEntity.java new file mode 100644 index 0000000..11d8ec7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipAddInfoEntity.java @@ -0,0 +1,32 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; +import com.snp.batch.common.util.JobExecutionGroupable; +import jakarta.persistence.*; +import lombok.*; +import lombok.experimental.SuperBuilder; + +import java.time.LocalDateTime; + +@Data +@SuperBuilder +@AllArgsConstructor +public class ShipAddInfoEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipEml; + private String maxDpwt; + private String maxDrillDepth; + private String drillBrg; + private String oceanProdFacility; + private String deckHeatExch; + private String dehtexMatral; + private String portblTwinDeck; + private String fixedTwinDeck; + private String shipSatlitCommId; + private String shipSatlitCmrspCd; + + private Long jobExecutionId; + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git 
a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipInfoMstEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipInfoMstEntity.java new file mode 100644 index 0000000..aedfaf6 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipInfoMstEntity.java @@ -0,0 +1,102 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class ShipInfoMstEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String mmsiNo; + private String shipNm; + private String clsgnNo; + private String frmlaRegNo; + private String fshrPrmtNo; + private String shipNtnlty; + private String ntnltyCd; + private String loadPort; + private String clfic; + private String clficDesc; + private String shipStatus; + private String shipTypeGroup; + private String shipTypeLvTwo; + private String shipTypeLvThr; + private String shipTypeLvFour; + private String shipTypeLvFive; + private String shipTypeLvFiveDtldType; + private String shipTypeLvFiveHullType; + private String shipTypeLvFiveLwrnkGroup; + private String buildYy; + private String buildYmd; + private String shpyrd; + private String shpyrdOffclNm; + private String shpyrdBuildNo; + private String buildDesc; + private String modfHstryDesc; + private String whlnthLoa; + private String regLength; + private String lbp; + private String formnBreadth; + private String maxBreadth; + private String depth; + private String draft; + private String keelMastHg; + private String bulbBow; + private String gt; + private String ntTon; + private String dwt; + private String displacement; + private String lightDisplacementTon; + private String cgt; + private String fldngOneCmPerTonTpci; + private String tonEfectDay; + private String calcfrmDwt; + 
private String teuCnt; + private String teuCapacity; + private String grainCapacityM3; + private String baleCapacity; + private String liquidCapacity; + private String gasM3; + private String insulatedM3; + private String passengerCapacity; + private String bollardPull; + private String svcSpd; + private String mainEngineType; + private String fuelCnsmpSpdOne; + private String fuelCnsmpamtValOne; + private String fuelCnsmpSpdTwo; + private String fuelCnsmpamtValTwo; + private String totalFuelCapacityM3; + private String blrMftr; + private String proplrMftr; + private String cargoCapacityM3Desc; + private String eqpmntDesc; + private String hdn; + private String hatcheDesc; + private String laneDoorRampDesc; + private String spcTankDesc; + private String tankDesc; + private String prmovrDesc; + private String prmovrOvrvwDesc; + private String auxDesc; + private String asstGnrtrDesc; + private String fuelDesc; + private String docCompanyCd; + private String groupActlOwnrCompanyCd; + private String operator; + private String operatorCompanyCd; + private String shipMngrCompanyCd; + private String techMngrCd; + private String regShponrCd; + private String lastMdfcnDt; + + private Long jobExecutionId; + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipManagerHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipManagerHistoryEntity.java new file mode 100644 index 0000000..6605347 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ShipManagerHistoryEntity.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class ShipManagerHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private 
String imoNo; + private String shipMngCompanySeq; + private String efectStaDay; + private String shipMngrCd; + private String shipMngr; + private String companyStatus; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SisterShipLinksEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SisterShipLinksEntity.java new file mode 100644 index 0000000..62cf683 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SisterShipLinksEntity.java @@ -0,0 +1,21 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class SisterShipLinksEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String linkImoNo; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SpecialFeatureEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SpecialFeatureEntity.java new file mode 100644 index 0000000..cb16e18 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SpecialFeatureEntity.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class SpecialFeatureEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipSpcFetrSeq; + private String spcMttrCd; + private String spcMttr; + + private Long 
jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/StatusHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/StatusHistoryEntity.java new file mode 100644 index 0000000..77c6ba9 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/StatusHistoryEntity.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class StatusHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipStatusHstrySeq; + private String statusCd; + private String statusChgYmd; + private String status; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/StowageCommodityEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/StowageCommodityEntity.java new file mode 100644 index 0000000..0a1f85b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/StowageCommodityEntity.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class StowageCommodityEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipCargoCapacitySeq; + private String capacityCd; + private String capacityCdDesc; + private String cargoCd; + private String cargoNm; + + private Long jobExecutionId; + + @Override + 
public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SurveyDatesEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SurveyDatesEntity.java new file mode 100644 index 0000000..93d3989 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SurveyDatesEntity.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class SurveyDatesEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String clficCd; + private String clfic; + private String dckngInspection; + private String fxtmInspection; + private String annualInspection; + private String mchnFxtmInspectionYmd; + private String tlsftInspectionYmd; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SurveyDatesHistoryUniqueEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SurveyDatesHistoryUniqueEntity.java new file mode 100644 index 0000000..6620912 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/SurveyDatesHistoryUniqueEntity.java @@ -0,0 +1,24 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class SurveyDatesHistoryUniqueEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String clficCd; + private String inspectionType; + private String inspectionYmd; + private 
String clfic; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/TbCompanyDetailEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/TbCompanyDetailEntity.java new file mode 100644 index 0000000..0f8bdef --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/TbCompanyDetailEntity.java @@ -0,0 +1,48 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class TbCompanyDetailEntity implements JobExecutionGroupable { + private String datasetVer; + private String companyCd; + private String lastUpdYmd; + private String careCd; + private String companyStatus; + private String fullNm; + private String companyNameAbbr; + private String companyFndnYmd; + private String prntCompanyCd; + private String countryNm; + private String ctyNm; + private String oaAddr; + private String emlAddr; + private String tel; + private String faxNo; + private String wbstUrl; + private String countryCtrl; + private String countryCtrlCd; + private String countryReg; + private String countryRegCd; + private String regionCd; + private String distNm; + private String distNo; + private String mailAddrRear; + private String mailAddrFrnt; + private String poBox; + private String dtlAddrOne; + private String dtlAddrTwo; + private String dtlAddrThr; + private String tlx; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/TechnicalManagerHistoryEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/TechnicalManagerHistoryEntity.java 
new file mode 100644 index 0000000..9b27eef --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/TechnicalManagerHistoryEntity.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@AllArgsConstructor +public class TechnicalManagerHistoryEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String shipTechMngCompanySeq; + private String efectStaDay; + private String techMngrCd; + private String techMngr; + private String companyStatus; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ThrustersEntity.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ThrustersEntity.java new file mode 100644 index 0000000..78c1193 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/entity/ThrustersEntity.java @@ -0,0 +1,29 @@ +package com.snp.batch.jobs.datasync.batch.ship.entity; + +import com.snp.batch.common.util.JobExecutionGroupable; +import lombok.*; +import lombok.experimental.SuperBuilder; +import java.math.BigDecimal; + +@Data +@SuperBuilder +@AllArgsConstructor +public class ThrustersEntity implements JobExecutionGroupable { + private String datasetVer; + private String imoNo; + private String thrstrSeq; + private String thrstrTypeCd; + private String thrstrType; + private BigDecimal thrstrCnt; + private String thrstrPosition; + private BigDecimal thrstrPowerBhp; + private BigDecimal thrstrPowerKw; + private String instlMth; + + private Long jobExecutionId; + + @Override + public Long getJobExecutionId() { + return this.jobExecutionId; + } +} \ No newline at end of file diff --git 
a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/BareboatCharterHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/BareboatCharterHistoryProcessor.java new file mode 100644 index 0000000..8f29990 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/BareboatCharterHistoryProcessor.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.BareboatCharterHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class BareboatCharterHistoryProcessor extends BaseProcessor { + @Override + protected BareboatCharterHistoryEntity processItem(BareboatCharterHistoryDto dto) throws Exception { + return BareboatCharterHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .bbctrSeq(dto.getBbctrSeq()) + .efectStaDay(dto.getEfectStaDay()) + .bbctrCompanyCd(dto.getBbctrCompanyCd()) + .bbctrCompany(dto.getBbctrCompany()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CallsignAndMmsiHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CallsignAndMmsiHistoryProcessor.java new file mode 100644 index 0000000..8146472 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CallsignAndMmsiHistoryProcessor.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.CallsignAndMmsiHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CallsignAndMmsiHistoryProcessor extends 
BaseProcessor { + @Override + protected CallsignAndMmsiHistoryEntity processItem(CallsignAndMmsiHistoryDto dto) throws Exception { + return CallsignAndMmsiHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipIdntfSeq(dto.getShipIdntfSeq()) + .efectStaDay(dto.getEfectStaDay()) + .clsgnNo(dto.getClsgnNo()) + .mmsiNo(dto.getMmsiNo()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ClassHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ClassHistoryProcessor.java new file mode 100644 index 0000000..f49969f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ClassHistoryProcessor.java @@ -0,0 +1,25 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.ClassHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.ClassHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ClassHistoryProcessor extends BaseProcessor { + @Override + protected ClassHistoryEntity processItem(ClassHistoryDto dto) throws Exception { + return ClassHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .clficHstrySeq(dto.getClficHstrySeq()) + .efectStaDay(dto.getEfectStaDay()) + .clficCd(dto.getClficCd()) + .clficId(dto.getClficId()) + .clficAstnNm(dto.getClficAstnNm()) + .clficHasYn(dto.getClficHasYn()) + .nowYn(dto.getNowYn()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CompanyVesselRelationshipsProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CompanyVesselRelationshipsProcessor.java new file mode 100644 index 0000000..65e45a0 --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CompanyVesselRelationshipsProcessor.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.CompanyVesselRelationshipsDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.CompanyVesselRelationshipsEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CompanyVesselRelationshipsProcessor extends BaseProcessor { + @Override + protected CompanyVesselRelationshipsEntity processItem(CompanyVesselRelationshipsDto dto) throws Exception { + return CompanyVesselRelationshipsEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .doccHasCompanyCd(dto.getDoccHasCompanyCd()) + .doccHasCompany(dto.getDoccHasCompany()) + .groupActlOwnr(dto.getGroupActlOwnr()) + .groupActlOwnrCd(dto.getGroupActlOwnrCd()) + .shipOperator(dto.getShipOperator()) + .shipOperatorCd(dto.getShipOperatorCd()) + .rgOwnr(dto.getRgOwnr()) + .rgOwnrCd(dto.getRgOwnrCd()) + .shipMngCompany(dto.getShipMngCompany()) + .shipMngCompanyCd(dto.getShipMngCompanyCd()) + .techMngCompany(dto.getTechMngCompany()) + .techMngCompanyCd(dto.getTechMngCompanyCd()) + .doccGroup(dto.getDoccGroup()) + .doccGroupCd(dto.getDoccGroupCd()) + .shipOperatorGroup(dto.getShipOperatorGroup()) + .shipOperatorGroupCd(dto.getShipOperatorGroupCd()) + .shipMngCompanyGroup(dto.getShipMngCompanyGroup()) + .shipMngCompanyGroupCd(dto.getShipMngCompanyGroupCd()) + .techMngCompanyGroup(dto.getTechMngCompanyGroup()) + .techMngCompanyGroupCd(dto.getTechMngCompanyGroupCd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CrewListProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CrewListProcessor.java new file mode 100644 index 0000000..903d87f --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/CrewListProcessor.java @@ -0,0 +1,29 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.CrewListDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.CrewListEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class CrewListProcessor extends BaseProcessor { + @Override + protected CrewListEntity processItem(CrewListDto dto) throws Exception { + return CrewListEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .crewId(dto.getCrewId()) + .shipNm(dto.getShipNm()) + .ntnlty(dto.getNtnlty()) + .crewRstrYmd(dto.getCrewRstrYmd()) + .oaCrewCnt(dto.getOaCrewCnt()) + .genCrewCnt(dto.getGenCrewCnt()) + .offcrCnt(dto.getOffcrCnt()) + .apprOffcrCnt(dto.getApprOffcrCnt()) + .trneCnt(dto.getTrneCnt()) + .embrkMntncCrewCnt(dto.getEmbrkMntncCrewCnt()) + .unrprtCnt(dto.getUnrprtCnt()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/DarkActivityConfirmedProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/DarkActivityConfirmedProcessor.java new file mode 100644 index 0000000..b953d0b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/DarkActivityConfirmedProcessor.java @@ -0,0 +1,43 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.DarkActivityConfirmedDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.DarkActivityConfirmedEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class DarkActivityConfirmedProcessor extends BaseProcessor { + @Override + protected DarkActivityConfirmedEntity processItem(DarkActivityConfirmedDto dto) throws Exception { + return 
DarkActivityConfirmedEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .mmsiNo(dto.getMmsiNo()) + .darkHr(dto.getDarkHr()) + .darkActvStatus(dto.getDarkActvStatus()) + .shipNm(dto.getShipNm()) + .darkActv(dto.getDarkActv()) + .zoneId(dto.getZoneId()) + .zoneNm(dto.getZoneNm()) + .zoneCountry(dto.getZoneCountry()) + .darkTmUtc(dto.getDarkTmUtc()) + .darkLat(dto.getDarkLat()) + .darkLon(dto.getDarkLon()) + .darkSpd(dto.getDarkSpd()) + .darkHeading(dto.getDarkHeading()) + .darkDraft(dto.getDarkDraft()) + .nxtCptrTmUtc(dto.getNxtCptrTmUtc()) + .nxtCptrSpd(dto.getNxtCptrSpd()) + .nxtCptrDraft(dto.getNxtCptrDraft()) + .nxtCptrHeading(dto.getNxtCptrHeading()) + .darkRptDestAis(dto.getDarkRptDestAis()) + .lastPrtcllPort(dto.getLastPrtcllPort()) + .lastPoccntryCd(dto.getLastPoccntryCd()) + .lastPoccntry(dto.getLastPoccntry()) + .nxtCptrLat(dto.getNxtCptrLat()) + .nxtCptrLon(dto.getNxtCptrLon()) + .nxtCptrRptDestAis(dto.getNxtCptrRptDestAis()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/FlagHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/FlagHistoryProcessor.java new file mode 100644 index 0000000..fa77a40 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/FlagHistoryProcessor.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.FlagHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.FlagHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class FlagHistoryProcessor extends BaseProcessor { + @Override + protected FlagHistoryEntity processItem(FlagHistoryDto dto) throws Exception { + return FlagHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) 
+ .shipCountryHstrySeq(dto.getShipCountryHstrySeq()) + .efectStaDay(dto.getEfectStaDay()) + .countryCd(dto.getCountryCd()) + .country(dto.getCountry()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/GroupBeneficialOwnerHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/GroupBeneficialOwnerHistoryProcessor.java new file mode 100644 index 0000000..4c89782 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/GroupBeneficialOwnerHistoryProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.GroupBeneficialOwnerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.GroupBeneficialOwnerHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class GroupBeneficialOwnerHistoryProcessor extends BaseProcessor { + @Override + protected GroupBeneficialOwnerHistoryEntity processItem(GroupBeneficialOwnerHistoryDto dto) throws Exception { + return GroupBeneficialOwnerHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipGroupRevnOwnrHstrySeq(dto.getShipGroupRevnOwnrHstrySeq()) + .efectStaDay(dto.getEfectStaDay()) + .groupActlOwnrCd(dto.getGroupActlOwnrCd()) + .groupActlOwnr(dto.getGroupActlOwnr()) + .companyStatus(dto.getCompanyStatus()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/IceClassProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/IceClassProcessor.java new file mode 100644 index 0000000..9bf4d43 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/IceClassProcessor.java @@ -0,0 +1,20 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; 
+import com.snp.batch.jobs.datasync.batch.ship.dto.IceClassDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.IceClassEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class IceClassProcessor extends BaseProcessor { + @Override + protected IceClassEntity processItem(IceClassDto dto) throws Exception { + return IceClassEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .iceGrdCd(dto.getIceGrdCd()) + .iceGrd(dto.getIceGrd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/NameHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/NameHistoryProcessor.java new file mode 100644 index 0000000..d2d9107 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/NameHistoryProcessor.java @@ -0,0 +1,21 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.NameHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class NameHistoryProcessor extends BaseProcessor { + @Override + protected NameHistoryEntity processItem(NameHistoryDto dto) throws Exception { + return NameHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipNmChgHstrySeq(dto.getShipNmChgHstrySeq()) + .efectStaDay(dto.getEfectStaDay()) + .shipNm(dto.getShipNm()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/OperatorHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/OperatorHistoryProcessor.java new file mode 100644 index 0000000..a6e4472 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/OperatorHistoryProcessor.java @@ -0,0 
+1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.OperatorHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class OperatorHistoryProcessor extends BaseProcessor { + @Override + protected OperatorHistoryEntity processItem(OperatorHistoryDto dto) throws Exception { + return OperatorHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipOperatorHstrySeq(dto.getShipOperatorHstrySeq()) + .efectStaDay(dto.getEfectStaDay()) + .shipOperatorCd(dto.getShipOperatorCd()) + .shipOperator(dto.getShipOperator()) + .companyStatus(dto.getCompanyStatus()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/OwnerHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/OwnerHistoryProcessor.java new file mode 100644 index 0000000..4533a40 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/OwnerHistoryProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.OwnerHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class OwnerHistoryProcessor extends BaseProcessor { + @Override + protected OwnerHistoryEntity processItem(OwnerHistoryDto dto) throws Exception { + return OwnerHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipOwnrHstrySeq(dto.getShipOwnrHstrySeq()) + .efectStaDay(dto.getEfectStaDay()) + .ownrCd(dto.getOwnrCd()) + .ownr(dto.getOwnr()) + 
.companyStatus(dto.getCompanyStatus()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/PandIHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/PandIHistoryProcessor.java new file mode 100644 index 0000000..e2ca4c5 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/PandIHistoryProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.PandIHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class PandIHistoryProcessor extends BaseProcessor { + @Override + protected PandIHistoryEntity processItem(PandIHistoryDto dto) throws Exception { + return PandIHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipPrtcRpnHstrySeq(dto.getShipPrtcRpnHstrySeq()) + .efectStaDay(dto.getEfectStaDay()) + .pniClubCd(dto.getPniClubCd()) + .pniClubNm(dto.getPniClubNm()) + .src(dto.getSrc()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SafetyManagementCertificateHistProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SafetyManagementCertificateHistProcessor.java new file mode 100644 index 0000000..da4d06f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SafetyManagementCertificateHistProcessor.java @@ -0,0 +1,31 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.SafetyManagementCertificateHistEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j 
+public class SafetyManagementCertificateHistProcessor extends BaseProcessor { + @Override + protected SafetyManagementCertificateHistEntity processItem(SafetyManagementCertificateHistDto dto) throws Exception { + return SafetyManagementCertificateHistEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipSftyMngEvdcSeq(dto.getShipSftyMngEvdcSeq()) + .smgrcSrngEngines(dto.getSmgrcSrngEngines()) + .smgrcSysCatConvArbt(dto.getSmgrcSysCatConvArbt()) + .smgrcExpryDay(dto.getSmgrcExpryDay()) + .smgrcIssueDay(dto.getSmgrcIssueDay()) + .smgrcDoccCompany(dto.getSmgrcDoccCompany()) + .smgrcNtnlty(dto.getSmgrcNtnlty()) + .smgrcIssueEngines(dto.getSmgrcIssueEngines()) + .smgrcEtcDesc(dto.getSmgrcEtcDesc()) + .smgrcShipNm(dto.getSmgrcShipNm()) + .smgrcShipType(dto.getSmgrcShipType()) + .smgrcSrc(dto.getSmgrcSrc()) + .smgrcCompanyCd(dto.getSmgrcCompanyCd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipAddInfoProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipAddInfoProcessor.java new file mode 100644 index 0000000..1c6d05c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipAddInfoProcessor.java @@ -0,0 +1,29 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipAddInfoEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ShipAddInfoProcessor extends BaseProcessor { + @Override + protected ShipAddInfoEntity processItem(ShipAddInfoDto dto) throws Exception { + return ShipAddInfoEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) // job_execution_id 전달 + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipEml(dto.getShipEml()) + .maxDpwt(dto.getMaxDpwt()) + 
.maxDrillDepth(dto.getMaxDrillDepth()) + .drillBrg(dto.getDrillBrg()) + .oceanProdFacility(dto.getOceanProdFacility()) + .deckHeatExch(dto.getDeckHeatExch()) + .dehtexMatral(dto.getDehtexMatral()) + .portblTwinDeck(dto.getPortblTwinDeck()) + .fixedTwinDeck(dto.getFixedTwinDeck()) + .shipSatlitCommId(dto.getShipSatlitCommId()) + .shipSatlitCmrspCd(dto.getShipSatlitCmrspCd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipDataProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipDataProcessor.java new file mode 100644 index 0000000..8dc28f3 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipDataProcessor.java @@ -0,0 +1,101 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipInfoMstEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ShipDataProcessor extends BaseProcessor { + @Override + protected ShipInfoMstEntity processItem(ShipInfoMstDto dto) throws Exception { + + return ShipInfoMstEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) // job_execution_id 전달 + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .mmsiNo(dto.getMmsiNo()) + .shipNm(dto.getShipNm()) + .clsgnNo(dto.getClsgnNo()) + .frmlaRegNo(dto.getFrmlaRegNo()) + .fshrPrmtNo(dto.getFshrPrmtNo()) + .shipNtnlty(dto.getShipNtnlty()) + .ntnltyCd(dto.getNtnltyCd()) + .loadPort(dto.getLoadPort()) + .clfic(dto.getClfic()) + .clficDesc(dto.getClficDesc()) + .shipStatus(dto.getShipStatus()) + .shipTypeGroup(dto.getShipTypeGroup()) + .shipTypeLvTwo(dto.getShipTypeLvTwo()) + .shipTypeLvThr(dto.getShipTypeLvThr()) + .shipTypeLvFour(dto.getShipTypeLvFour()) + .shipTypeLvFive(dto.getShipTypeLvFive()) + .shipTypeLvFiveDtldType(dto.getShipTypeLvFiveDtldType()) + 
.shipTypeLvFiveHullType(dto.getShipTypeLvFiveHullType()) + .shipTypeLvFiveLwrnkGroup(dto.getShipTypeLvFiveLwrnkGroup()) + .buildYy(dto.getBuildYy()) + .buildYmd(dto.getBuildYmd()) + .shpyrd(dto.getShpyrd()) + .shpyrdOffclNm(dto.getShpyrdOffclNm()) + .shpyrdBuildNo(dto.getShpyrdBuildNo()) + .buildDesc(dto.getBuildDesc()) + .modfHstryDesc(dto.getModfHstryDesc()) + .whlnthLoa(dto.getWhlnthLoa()) + .regLength(dto.getRegLength()) + .lbp(dto.getLbp()) + .formnBreadth(dto.getFormnBreadth()) + .maxBreadth(dto.getMaxBreadth()) + .depth(dto.getDepth()) + .draft(dto.getDraft()) + .keelMastHg(dto.getKeelMastHg()) + .bulbBow(dto.getBulbBow()) + .gt(dto.getGt()) + .ntTon(dto.getNtTon()) + .dwt(dto.getDwt()) + .displacement(dto.getDisplacement()) + .lightDisplacementTon(dto.getLightDisplacementTon()) + .cgt(dto.getCgt()) + .fldngOneCmPerTonTpci(dto.getFldngOneCmPerTonTpci()) + .tonEfectDay(dto.getTonEfectDay()) + .calcfrmDwt(dto.getCalcfrmDwt()) + .teuCnt(dto.getTeuCnt()) + .teuCapacity(dto.getTeuCapacity()) + .grainCapacityM3(dto.getGrainCapacityM3()) + .baleCapacity(dto.getBaleCapacity()) + .liquidCapacity(dto.getLiquidCapacity()) + .gasM3(dto.getGasM3()) + .insulatedM3(dto.getInsulatedM3()) + .passengerCapacity(dto.getPassengerCapacity()) + .bollardPull(dto.getBollardPull()) + .svcSpd(dto.getSvcSpd()) + .mainEngineType(dto.getMainEngineType()) + .fuelCnsmpSpdOne(dto.getFuelCnsmpSpdOne()) + .fuelCnsmpamtValOne(dto.getFuelCnsmpamtValOne()) + .fuelCnsmpSpdTwo(dto.getFuelCnsmpSpdTwo()) + .fuelCnsmpamtValTwo(dto.getFuelCnsmpamtValTwo()) + .totalFuelCapacityM3(dto.getTotalFuelCapacityM3()) + .blrMftr(dto.getBlrMftr()) + .proplrMftr(dto.getProplrMftr()) + .cargoCapacityM3Desc(dto.getCargoCapacityM3Desc()) + .eqpmntDesc(dto.getEqpmntDesc()) + .hdn(dto.getHdn()) + .hatcheDesc(dto.getHatcheDesc()) + .laneDoorRampDesc(dto.getLaneDoorRampDesc()) + .spcTankDesc(dto.getSpcTankDesc()) + .tankDesc(dto.getTankDesc()) + .prmovrDesc(dto.getPrmovrDesc()) + 
.prmovrOvrvwDesc(dto.getPrmovrOvrvwDesc()) + .auxDesc(dto.getAuxDesc()) + .asstGnrtrDesc(dto.getAsstGnrtrDesc()) + .fuelDesc(dto.getFuelDesc()) + .docCompanyCd(dto.getDocCompanyCd()) + .groupActlOwnrCompanyCd(dto.getGroupActlOwnrCompanyCd()) + .operator(dto.getOperator()) + .operatorCompanyCd(dto.getOperatorCompanyCd()) + .shipMngrCompanyCd(dto.getShipMngrCompanyCd()) + .techMngrCd(dto.getTechMngrCd()) + .regShponrCd(dto.getRegShponrCd()) + .lastMdfcnDt(dto.getLastMdfcnDt()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipManagerHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipManagerHistoryProcessor.java new file mode 100644 index 0000000..9edb1e0 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ShipManagerHistoryProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipManagerHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ShipManagerHistoryProcessor extends BaseProcessor { + @Override + protected ShipManagerHistoryEntity processItem(ShipManagerHistoryDto dto) throws Exception { + return ShipManagerHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipMngCompanySeq(dto.getShipMngCompanySeq()) + .efectStaDay(dto.getEfectStaDay()) + .shipMngrCd(dto.getShipMngrCd()) + .shipMngr(dto.getShipMngr()) + .companyStatus(dto.getCompanyStatus()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SisterShipLinksProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SisterShipLinksProcessor.java new file mode 100644 index 0000000..83a1149 --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SisterShipLinksProcessor.java @@ -0,0 +1,19 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.SisterShipLinksEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SisterShipLinksProcessor extends BaseProcessor { + @Override + protected SisterShipLinksEntity processItem(SisterShipLinksDto dto) throws Exception { + return SisterShipLinksEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .linkImoNo(dto.getLinkImoNo()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SpecialFeatureProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SpecialFeatureProcessor.java new file mode 100644 index 0000000..939bd98 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SpecialFeatureProcessor.java @@ -0,0 +1,21 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.SpecialFeatureEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SpecialFeatureProcessor extends BaseProcessor { + @Override + protected SpecialFeatureEntity processItem(SpecialFeatureDto dto) throws Exception { + return SpecialFeatureEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipSpcFetrSeq(dto.getShipSpcFetrSeq()) + .spcMttrCd(dto.getSpcMttrCd()) + .spcMttr(dto.getSpcMttr()) + .build(); + } +} diff --git 
a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/StatusHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/StatusHistoryProcessor.java new file mode 100644 index 0000000..57392bb --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/StatusHistoryProcessor.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.StatusHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class StatusHistoryProcessor extends BaseProcessor { + @Override + protected StatusHistoryEntity processItem(StatusHistoryDto dto) throws Exception { + return StatusHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipStatusHstrySeq(dto.getShipStatusHstrySeq()) + .statusCd(dto.getStatusCd()) + .statusChgYmd(dto.getStatusChgYmd()) + .status(dto.getStatus()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/StowageCommodityProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/StowageCommodityProcessor.java new file mode 100644 index 0000000..3a3b278 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/StowageCommodityProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.StowageCommodityEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class StowageCommodityProcessor extends BaseProcessor { + @Override + protected StowageCommodityEntity processItem(StowageCommodityDto dto) throws Exception { + 
return StowageCommodityEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipCargoCapacitySeq(dto.getShipCargoCapacitySeq()) + .capacityCd(dto.getCapacityCd()) + .capacityCdDesc(dto.getCapacityCdDesc()) + .cargoCd(dto.getCargoCd()) + .cargoNm(dto.getCargoNm()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SurveyDatesHistoryUniqueProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SurveyDatesHistoryUniqueProcessor.java new file mode 100644 index 0000000..f1e7f98 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SurveyDatesHistoryUniqueProcessor.java @@ -0,0 +1,22 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesHistoryUniqueEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SurveyDatesHistoryUniqueProcessor extends BaseProcessor { + @Override + protected SurveyDatesHistoryUniqueEntity processItem(SurveyDatesHistoryUniqueDto dto) throws Exception { + return SurveyDatesHistoryUniqueEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .clficCd(dto.getClficCd()) + .inspectionType(dto.getInspectionType()) + .inspectionYmd(dto.getInspectionYmd()) + .clfic(dto.getClfic()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SurveyDatesProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SurveyDatesProcessor.java new file mode 100644 index 0000000..780da16 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/SurveyDatesProcessor.java @@ -0,0 +1,25 @@ +package 
com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SurveyDatesProcessor extends BaseProcessor { + @Override + protected SurveyDatesEntity processItem(SurveyDatesDto dto) throws Exception { + return SurveyDatesEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .clficCd(dto.getClficCd()) + .clfic(dto.getClfic()) + .dckngInspection(dto.getDckngInspection()) + .fxtmInspection(dto.getFxtmInspection()) + .annualInspection(dto.getAnnualInspection()) + .mchnFxtmInspectionYmd(dto.getMchnFxtmInspectionYmd()) + .tlsftInspectionYmd(dto.getTlsftInspectionYmd()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/TbCompanyDetailProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/TbCompanyDetailProcessor.java new file mode 100644 index 0000000..870a164 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/TbCompanyDetailProcessor.java @@ -0,0 +1,46 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.TbCompanyDetailEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class TbCompanyDetailProcessor extends BaseProcessor { + @Override + protected TbCompanyDetailEntity processItem(TbCompanyDetailDto dto) throws Exception { + return TbCompanyDetailEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .companyCd(dto.getCompanyCd()) + .lastUpdYmd(dto.getLastUpdYmd()) + .careCd(dto.getCareCd()) + 
.companyStatus(dto.getCompanyStatus()) + .fullNm(dto.getFullNm()) + .companyNameAbbr(dto.getCompanyNameAbbr()) + .companyFndnYmd(dto.getCompanyFndnYmd()) + .prntCompanyCd(dto.getPrntCompanyCd()) + .countryNm(dto.getCountryNm()) + .ctyNm(dto.getCtyNm()) + .oaAddr(dto.getOaAddr()) + .emlAddr(dto.getEmlAddr()) + .tel(dto.getTel()) + .faxNo(dto.getFaxNo()) + .wbstUrl(dto.getWbstUrl()) + .countryCtrl(dto.getCountryCtrl()) + .countryCtrlCd(dto.getCountryCtrlCd()) + .countryReg(dto.getCountryReg()) + .countryRegCd(dto.getCountryRegCd()) + .regionCd(dto.getRegionCd()) + .distNm(dto.getDistNm()) + .distNo(dto.getDistNo()) + .mailAddrRear(dto.getMailAddrRear()) + .mailAddrFrnt(dto.getMailAddrFrnt()) + .poBox(dto.getPoBox()) + .dtlAddrOne(dto.getDtlAddrOne()) + .dtlAddrTwo(dto.getDtlAddrTwo()) + .dtlAddrThr(dto.getDtlAddrThr()) + .tlx(dto.getTlx()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/TechnicalManagerHistoryProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/TechnicalManagerHistoryProcessor.java new file mode 100644 index 0000000..63e7a5a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/TechnicalManagerHistoryProcessor.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.TechnicalManagerHistoryDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.TechnicalManagerHistoryEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class TechnicalManagerHistoryProcessor extends BaseProcessor { + @Override + protected TechnicalManagerHistoryEntity processItem(TechnicalManagerHistoryDto dto) throws Exception { + return TechnicalManagerHistoryEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .shipTechMngCompanySeq(dto.getShipTechMngCompanySeq()) + 
.efectStaDay(dto.getEfectStaDay()) + .techMngrCd(dto.getTechMngrCd()) + .techMngr(dto.getTechMngr()) + .companyStatus(dto.getCompanyStatus()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ThrustersProcessor.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ThrustersProcessor.java new file mode 100644 index 0000000..d17c93e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/processor/ThrustersProcessor.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.datasync.batch.ship.processor; + +import com.snp.batch.common.batch.processor.BaseProcessor; +import com.snp.batch.jobs.datasync.batch.ship.dto.ThrustersDto; +import com.snp.batch.jobs.datasync.batch.ship.entity.ThrustersEntity; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ThrustersProcessor extends BaseProcessor { + @Override + protected ThrustersEntity processItem(ThrustersDto dto) throws Exception { + return ThrustersEntity.builder() + .jobExecutionId(dto.getJobExecutionId()) + .datasetVer(dto.getDatasetVer()) + .imoNo(dto.getImoNo()) + .thrstrSeq(dto.getThrstrSeq()) + .thrstrTypeCd(dto.getThrstrTypeCd()) + .thrstrType(dto.getThrstrType()) + .thrstrCnt(dto.getThrstrCnt()) + .thrstrPosition(dto.getThrstrPosition()) + .thrstrPowerBhp(dto.getThrstrPowerBhp()) + .thrstrPowerKw(dto.getThrstrPowerKw()) + .instlMth(dto.getInstlMth()) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/BareboatCharterHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/BareboatCharterHistoryReader.java new file mode 100644 index 0000000..3ab79b7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/BareboatCharterHistoryReader.java @@ -0,0 +1,73 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import 
com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class BareboatCharterHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public BareboatCharterHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public BareboatCharterHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceBareboatCharterHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[BareboatCharterHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceBareboatCharterHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return BareboatCharterHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .bbctrSeq(rs.getString("bbctr_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .bbctrCompanyCd(rs.getString("bbctr_company_cd")) + .bbctrCompany(rs.getString("bbctr_company")) + .build(); + }, nextTargetId); + + 
updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceBareboatCharterHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CallsignAndMmsiHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CallsignAndMmsiHistoryReader.java new file mode 100644 index 0000000..4d54f93 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CallsignAndMmsiHistoryReader.java @@ -0,0 +1,73 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class CallsignAndMmsiHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public CallsignAndMmsiHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public CallsignAndMmsiHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + 
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCallsignAndMmsiHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[CallsignAndMmsiHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCallsignAndMmsiHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return CallsignAndMmsiHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipIdntfSeq(rs.getString("ship_idntf_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .clsgnNo(rs.getString("clsgn_no")) + .mmsiNo(rs.getString("mmsi_no")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCallsignAndMmsiHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ClassHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ClassHistoryReader.java new file mode 100644 index 0000000..d5ccbf2 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ClassHistoryReader.java @@ -0,0 +1,76 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.ClassHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class ClassHistoryReader implements ItemReader { + private final 
TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public ClassHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public ClassHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceClassHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[ClassHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceClassHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return ClassHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .clficHstrySeq(rs.getString("clfic_hstry_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .clficCd(rs.getString("clfic_cd")) + .clficId(rs.getString("clfic_id")) + .clficAstnNm(rs.getString("clfic_asctn_nm")) + .clficHasYn(rs.getString("clfic_has_yn")) + .nowYn(rs.getString("now_yn")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceClassHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CompanyVesselRelationshipsReader.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CompanyVesselRelationshipsReader.java new file mode 100644 index 0000000..f313ab8 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CompanyVesselRelationshipsReader.java @@ -0,0 +1,89 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.CompanyVesselRelationshipsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class CompanyVesselRelationshipsReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public CompanyVesselRelationshipsReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public CompanyVesselRelationshipsDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceCompanyVesselRelationships), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[CompanyVesselRelationshipsReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCompanyVesselRelationships); + final Long targetId = 
nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return CompanyVesselRelationshipsDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .doccHasCompanyCd(rs.getString("docc_has_company_cd")) + .doccHasCompany(rs.getString("docc_has_company")) + .groupActlOwnr(rs.getString("group_actl_ownr")) + .groupActlOwnrCd(rs.getString("group_actl_ownr_cd")) + .shipOperator(rs.getString("ship_operator")) + .shipOperatorCd(rs.getString("ship_operator_cd")) + .rgOwnr(rs.getString("rg_ownr")) + .rgOwnrCd(rs.getString("rg_ownr_cd")) + .shipMngCompany(rs.getString("ship_mng_company")) + .shipMngCompanyCd(rs.getString("ship_mng_company_cd")) + .techMngCompany(rs.getString("tech_mng_company")) + .techMngCompanyCd(rs.getString("tech_mng_company_cd")) + .doccGroup(rs.getString("docc_group")) + .doccGroupCd(rs.getString("docc_group_cd")) + .shipOperatorGroup(rs.getString("ship_operator_group")) + .shipOperatorGroupCd(rs.getString("ship_operator_group_cd")) + .shipMngCompanyGroup(rs.getString("ship_mng_company_group")) + .shipMngCompanyGroupCd(rs.getString("ship_mng_company_group_cd")) + .techMngCompanyGroup(rs.getString("tech_mng_company_group")) + .techMngCompanyGroupCd(rs.getString("tech_mng_company_group_cd")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCompanyVesselRelationships); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CrewListReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CrewListReader.java new file mode 100644 index 0000000..f98da96 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/CrewListReader.java @@ -0,0 +1,80 @@ +package 
com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.CrewListDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class CrewListReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public CrewListReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public CrewListDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceCrewList), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[CrewListReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCrewList); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return CrewListDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .crewId(rs.getString("crew_id")) + .shipNm(rs.getString("ship_nm")) + .ntnlty(rs.getString("ntnlty")) + .crewRstrYmd(rs.getString("crew_rstr_ymd")) + 
.oaCrewCnt(rs.getBigDecimal("oa_crew_cnt")) + .genCrewCnt(rs.getBigDecimal("gen_crew_cnt")) + .offcrCnt(rs.getBigDecimal("offcr_cnt")) + .apprOffcrCnt(rs.getBigDecimal("appr_offcr_cnt")) + .trneCnt(rs.getBigDecimal("trne_cnt")) + .embrkMntncCrewCnt(rs.getBigDecimal("embrk_mntnc_crew_cnt")) + .unrprtCnt(rs.getBigDecimal("unrprt_cnt")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCrewList); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/DarkActivityConfirmedReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/DarkActivityConfirmedReader.java new file mode 100644 index 0000000..b053cb4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/DarkActivityConfirmedReader.java @@ -0,0 +1,94 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.DarkActivityConfirmedDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class DarkActivityConfirmedReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public DarkActivityConfirmedReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + 
public DarkActivityConfirmedDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceDarkActivityConfirmed), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[DarkActivityConfirmedReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceDarkActivityConfirmed); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return DarkActivityConfirmedDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .mmsiNo(rs.getString("mmsi_no")) + .darkHr(rs.getObject("dark_hr", Long.class)) + .darkActvStatus(rs.getObject("dark_actv_status", Long.class)) + .shipNm(rs.getString("ship_nm")) + .darkActv(rs.getString("dark_actv")) + .zoneId(rs.getObject("zone_id", Long.class)) + .zoneNm(rs.getString("zone_nm")) + .zoneCountry(rs.getString("zone_country")) + .darkTmUtc(rs.getTimestamp("dark_tm_utc") != null ? rs.getTimestamp("dark_tm_utc").toLocalDateTime() : null) + .darkLat(rs.getObject("dark_lat", Double.class)) + .darkLon(rs.getObject("dark_lon", Double.class)) + .darkSpd(rs.getObject("dark_spd", Double.class)) + .darkHeading(rs.getObject("dark_heading", Double.class)) + .darkDraft(rs.getObject("dark_draft", Double.class)) + .nxtCptrTmUtc(rs.getTimestamp("nxt_cptr_tm_utc") != null ? 
rs.getTimestamp("nxt_cptr_tm_utc").toLocalDateTime() : null) + .nxtCptrSpd(rs.getObject("nxt_cptr_spd", Double.class)) + .nxtCptrDraft(rs.getObject("nxt_cptr_draft", Double.class)) + .nxtCptrHeading(rs.getObject("nxt_cptr_heading", Double.class)) + .darkRptDestAis(rs.getString("dark_rpt_dest_ais")) + .lastPrtcllPort(rs.getString("last_prtcll_port")) + .lastPoccntryCd(rs.getString("last_poccntry_cd")) + .lastPoccntry(rs.getString("last_poccntry")) + .nxtCptrLat(rs.getObject("nxt_cptr_lat", Double.class)) + .nxtCptrLon(rs.getObject("nxt_cptr_lon", Double.class)) + .nxtCptrRptDestAis(rs.getString("nxt_cptr_rpt_dest_ais")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceDarkActivityConfirmed); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/FlagHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/FlagHistoryReader.java new file mode 100644 index 0000000..be404c3 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/FlagHistoryReader.java @@ -0,0 +1,73 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.FlagHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class FlagHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public 
FlagHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public FlagHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceFlagHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[FlagHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFlagHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return FlagHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipCountryHstrySeq(rs.getString("ship_country_hstry_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .countryCd(rs.getString("country_cd")) + .country(rs.getString("country")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFlagHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/GroupBeneficialOwnerHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/GroupBeneficialOwnerHistoryReader.java new file mode 100644 index 0000000..a59d587 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/GroupBeneficialOwnerHistoryReader.java @@ -0,0 +1,74 @@ +package 
com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.GroupBeneficialOwnerHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class GroupBeneficialOwnerHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public GroupBeneficialOwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public GroupBeneficialOwnerHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[GroupBeneficialOwnerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return GroupBeneficialOwnerHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + 
.shipGroupRevnOwnrHstrySeq(rs.getString("ship_group_revn_ownr_hstry_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .groupActlOwnrCd(rs.getString("group_actl_ownr_cd")) + .groupActlOwnr(rs.getString("group_actl_ownr")) + .companyStatus(rs.getString("company_status")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/IceClassReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/IceClassReader.java new file mode 100644 index 0000000..232ecdb --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/IceClassReader.java @@ -0,0 +1,71 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.IceClassDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class IceClassReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public IceClassReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public IceClassDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if 
(allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceIceClass), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[IceClassReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceIceClass); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return IceClassDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .iceGrdCd(rs.getString("ice_grd_cd")) + .iceGrd(rs.getString("ice_grd")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceIceClass); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/NameHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/NameHistoryReader.java new file mode 100644 index 0000000..07dba2c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/NameHistoryReader.java @@ -0,0 +1,72 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class 
NameHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public NameHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public NameHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceNameHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[NameHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceNameHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return NameHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipNmChgHstrySeq(rs.getString("ship_nm_chg_hstry_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .shipNm(rs.getString("ship_nm")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceNameHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/OperatorHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/OperatorHistoryReader.java new file mode 100644 index 0000000..c24687f --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/OperatorHistoryReader.java @@ -0,0 +1,74 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class OperatorHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public OperatorHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public OperatorHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceOperatorHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[OperatorHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOperatorHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return OperatorHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + 
.shipOperatorHstrySeq(rs.getString("ship_operator_hstry_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .shipOperatorCd(rs.getString("ship_operator_cd")) + .shipOperator(rs.getString("ship_operator")) + .companyStatus(rs.getString("company_status")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOperatorHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/OwnerHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/OwnerHistoryReader.java new file mode 100644 index 0000000..416a4c4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/OwnerHistoryReader.java @@ -0,0 +1,74 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class OwnerHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public OwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public OwnerHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if 
(allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceOwnerHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[OwnerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOwnerHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return OwnerHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipOwnrHstrySeq(rs.getString("ship_ownr_hstry_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .ownrCd(rs.getString("ownr_cd")) + .ownr(rs.getString("ownr")) + .companyStatus(rs.getString("company_status")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOwnerHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/PandIHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/PandIHistoryReader.java new file mode 100644 index 0000000..0733783 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/PandIHistoryReader.java @@ -0,0 +1,74 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import 
org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class PandIHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public PandIHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public PandIHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourcePandiHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[PandIHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePandiHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return PandIHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipPrtcRpnHstrySeq(rs.getString("ship_prtc_rpn_hstry_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .pniClubCd(rs.getString("pni_club_cd")) + .pniClubNm(rs.getString("pni_club_nm")) + .src(rs.getString("src")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePandiHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git 
a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SafetyManagementCertificateHistReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SafetyManagementCertificateHistReader.java new file mode 100644 index 0000000..9e9ecd2 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SafetyManagementCertificateHistReader.java @@ -0,0 +1,82 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class SafetyManagementCertificateHistReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public SafetyManagementCertificateHistReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public SafetyManagementCertificateHistDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceSafetyManagementCertificateHist), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[SafetyManagementCertificateHistReader] 다음 처리 대상 
ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSafetyManagementCertificateHist); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return SafetyManagementCertificateHistDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipSftyMngEvdcSeq(rs.getString("ship_sfty_mng_evdc_seq")) + .smgrcSrngEngines(rs.getString("smgrc_srng_engines")) + .smgrcSysCatConvArbt(rs.getString("smgrc_sys_cat_conv_arbt")) + .smgrcExpryDay(rs.getString("smgrc_expry_day")) + .smgrcIssueDay(rs.getString("smgrc_issue_day")) + .smgrcDoccCompany(rs.getString("smgrc_docc_company")) + .smgrcNtnlty(rs.getString("smgrc_ntnlty")) + .smgrcIssueEngines(rs.getString("smgrc_issue_engines")) + .smgrcEtcDesc(rs.getString("smgrc_etc_desc")) + .smgrcShipNm(rs.getString("smgrc_ship_nm")) + .smgrcShipType(rs.getString("smgrc_ship_type")) + .smgrcSrc(rs.getString("smgrc_src")) + .smgrcCompanyCd(rs.getString("smgrc_company_cd")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSafetyManagementCertificateHist); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipAddInfoReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipAddInfoReader.java new file mode 100644 index 0000000..e4f1efe --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipAddInfoReader.java @@ -0,0 +1,84 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class ShipAddInfoReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public ShipAddInfoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public ShipAddInfoDto read() throws Exception { + // 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나"의 데이터를 긁어옵니다. + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; // 진짜 데이터가 없으면 종료 + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + // 1. 아직 'N'인 최소 ID 하나를 찾음 + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceAdditionalShipsData), Long.class); + } catch (Exception e) { + return; // 대상 없음 + } + + if (nextTargetId != null) { + log.info("[ShipAddInfoReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + // 2. 
해당 ID의 데이터만 버퍼에 로드 + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceAdditionalShipsData); + final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로 + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return ShipAddInfoDto.builder() + .jobExecutionId(targetId) // job_execution_id 설정 + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipEml(rs.getString("ship_eml")) + .maxDpwt(rs.getString("max_dpwt")) + .maxDrillDepth(rs.getString("max_drill_depth")) + .drillBrg(rs.getString("drill_brg")) + .oceanProdFacility(rs.getString("ocean_prod_facility")) + .deckHeatExch(rs.getString("deck_heat_exch")) + .dehtexMatral(rs.getString("dehtex_matral")) + .portblTwinDeck(rs.getString("portbl_twin_deck")) + .fixedTwinDeck(rs.getString("fixed_twin_deck")) + .shipSatlitCommId(rs.getString("ship_satlit_comm_id")) + .shipSatlitCmrspCd(rs.getString("ship_satlit_cmrsp_cd")) + .build(); + }, nextTargetId); + + // 3. 해당 ID 'P'로 변경 + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceAdditionalShipsData); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipDataReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipDataReader.java new file mode 100644 index 0000000..8a3c792 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipDataReader.java @@ -0,0 +1,156 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import 
org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class ShipDataReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public ShipDataReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public ShipInfoMstDto read() throws Exception { + // 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나"의 데이터를 긁어옵니다. + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if (allDataBuffer.isEmpty()) { + return null; // 진짜 데이터가 없으면 종료 + } + + return allDataBuffer.remove(0); + } + + + private void fetchNextGroup() { + // 1. 아직 'N'인 최소 ID 하나를 찾음 + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject(CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipDetailData), Long.class); + } catch (Exception e) { + return; // 대상 없음 + } + + if (nextTargetId != null) { + log.info("[ShipDataReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + // 2. 
해당 ID의 데이터만 버퍼에 로드 + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipDetailData); + final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로 + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return ShipInfoMstDto.builder() + .jobExecutionId(targetId) // job_execution_id 설정 + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .mmsiNo(rs.getString("mmsi_no")) + .shipNm(rs.getString("ship_nm")) + .clsgnNo(rs.getString("clsgn_no")) + .frmlaRegNo(rs.getString("frmla_reg_no")) + .fshrPrmtNo(rs.getString("fshr_prmt_no")) + .shipNtnlty(rs.getString("ship_ntnlty")) + .ntnltyCd(rs.getString("ntnlty_cd")) + .loadPort(rs.getString("load_port")) + .clfic(rs.getString("clfic")) + .clficDesc(rs.getString("clfic_desc")) + .shipStatus(rs.getString("ship_status")) + .shipTypeGroup(rs.getString("ship_type_group")) + .shipTypeLvTwo(rs.getString("ship_type_lv_two")) + .shipTypeLvThr(rs.getString("ship_type_lv_thr")) + .shipTypeLvFour(rs.getString("ship_type_lv_four")) + .shipTypeLvFive(rs.getString("ship_type_lv_five")) + .shipTypeLvFiveDtldType(rs.getString("ship_type_lv_five_dtld_type")) + .shipTypeLvFiveHullType(rs.getString("ship_type_lv_five_hull_type")) + .shipTypeLvFiveLwrnkGroup(rs.getString("ship_type_lv_five_lwrnk_group")) + .buildYy(rs.getString("build_yy")) + .buildYmd(rs.getString("build_ymd")) + .shpyrd(rs.getString("shpyrd")) + .shpyrdOffclNm(rs.getString("shpyrd_offcl_nm")) + .shpyrdBuildNo(rs.getString("shpyrd_build_no")) + .buildDesc(rs.getString("build_desc")) + .modfHstryDesc(rs.getString("modf_hstry_desc")) + .whlnthLoa(rs.getString("whlnth_loa")) + .regLength(rs.getString("reg_length")) + .lbp(rs.getString("lbp")) + .formnBreadth(rs.getString("formn_breadth")) + .maxBreadth(rs.getString("max_breadth")) + .depth(rs.getString("depth")) + .draft(rs.getString("draft")) + .keelMastHg(rs.getString("keel_mast_hg")) + .bulbBow(rs.getString("bulb_bow")) + .gt(rs.getString("gt")) + 
.ntTon(rs.getString("nt_ton")) + .dwt(rs.getString("dwt")) + .displacement(rs.getString("displacement")) + .lightDisplacementTon(rs.getString("light_displacement_ton")) + .cgt(rs.getString("cgt")) + .fldngOneCmPerTonTpci(rs.getString("fldng_one_cm_per_ton_tpci")) + .tonEfectDay(rs.getString("ton_efect_day")) + .calcfrmDwt(rs.getString("calcfrm_dwt")) + .teuCnt(rs.getString("teu_cnt")) + .teuCapacity(rs.getString("teu_capacity")) + .grainCapacityM3(rs.getString("grain_capacity_m3")) + .baleCapacity(rs.getString("bale_capacity")) + .liquidCapacity(rs.getString("liquid_capacity")) + .gasM3(rs.getString("gas_m3")) + .insulatedM3(rs.getString("insulated_m3")) + .passengerCapacity(rs.getString("passenger_capacity")) + .bollardPull(rs.getString("bollard_pull")) + .svcSpd(rs.getString("svc_spd")) + .mainEngineType(rs.getString("main_engine_type")) + .fuelCnsmpSpdOne(rs.getString("fuel_cnsmp_spd_one")) + .fuelCnsmpamtValOne(rs.getString("fuel_cnsmpamt_val_one")) + .fuelCnsmpSpdTwo(rs.getString("fuel_cnsmp_spd_two")) + .fuelCnsmpamtValTwo(rs.getString("fuel_cnsmpamt_val_two")) + .totalFuelCapacityM3(rs.getString("total_fuel_capacity_m3")) + .blrMftr(rs.getString("blr_mftr")) + .proplrMftr(rs.getString("proplr_mftr")) + .cargoCapacityM3Desc(rs.getString("cargo_capacity_m3_desc")) + .eqpmntDesc(rs.getString("eqpmnt_desc")) + .hdn(rs.getString("hdn")) + .hatcheDesc(rs.getString("hatche_desc")) + .laneDoorRampDesc(rs.getString("lane_door_ramp_desc")) + .spcTankDesc(rs.getString("spc_tank_desc")) + .tankDesc(rs.getString("tank_desc")) + .prmovrDesc(rs.getString("prmovr_desc")) + .prmovrOvrvwDesc(rs.getString("prmovr_ovrvw_desc")) + .auxDesc(rs.getString("aux_desc")) + .asstGnrtrDesc(rs.getString("asst_gnrtr_desc")) + .fuelDesc(rs.getString("fuel_desc")) + .docCompanyCd(rs.getString("doc_company_cd")) + .groupActlOwnrCompanyCd(rs.getString("group_actl_ownr_company_cd")) + .operator(rs.getString("operator")) + .operatorCompanyCd(rs.getString("operator_company_cd")) + 
.shipMngrCompanyCd(rs.getString("ship_mngr_company_cd")) + .techMngrCd(rs.getString("tech_mngr_cd")) + .regShponrCd(rs.getString("reg_shponr_cd")) + .lastMdfcnDt(rs.getString("last_mdfcn_dt")) + .build(); + }, nextTargetId); + + // 3. 해당 ID 'P'로 변경 + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipDetailData); + businessJdbcTemplate.update(sql, targetExecutionId); + } + +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipManagerHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipManagerHistoryReader.java new file mode 100644 index 0000000..452bf3e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ShipManagerHistoryReader.java @@ -0,0 +1,74 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class ShipManagerHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public ShipManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public ShipManagerHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + + if 
(allDataBuffer.isEmpty()) { + return null; + } + + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipManagerHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[ShipManagerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipManagerHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return ShipManagerHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipMngCompanySeq(rs.getString("ship_mng_company_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .shipMngrCd(rs.getString("ship_mngr_cd")) + .shipMngr(rs.getString("ship_mngr")) + .companyStatus(rs.getString("company_status")) + .build(); + }, nextTargetId); + + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipManagerHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SisterShipLinksReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SisterShipLinksReader.java new file mode 100644 index 0000000..6bd9ab7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SisterShipLinksReader.java @@ -0,0 +1,66 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import 
org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class SisterShipLinksReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public SisterShipLinksReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public SisterShipLinksDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceSisterShipLinks), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[SisterShipLinksReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSisterShipLinks); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return SisterShipLinksDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .linkImoNo(rs.getString("link_imo_no")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSisterShipLinks); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SpecialFeatureReader.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SpecialFeatureReader.java new file mode 100644 index 0000000..0032157 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SpecialFeatureReader.java @@ -0,0 +1,68 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class SpecialFeatureReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public SpecialFeatureReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public SpecialFeatureDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceSpecialFeature), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[SpecialFeatureReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSpecialFeature); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return 
SpecialFeatureDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipSpcFetrSeq(rs.getString("ship_spc_fetr_seq")) + .spcMttrCd(rs.getString("spc_mttr_cd")) + .spcMttr(rs.getString("spc_mttr")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSpecialFeature); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/StatusHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/StatusHistoryReader.java new file mode 100644 index 0000000..ef55147 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/StatusHistoryReader.java @@ -0,0 +1,69 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class StatusHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public StatusHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public StatusHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if 
(allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceStatusHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[StatusHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStatusHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return StatusHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipStatusHstrySeq(rs.getString("ship_status_hstry_seq")) + .statusCd(rs.getString("status_cd")) + .statusChgYmd(rs.getString("status_chg_ymd")) + .status(rs.getString("status")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStatusHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/StowageCommodityReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/StowageCommodityReader.java new file mode 100644 index 0000000..416b5ef --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/StowageCommodityReader.java @@ -0,0 +1,70 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import 
org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class StowageCommodityReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public StowageCommodityReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public StowageCommodityDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceStowageCommodity), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[StowageCommodityReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStowageCommodity); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return StowageCommodityDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipCargoCapacitySeq(rs.getString("ship_cargo_capacity_seq")) + .capacityCd(rs.getString("capacity_cd")) + .capacityCdDesc(rs.getString("capacity_cd_desc")) + .cargoCd(rs.getString("cargo_cd")) + .cargoNm(rs.getString("cargo_nm")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStowageCommodity); + businessJdbcTemplate.update(sql, 
targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SurveyDatesHistoryUniqueReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SurveyDatesHistoryUniqueReader.java new file mode 100644 index 0000000..d30a5ff --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SurveyDatesHistoryUniqueReader.java @@ -0,0 +1,69 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class SurveyDatesHistoryUniqueReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public SurveyDatesHistoryUniqueReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public SurveyDatesHistoryUniqueDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[SurveyDatesHistoryUniqueReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + 
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return SurveyDatesHistoryUniqueDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .clficCd(rs.getString("clfic_cd")) + .inspectionType(rs.getString("inspection_type")) + .inspectionYmd(rs.getString("inspection_ymd")) + .clfic(rs.getString("clfic")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SurveyDatesReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SurveyDatesReader.java new file mode 100644 index 0000000..5611cc6 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/SurveyDatesReader.java @@ -0,0 +1,72 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class SurveyDatesReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public SurveyDatesReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo 
tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public SurveyDatesDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDates), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[SurveyDatesReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDates); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return SurveyDatesDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .clficCd(rs.getString("clfic_cd")) + .clfic(rs.getString("clfic")) + .dckngInspection(rs.getString("dckng_inspection")) + .fxtmInspection(rs.getString("fxtm_inspection")) + .annualInspection(rs.getString("annual_inspection")) + .mchnFxtmInspectionYmd(rs.getString("mchn_fxtm_inspection_ymd")) + .tlsftInspectionYmd(rs.getString("tlsft_inspection_ymd")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDates); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/TbCompanyDetailReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/TbCompanyDetailReader.java new file mode 100644 index 0000000..9ec5fb3 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/TbCompanyDetailReader.java @@ -0,0 
+1,93 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class TbCompanyDetailReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public TbCompanyDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public TbCompanyDetailDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyDetail), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[TbCompanyDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyDetail); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return TbCompanyDetailDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .companyCd(rs.getString("company_cd")) + .lastUpdYmd(rs.getString("last_upd_ymd")) + .careCd(rs.getString("care_cd")) + 
.companyStatus(rs.getString("company_status")) + .fullNm(rs.getString("full_nm")) + .companyNameAbbr(rs.getString("company_name_abbr")) + .companyFndnYmd(rs.getString("company_fndn_ymd")) + .prntCompanyCd(rs.getString("prnt_company_cd")) + .countryNm(rs.getString("country_nm")) + .ctyNm(rs.getString("cty_nm")) + .oaAddr(rs.getString("oa_addr")) + .emlAddr(rs.getString("eml_addr")) + .tel(rs.getString("tel")) + .faxNo(rs.getString("fax_no")) + .wbstUrl(rs.getString("wbst_url")) + .countryCtrl(rs.getString("country_ctrl")) + .countryCtrlCd(rs.getString("country_ctrl_cd")) + .countryReg(rs.getString("country_reg")) + .countryRegCd(rs.getString("country_reg_cd")) + .regionCd(rs.getString("region_cd")) + .distNm(rs.getString("dist_nm")) + .distNo(rs.getString("dist_no")) + .mailAddrRear(rs.getString("mail_addr_rear")) + .mailAddrFrnt(rs.getString("mail_addr_frnt")) + .poBox(rs.getString("po_box")) + .dtlAddrOne(rs.getString("dtl_addr_one")) + .dtlAddrTwo(rs.getString("dtl_addr_two")) + .dtlAddrThr(rs.getString("dtl_addr_thr")) + .tlx(rs.getString("tlx")) + .build(); + }, nextTargetId); + updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyDetail); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/TechnicalManagerHistoryReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/TechnicalManagerHistoryReader.java new file mode 100644 index 0000000..1b07d39 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/TechnicalManagerHistoryReader.java @@ -0,0 +1,70 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.TechnicalManagerHistoryDto; +import 
lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class TechnicalManagerHistoryReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public TechnicalManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public TechnicalManagerHistoryDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceTechnicalManagerHistory), Long.class); + } catch (Exception e) { + return; + } + + if (nextTargetId != null) { + log.info("[TechnicalManagerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); + String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTechnicalManagerHistory); + final Long targetId = nextTargetId; + this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { + return TechnicalManagerHistoryDto.builder() + .jobExecutionId(targetId) + .datasetVer(rs.getString("dataset_ver")) + .imoNo(rs.getString("imo_no")) + .shipTechMngCompanySeq(rs.getString("ship_tech_mng_company_seq")) + .efectStaDay(rs.getString("efect_sta_day")) + .techMngrCd(rs.getString("tech_mngr_cd")) + .techMngr(rs.getString("tech_mngr")) + .companyStatus(rs.getString("company_status")) + .build(); + }, nextTargetId); + 
updateBatchProcessing(nextTargetId); + } + } + + private void updateBatchProcessing(Long targetExecutionId) { + String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTechnicalManagerHistory); + businessJdbcTemplate.update(sql, targetExecutionId); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ThrustersReader.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ThrustersReader.java new file mode 100644 index 0000000..c25a0dc --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/reader/ThrustersReader.java @@ -0,0 +1,73 @@ +package com.snp.batch.jobs.datasync.batch.ship.reader; + +import com.snp.batch.common.util.CommonSql; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.dto.ThrustersDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.item.ItemReader; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +public class ThrustersReader implements ItemReader { + private final TableMetaInfo tableMetaInfo; + private final JdbcTemplate businessJdbcTemplate; + private List allDataBuffer = new ArrayList<>(); + + public ThrustersReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { + this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); + this.tableMetaInfo = tableMetaInfo; + } + + @Override + public ThrustersDto read() throws Exception { + if (allDataBuffer.isEmpty()) { + fetchNextGroup(); + } + if (allDataBuffer.isEmpty()) { + return null; + } + return allDataBuffer.remove(0); + } + + private void fetchNextGroup() { + Long nextTargetId = null; + try { + nextTargetId = businessJdbcTemplate.queryForObject( + CommonSql.getNextTargetQuery(tableMetaInfo.sourceThrusters), Long.class); + } catch (Exception e) { + return; + } 
+
+        if (nextTargetId != null) {
+            log.info("[ThrustersReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
+            String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceThrusters);
+            final Long targetId = nextTargetId;
+            // Load every staged thrusters row for this execution id into the in-memory buffer.
+            this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
+                return ThrustersDto.builder()
+                        .jobExecutionId(targetId)
+                        .datasetVer(rs.getString("dataset_ver"))
+                        .imoNo(rs.getString("imo_no"))
+                        .thrstrSeq(rs.getString("thrstr_seq"))
+                        .thrstrTypeCd(rs.getString("thrstr_type_cd"))
+                        .thrstrType(rs.getString("thrstr_type"))
+                        .thrstrCnt(rs.getBigDecimal("thrstr_cnt"))
+                        .thrstrPosition(rs.getString("thrstr_position"))
+                        .thrstrPowerBhp(rs.getBigDecimal("thrstr_power_bhp"))
+                        .thrstrPowerKw(rs.getBigDecimal("thrstr_power_kw"))
+                        .instlMth(rs.getString("instl_mth"))
+                        .build();
+            }, nextTargetId);
+            updateBatchProcessing(nextTargetId);
+        }
+    }
+
+    // Marks the staged rows of the given execution id as picked up for processing.
+    // NOTE(review): the exact update semantics live in CommonSql.getProcessBatchQuery — confirm there.
+    private void updateBatchProcessing(Long targetExecutionId) {
+        String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceThrusters);
+        businessJdbcTemplate.update(sql, targetExecutionId);
+    }
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipDataSql.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipDataSql.java
new file mode 100644
index 0000000..e5c0d06
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipDataSql.java
@@ -0,0 +1,861 @@
+package com.snp.batch.jobs.datasync.batch.ship.repository;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+/**
+ * Builds PostgreSQL UPSERT (INSERT ... ON CONFLICT ... DO UPDATE) statements for the
+ * ship data-sync tables. The configured target schema and the caller-supplied table
+ * name are interpolated via String.formatted; all row values are bound as ? parameters
+ * by ShipRepositoryImpl, in the exact column order listed in each statement.
+ *
+ * NOTE(review): TARGET_SCHEMA is a *static* field assigned from the *instance*
+ * constructor (Spring injects app.batch.target-schema.name). Any static
+ * getXxxUpsertSql(...) call made before Spring constructs this bean would format a
+ * null schema name — confirm initialization order or drop the static scope.
+ */
+@Component
+public class ShipDataSql {
+    private static String TARGET_SCHEMA;
+    public ShipDataSql(@Value("${app.batch.target-schema.name}") String targetSchema) {
+        TARGET_SCHEMA = targetSchema;
+    }
+
+    // Ship master upsert; conflict key: imo_no.
+    // NOTE(review): unlike every other statement here, creatr_id is hard-coded 'SYSTEM'
+    // instead of being bound as the first ? — intentional? verify against bindShipInfoMst.
+    public static String getShipInfoMstUpsertSql (String targetTable){
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id, dataset_ver, imo_no,
+                    mmsi_no, ship_nm, clsgn_no, frmla_reg_no, fshr_prmt_no,
+                    ship_ntnlty, ntnlty_cd, load_port, clfic, clfic_desc, ship_status,
+                    ship_type_group, ship_type_lv_two, ship_type_lv_thr, ship_type_lv_four, ship_type_lv_five,
+                    ship_type_lv_five_dtld_type, ship_type_lv_five_hull_type, ship_type_lv_five_lwrnk_group, build_yy,
+                    build_ymd, shpyrd, shpyrd_offcl_nm, shpyrd_build_no, build_desc,
+                    modf_hstry_desc, whlnth_loa, reg_length, lbp,
+                    formn_breadth, max_breadth, depth, draft, keel_mast_hg, bulb_bow, gt,
+                    nt_ton, dwt, displacement, light_displacement_ton, cgt,
+                    fldng_one_cm_per_ton_tpci, ton_efect_day, calcfrm_dwt, teu_cnt, teu_capacity,
+                    grain_capacity_m3, bale_capacity, liquid_capacity, gas_m3, insulated_m3, passenger_capacity,
+                    bollard_pull, svc_spd, main_engine_type, fuel_cnsmp_spd_one, fuel_cnsmpamt_val_one,
+                    fuel_cnsmp_spd_two, fuel_cnsmpamt_val_two, total_fuel_capacity_m3, blr_mftr, proplr_mftr,
+                    cargo_capacity_m3_desc, eqpmnt_desc, hdn, hatche_desc,
+                    lane_door_ramp_desc, spc_tank_desc, tank_desc, prmovr_desc,
+                    prmovr_ovrvw_desc, aux_desc, asst_gnrtr_desc,
+                    fuel_desc, doc_company_cd, group_actl_ownr_company_cd,
+                    operator, operator_company_cd, ship_mngr_company_cd, tech_mngr_cd, reg_shponr_cd, last_mdfcn_dt
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, 'SYSTEM', ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?,
+                    ?, ?, ?,
+                    ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    mmsi_no = EXCLUDED.mmsi_no,
+                    ship_nm = EXCLUDED.ship_nm,
+                    clsgn_no = EXCLUDED.clsgn_no,
+                    frmla_reg_no = EXCLUDED.frmla_reg_no,
+                    fshr_prmt_no = EXCLUDED.fshr_prmt_no,
+                    ship_ntnlty = EXCLUDED.ship_ntnlty,
+                    ntnlty_cd = EXCLUDED.ntnlty_cd,
+                    load_port = EXCLUDED.load_port,
+                    clfic = EXCLUDED.clfic,
+                    clfic_desc = EXCLUDED.clfic_desc,
+                    ship_status = EXCLUDED.ship_status,
+                    ship_type_group = EXCLUDED.ship_type_group,
+                    ship_type_lv_two = EXCLUDED.ship_type_lv_two,
+                    ship_type_lv_thr = EXCLUDED.ship_type_lv_thr,
+                    ship_type_lv_four = EXCLUDED.ship_type_lv_four,
+                    ship_type_lv_five = EXCLUDED.ship_type_lv_five,
+                    ship_type_lv_five_dtld_type = EXCLUDED.ship_type_lv_five_dtld_type,
+                    ship_type_lv_five_hull_type = EXCLUDED.ship_type_lv_five_hull_type,
+                    ship_type_lv_five_lwrnk_group = EXCLUDED.ship_type_lv_five_lwrnk_group,
+                    build_yy = EXCLUDED.build_yy,
+                    build_ymd = EXCLUDED.build_ymd,
+                    shpyrd = EXCLUDED.shpyrd,
+                    shpyrd_offcl_nm = EXCLUDED.shpyrd_offcl_nm,
+                    shpyrd_build_no = EXCLUDED.shpyrd_build_no,
+                    build_desc = EXCLUDED.build_desc,
+                    modf_hstry_desc = EXCLUDED.modf_hstry_desc,
+                    whlnth_loa = EXCLUDED.whlnth_loa,
+                    reg_length = EXCLUDED.reg_length,
+                    lbp = EXCLUDED.lbp,
+                    formn_breadth = EXCLUDED.formn_breadth,
+                    max_breadth = EXCLUDED.max_breadth,
+                    depth = EXCLUDED.depth,
+                    draft = EXCLUDED.draft,
+                    keel_mast_hg = EXCLUDED.keel_mast_hg,
+                    bulb_bow = EXCLUDED.bulb_bow,
+                    gt = EXCLUDED.gt,
+                    nt_ton = EXCLUDED.nt_ton,
+                    dwt = EXCLUDED.dwt,
+                    displacement = EXCLUDED.displacement,
+                    light_displacement_ton = EXCLUDED.light_displacement_ton,
+                    cgt = EXCLUDED.cgt,
+                    fldng_one_cm_per_ton_tpci = EXCLUDED.fldng_one_cm_per_ton_tpci,
+                    ton_efect_day = EXCLUDED.ton_efect_day,
+                    calcfrm_dwt = EXCLUDED.calcfrm_dwt,
+                    teu_cnt = EXCLUDED.teu_cnt,
+                    teu_capacity = EXCLUDED.teu_capacity,
+                    grain_capacity_m3 = EXCLUDED.grain_capacity_m3,
+                    bale_capacity = EXCLUDED.bale_capacity,
+                    liquid_capacity = EXCLUDED.liquid_capacity,
+                    gas_m3 = EXCLUDED.gas_m3,
+                    insulated_m3 = EXCLUDED.insulated_m3,
+                    passenger_capacity = EXCLUDED.passenger_capacity,
+                    bollard_pull = EXCLUDED.bollard_pull,
+                    svc_spd = EXCLUDED.svc_spd,
+                    main_engine_type = EXCLUDED.main_engine_type,
+                    fuel_cnsmp_spd_one = EXCLUDED.fuel_cnsmp_spd_one,
+                    fuel_cnsmpamt_val_one = EXCLUDED.fuel_cnsmpamt_val_one,
+                    fuel_cnsmp_spd_two = EXCLUDED.fuel_cnsmp_spd_two,
+                    fuel_cnsmpamt_val_two = EXCLUDED.fuel_cnsmpamt_val_two,
+                    total_fuel_capacity_m3 = EXCLUDED.total_fuel_capacity_m3,
+                    blr_mftr = EXCLUDED.blr_mftr,
+                    proplr_mftr = EXCLUDED.proplr_mftr,
+                    cargo_capacity_m3_desc = EXCLUDED.cargo_capacity_m3_desc,
+                    eqpmnt_desc = EXCLUDED.eqpmnt_desc,
+                    hdn = EXCLUDED.hdn,
+                    hatche_desc = EXCLUDED.hatche_desc,
+                    lane_door_ramp_desc = EXCLUDED.lane_door_ramp_desc,
+                    spc_tank_desc = EXCLUDED.spc_tank_desc,
+                    tank_desc = EXCLUDED.tank_desc,
+                    prmovr_desc = EXCLUDED.prmovr_desc,
+                    prmovr_ovrvw_desc = EXCLUDED.prmovr_ovrvw_desc,
+                    aux_desc = EXCLUDED.aux_desc,
+                    asst_gnrtr_desc = EXCLUDED.asst_gnrtr_desc,
+                    fuel_desc = EXCLUDED.fuel_desc,
+                    doc_company_cd = EXCLUDED.doc_company_cd,
+                    group_actl_ownr_company_cd = EXCLUDED.group_actl_ownr_company_cd,
+                    operator = EXCLUDED.operator,
+                    operator_company_cd = EXCLUDED.operator_company_cd,
+                    ship_mngr_company_cd = EXCLUDED.ship_mngr_company_cd,
+                    tech_mngr_cd = EXCLUDED.tech_mngr_cd,
+                    reg_shponr_cd = EXCLUDED.reg_shponr_cd,
+                    last_mdfcn_dt = EXCLUDED.last_mdfcn_dt;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Denormalized "main info" summary upsert; conflict key: imo_no (creatr_id also hard-coded 'SYSTEM').
+    public static String getShipMainInfoUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    imo_no, mmsi_no, ship_nm, clsgn_no, country_nm,
+                    ship_reg_hrbr, clfic_asctn_nm, ship_knd_lv_five, ship_knd_dtl_lv_five,
+                    ship_build_yy, shpyrd_nm, ship_whlnth, ship_molbth, ship_depth, ship_draft,
+                    ship_total_ton, dwt, cntnr_units, svc_crspd, main_engine_fom,
+                    ship_status, ship_operator, ship_country_cd, ship_knd_lv_two, cargo_type,
+                    last_mdfcn_dt
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, 'SYSTEM',
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?
+                )
+                ON CONFLICT (imo_no)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    mmsi_no = EXCLUDED.mmsi_no,
+                    ship_nm = EXCLUDED.ship_nm,
+                    clsgn_no = EXCLUDED.clsgn_no,
+                    country_nm = EXCLUDED.country_nm,
+                    ship_reg_hrbr = EXCLUDED.ship_reg_hrbr,
+                    clfic_asctn_nm = EXCLUDED.clfic_asctn_nm,
+                    ship_knd_lv_five = EXCLUDED.ship_knd_lv_five,
+                    ship_knd_dtl_lv_five = EXCLUDED.ship_knd_dtl_lv_five,
+                    ship_build_yy = EXCLUDED.ship_build_yy,
+                    shpyrd_nm = EXCLUDED.shpyrd_nm,
+                    ship_whlnth = EXCLUDED.ship_whlnth,
+                    ship_molbth = EXCLUDED.ship_molbth,
+                    ship_depth = EXCLUDED.ship_depth,
+                    ship_draft = EXCLUDED.ship_draft,
+                    ship_total_ton = EXCLUDED.ship_total_ton,
+                    dwt = EXCLUDED.dwt,
+                    cntnr_units = EXCLUDED.cntnr_units,
+                    svc_crspd = EXCLUDED.svc_crspd,
+                    main_engine_fom = EXCLUDED.main_engine_fom,
+                    ship_status = EXCLUDED.ship_status,
+                    ship_operator = EXCLUDED.ship_operator,
+                    ship_country_cd = EXCLUDED.ship_country_cd,
+                    ship_knd_lv_two = EXCLUDED.ship_knd_lv_two,
+                    cargo_type = EXCLUDED.cargo_type,
+                    last_mdfcn_dt = EXCLUDED.last_mdfcn_dt;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+
+    // Additional ship attributes upsert; conflict key: imo_no (creatr_id bound as first ?).
+    public static String getShipAddInfoUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_eml, max_dpwt, max_drill_depth,
+                    drill_brg, ocean_prod_facility, deck_heat_exch, dehtex_matral,
+                    portbl_twin_deck, fixed_twin_deck, ship_satlit_comm_id, ship_satlit_cmrsp_cd
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    ship_eml = EXCLUDED.ship_eml,
+                    max_dpwt = EXCLUDED.max_dpwt,
+                    max_drill_depth = EXCLUDED.max_drill_depth,
+                    drill_brg = EXCLUDED.drill_brg,
+                    ocean_prod_facility = EXCLUDED.ocean_prod_facility,
+                    deck_heat_exch = EXCLUDED.deck_heat_exch,
+                    dehtex_matral = EXCLUDED.dehtex_matral,
+                    portbl_twin_deck = EXCLUDED.portbl_twin_deck,
+                    fixed_twin_deck = EXCLUDED.fixed_twin_deck,
+                    ship_satlit_comm_id = EXCLUDED.ship_satlit_comm_id,
+                    ship_satlit_cmrsp_cd = EXCLUDED.ship_satlit_cmrsp_cd;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Bareboat charter history upsert; conflict key: (imo_no, bbctr_seq, efect_sta_day, bbctr_company_cd).
+    public static String getBareboatCharterHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, bbctr_seq, efect_sta_day, bbctr_company_cd, bbctr_company
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, bbctr_seq, efect_sta_day, bbctr_company_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    bbctr_company = EXCLUDED.bbctr_company;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Callsign/MMSI identity history upsert; conflict key: (imo_no, ship_idntf_seq, efect_sta_day).
+    public static String getCallsignAndMmsiHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_idntf_seq, efect_sta_day, clsgn_no, mmsi_no
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_idntf_seq, efect_sta_day)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    clsgn_no = EXCLUDED.clsgn_no,
+                    mmsi_no = EXCLUDED.mmsi_no;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Classification-society history upsert; conflict key: (imo_no, clfic_hstry_seq, efect_sta_day, clfic_cd).
+    public static String getClassHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, clfic_hstry_seq, efect_sta_day, clfic_cd,
+                    clfic_id, clfic_asctn_nm, clfic_has_yn, now_yn
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, clfic_hstry_seq, efect_sta_day, clfic_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    clfic_id = EXCLUDED.clfic_id,
+                    clfic_asctn_nm = EXCLUDED.clfic_asctn_nm,
+                    clfic_has_yn = EXCLUDED.clfic_has_yn,
+                    now_yn = EXCLUDED.now_yn;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Company-vessel relationship upsert; conflict key: imo_no.
+    public static String getCompanyVesselRelationshipsUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, docc_has_company_cd, docc_has_company, group_actl_ownr,
+                    group_actl_ownr_cd, ship_operator, ship_operator_cd, rg_ownr,
+                    rg_ownr_cd, ship_mng_company, ship_mng_company_cd, tech_mng_company,
+                    tech_mng_company_cd, docc_group, docc_group_cd, ship_operator_group,
+                    ship_operator_group_cd, ship_mng_company_group, ship_mng_company_group_cd,
+                    tech_mng_company_group, tech_mng_company_group_cd
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?,
+                    ?, ?
+                )
+                ON CONFLICT (imo_no)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    docc_has_company_cd = EXCLUDED.docc_has_company_cd,
+                    docc_has_company = EXCLUDED.docc_has_company,
+                    group_actl_ownr = EXCLUDED.group_actl_ownr,
+                    group_actl_ownr_cd = EXCLUDED.group_actl_ownr_cd,
+                    ship_operator = EXCLUDED.ship_operator,
+                    ship_operator_cd = EXCLUDED.ship_operator_cd,
+                    rg_ownr = EXCLUDED.rg_ownr,
+                    rg_ownr_cd = EXCLUDED.rg_ownr_cd,
+                    ship_mng_company = EXCLUDED.ship_mng_company,
+                    ship_mng_company_cd = EXCLUDED.ship_mng_company_cd,
+                    tech_mng_company = EXCLUDED.tech_mng_company,
+                    tech_mng_company_cd = EXCLUDED.tech_mng_company_cd,
+                    docc_group = EXCLUDED.docc_group,
+                    docc_group_cd = EXCLUDED.docc_group_cd,
+                    ship_operator_group = EXCLUDED.ship_operator_group,
+                    ship_operator_group_cd = EXCLUDED.ship_operator_group_cd,
+                    ship_mng_company_group = EXCLUDED.ship_mng_company_group,
+                    ship_mng_company_group_cd = EXCLUDED.ship_mng_company_group_cd,
+                    tech_mng_company_group = EXCLUDED.tech_mng_company_group,
+                    tech_mng_company_group_cd = EXCLUDED.tech_mng_company_group_cd;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Crew list upsert; conflict key: (imo_no, crew_id, ship_nm, ntnlty).
+    public static String getCrewListUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, crew_id, ship_nm, ntnlty,
+                    crew_rstr_ymd, oa_crew_cnt, gen_crew_cnt, offcr_cnt,
+                    appr_offcr_cnt, trne_cnt, embrk_mntnc_crew_cnt, unrprt_cnt
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, crew_id, ship_nm, ntnlty)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    crew_rstr_ymd = EXCLUDED.crew_rstr_ymd,
+                    oa_crew_cnt = EXCLUDED.oa_crew_cnt,
+                    gen_crew_cnt = EXCLUDED.gen_crew_cnt,
+                    offcr_cnt = EXCLUDED.offcr_cnt,
+                    appr_offcr_cnt = EXCLUDED.appr_offcr_cnt,
+                    trne_cnt = EXCLUDED.trne_cnt,
+                    embrk_mntnc_crew_cnt = EXCLUDED.embrk_mntnc_crew_cnt,
+                    unrprt_cnt = EXCLUDED.unrprt_cnt;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Confirmed AIS dark-activity upsert; conflict key: (imo_no, mmsi_no, dark_tm_utc, dark_actv_status).
+    public static String getDarkActivityConfirmedUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, mmsi_no, dark_hr, dark_actv_status,
+                    ship_nm, dark_actv, zone_id, zone_nm, zone_country,
+                    dark_tm_utc, dark_lat, dark_lon, dark_spd, dark_heading,
+                    dark_draft, nxt_cptr_tm_utc, nxt_cptr_spd, nxt_cptr_draft,
+                    nxt_cptr_heading, dark_rpt_dest_ais, last_prtcll_port,
+                    last_poccntry_cd, last_poccntry, nxt_cptr_lat,
+                    nxt_cptr_lon, nxt_cptr_rpt_dest_ais
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?,
+                    ?, ?, ?,
+                    ?, ?
+                )
+                ON CONFLICT (imo_no, mmsi_no, dark_tm_utc, dark_actv_status)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    ship_nm = EXCLUDED.ship_nm,
+                    dark_actv = EXCLUDED.dark_actv,
+                    zone_id = EXCLUDED.zone_id,
+                    zone_nm = EXCLUDED.zone_nm,
+                    zone_country = EXCLUDED.zone_country,
+                    dark_tm_utc = EXCLUDED.dark_tm_utc,
+                    dark_lat = EXCLUDED.dark_lat,
+                    dark_lon = EXCLUDED.dark_lon,
+                    dark_spd = EXCLUDED.dark_spd,
+                    dark_heading = EXCLUDED.dark_heading,
+                    dark_draft = EXCLUDED.dark_draft,
+                    nxt_cptr_tm_utc = EXCLUDED.nxt_cptr_tm_utc,
+                    nxt_cptr_spd = EXCLUDED.nxt_cptr_spd,
+                    nxt_cptr_draft = EXCLUDED.nxt_cptr_draft,
+                    nxt_cptr_heading = EXCLUDED.nxt_cptr_heading,
+                    dark_rpt_dest_ais = EXCLUDED.dark_rpt_dest_ais,
+                    last_prtcll_port = EXCLUDED.last_prtcll_port,
+                    last_poccntry_cd = EXCLUDED.last_poccntry_cd,
+                    last_poccntry = EXCLUDED.last_poccntry,
+                    nxt_cptr_lat = EXCLUDED.nxt_cptr_lat,
+                    nxt_cptr_lon = EXCLUDED.nxt_cptr_lon,
+                    nxt_cptr_rpt_dest_ais = EXCLUDED.nxt_cptr_rpt_dest_ais;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Flag (registry country) history upsert; conflict key: (imo_no, ship_country_hstry_seq, efect_sta_day, country_cd).
+    public static String getFlagHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_country_hstry_seq, efect_sta_day, country_cd, country
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_country_hstry_seq, efect_sta_day, country_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    country = EXCLUDED.country;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Group beneficial-owner history upsert; conflict key: (imo_no, ship_group_revn_ownr_hstry_seq, efect_sta_day, group_actl_ownr_cd).
+    public static String getGroupBeneficialOwnerHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_group_revn_ownr_hstry_seq, efect_sta_day,
+                    group_actl_ownr_cd, group_actl_ownr, company_status
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_group_revn_ownr_hstry_seq, efect_sta_day, group_actl_ownr_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    group_actl_ownr = EXCLUDED.group_actl_ownr,
+                    company_status = EXCLUDED.company_status;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Ice-class grade upsert; conflict key: (imo_no, ice_grd_cd).
+    public static String getIceClassUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ice_grd_cd, ice_grd
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ice_grd_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    ice_grd = EXCLUDED.ice_grd;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Ship-name change history upsert; conflict key: (imo_no, ship_nm_chg_hstry_seq, efect_sta_day).
+    public static String getNameHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_nm_chg_hstry_seq, efect_sta_day, ship_nm
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_nm_chg_hstry_seq, efect_sta_day)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    ship_nm = EXCLUDED.ship_nm;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Operator history upsert; conflict key: (imo_no, ship_operator_hstry_seq, efect_sta_day, ship_operator_cd).
+    public static String getOperatorHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_operator_hstry_seq, efect_sta_day,
+                    ship_operator_cd, ship_operator, company_status
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_operator_hstry_seq, efect_sta_day, ship_operator_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    ship_operator = EXCLUDED.ship_operator,
+                    company_status = EXCLUDED.company_status;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Registered-owner history upsert; conflict key: (imo_no, ship_ownr_hstry_seq, efect_sta_day, ownr_cd).
+    public static String getOwnerHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_ownr_hstry_seq, efect_sta_day,
+                    ownr_cd, ownr, company_status
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_ownr_hstry_seq, efect_sta_day, ownr_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    ownr = EXCLUDED.ownr,
+                    company_status = EXCLUDED.company_status;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // P&I club history upsert; conflict key: (imo_no, ship_prtc_rpn_hstry_seq, pni_club_cd, efect_sta_day).
+    public static String getPandIHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_prtc_rpn_hstry_seq, efect_sta_day,
+                    pni_club_cd, pni_club_nm, src
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_prtc_rpn_hstry_seq, pni_club_cd, efect_sta_day)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    pni_club_nm = EXCLUDED.pni_club_nm,
+                    src = EXCLUDED.src;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Safety Management Certificate history upsert; conflict key: (imo_no, ship_sfty_mng_evdc_seq).
+    public static String getSafetyManagementCertificateHistUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_sfty_mng_evdc_seq,
+                    smgrc_srng_engines, smgrc_sys_cat_conv_arbt,
+                    smgrc_expry_day, smgrc_issue_day,
+                    smgrc_docc_company, smgrc_ntnlty,
+                    smgrc_issue_engines, smgrc_etc_desc,
+                    smgrc_ship_nm, smgrc_ship_type,
+                    smgrc_src, smgrc_company_cd
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?,
+                    ?, ?,
+                    ?, ?,
+                    ?, ?,
+                    ?, ?,
+                    ?, ?,
+                    ?, ?
+                )
+                ON CONFLICT (imo_no, ship_sfty_mng_evdc_seq)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    smgrc_srng_engines = EXCLUDED.smgrc_srng_engines,
+                    smgrc_sys_cat_conv_arbt = EXCLUDED.smgrc_sys_cat_conv_arbt,
+                    smgrc_expry_day = EXCLUDED.smgrc_expry_day,
+                    smgrc_issue_day = EXCLUDED.smgrc_issue_day,
+                    smgrc_docc_company = EXCLUDED.smgrc_docc_company,
+                    smgrc_ntnlty = EXCLUDED.smgrc_ntnlty,
+                    smgrc_issue_engines = EXCLUDED.smgrc_issue_engines,
+                    smgrc_etc_desc = EXCLUDED.smgrc_etc_desc,
+                    smgrc_ship_nm = EXCLUDED.smgrc_ship_nm,
+                    smgrc_ship_type = EXCLUDED.smgrc_ship_type,
+                    smgrc_src = EXCLUDED.smgrc_src,
+                    smgrc_company_cd = EXCLUDED.smgrc_company_cd;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Ship-manager history upsert; conflict key: (imo_no, ship_mng_company_seq, efect_sta_day, ship_mngr_cd).
+    public static String getShipManagerHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_mng_company_seq, efect_sta_day,
+                    ship_mngr_cd, ship_mngr, company_status
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_mng_company_seq, efect_sta_day, ship_mngr_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    ship_mngr = EXCLUDED.ship_mngr,
+                    company_status = EXCLUDED.company_status;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Sister-ship link upsert; conflict key: (imo_no, link_imo_no).
+    public static String getSisterShipLinksUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, link_imo_no
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?
+                )
+                ON CONFLICT (imo_no, link_imo_no)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Special-feature upsert; conflict key: (imo_no, ship_spc_fetr_seq, spc_mttr_cd).
+    public static String getSpecialFeatureUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_spc_fetr_seq, spc_mttr_cd, spc_mttr
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_spc_fetr_seq, spc_mttr_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    spc_mttr = EXCLUDED.spc_mttr;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Status history upsert; conflict key: (imo_no, ship_status_hstry_seq, status_cd, status_chg_ymd).
+    public static String getStatusHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_status_hstry_seq, status_cd, status_chg_ymd, status
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_status_hstry_seq, status_cd, status_chg_ymd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    status = EXCLUDED.status;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Stowage/commodity capacity upsert; conflict key: (imo_no, ship_cargo_capacity_seq, capacity_cd, cargo_cd).
+    public static String getStowageCommodityUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_cargo_capacity_seq, capacity_cd, capacity_cd_desc,
+                    cargo_cd, cargo_nm
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?
+                )
+                ON CONFLICT (imo_no, ship_cargo_capacity_seq, capacity_cd, cargo_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    capacity_cd_desc = EXCLUDED.capacity_cd_desc,
+                    cargo_nm = EXCLUDED.cargo_nm;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Latest survey dates per class society upsert; conflict key: (imo_no, clfic_cd).
+    public static String getSurveyDatesUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, clfic_cd, clfic,
+                    dckng_inspection, fxtm_inspection, annual_inspection,
+                    mchn_fxtm_inspection_ymd, tlsft_inspection_ymd
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?,
+                    ?, ?, ?,
+                    ?, ?
+                )
+                ON CONFLICT (imo_no, clfic_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    clfic = EXCLUDED.clfic,
+                    dckng_inspection = EXCLUDED.dckng_inspection,
+                    fxtm_inspection = EXCLUDED.fxtm_inspection,
+                    annual_inspection = EXCLUDED.annual_inspection,
+                    mchn_fxtm_inspection_ymd = EXCLUDED.mchn_fxtm_inspection_ymd,
+                    tlsft_inspection_ymd = EXCLUDED.tlsft_inspection_ymd;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Survey-date history (unique rows) upsert; conflict key: (imo_no, clfic_cd, inspection_type, inspection_ymd).
+    public static String getSurveyDatesHistoryUniqueUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, clfic_cd, inspection_type, inspection_ymd, clfic
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, clfic_cd, inspection_type, inspection_ymd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    clfic = EXCLUDED.clfic;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Technical-manager history upsert; conflict key: (imo_no, ship_tech_mng_company_seq, efect_sta_day, tech_mngr_cd).
+    public static String getTechnicalManagerHistoryUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, ship_tech_mng_company_seq, efect_sta_day,
+                    tech_mngr_cd, tech_mngr, company_status
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (imo_no, ship_tech_mng_company_seq, efect_sta_day, tech_mngr_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    tech_mngr = EXCLUDED.tech_mngr,
+                    company_status = EXCLUDED.company_status;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Thrusters upsert; conflict key: (imo_no, thrstr_seq).
+    public static String getThrustersUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, imo_no, thrstr_seq, thrstr_type_cd, thrstr_type,
+                    thrstr_cnt, thrstr_position, thrstr_power_bhp, thrstr_power_kw,
+                    instl_mth
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?,
+                    ?
+                )
+                ON CONFLICT (imo_no, thrstr_seq)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    thrstr_type_cd = EXCLUDED.thrstr_type_cd,
+                    thrstr_type = EXCLUDED.thrstr_type,
+                    thrstr_cnt = EXCLUDED.thrstr_cnt,
+                    thrstr_position = EXCLUDED.thrstr_position,
+                    thrstr_power_bhp = EXCLUDED.thrstr_power_bhp,
+                    thrstr_power_kw = EXCLUDED.thrstr_power_kw,
+                    instl_mth = EXCLUDED.instl_mth;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+    // Company detail upsert; conflict key: company_cd.
+    public static String getTbCompanyDetailUpsertSql(String targetTable) {
+        return """
+                INSERT INTO %s.%s (
+                    crt_dt, creatr_id,
+                    dataset_ver, company_cd, last_upd_ymd, care_cd, company_status,
+                    full_nm, company_name_abbr, company_fndn_ymd, prnt_company_cd, country_nm,
+                    cty_nm, oa_addr, eml_addr, tel, fax_no,
+                    wbst_url, country_ctrl, country_ctrl_cd,
+                    country_reg, country_reg_cd,
+                    region_cd, dist_nm, dist_no, mail_addr_rear, mail_addr_frnt,
+                    po_box, dtl_addr_one, dtl_addr_two, dtl_addr_thr, tlx
+                )
+                VALUES (
+                    CURRENT_TIMESTAMP, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?,
+                    ?, ?,
+                    ?, ?, ?, ?, ?,
+                    ?, ?, ?, ?, ?
+                )
+                ON CONFLICT (company_cd)
+                DO UPDATE SET
+                    mdfcn_dt = CURRENT_TIMESTAMP,
+                    mdfr_id = 'SYSTEM',
+                    dataset_ver = EXCLUDED.dataset_ver,
+                    last_upd_ymd = EXCLUDED.last_upd_ymd,
+                    care_cd = EXCLUDED.care_cd,
+                    company_status = EXCLUDED.company_status,
+                    full_nm = EXCLUDED.full_nm,
+                    company_name_abbr = EXCLUDED.company_name_abbr,
+                    company_fndn_ymd = EXCLUDED.company_fndn_ymd,
+                    prnt_company_cd = EXCLUDED.prnt_company_cd,
+                    country_nm = EXCLUDED.country_nm,
+                    cty_nm = EXCLUDED.cty_nm,
+                    oa_addr = EXCLUDED.oa_addr,
+                    eml_addr = EXCLUDED.eml_addr,
+                    tel = EXCLUDED.tel,
+                    fax_no = EXCLUDED.fax_no,
+                    wbst_url = EXCLUDED.wbst_url,
+                    country_ctrl = EXCLUDED.country_ctrl,
+                    country_ctrl_cd = EXCLUDED.country_ctrl_cd,
+                    country_reg = EXCLUDED.country_reg,
+                    country_reg_cd = EXCLUDED.country_reg_cd,
+                    region_cd = EXCLUDED.region_cd,
+                    dist_nm = EXCLUDED.dist_nm,
+                    dist_no = EXCLUDED.dist_no,
+                    mail_addr_rear = EXCLUDED.mail_addr_rear,
+                    mail_addr_frnt = EXCLUDED.mail_addr_frnt,
+                    po_box = EXCLUDED.po_box,
+                    dtl_addr_one = EXCLUDED.dtl_addr_one,
+                    dtl_addr_two = EXCLUDED.dtl_addr_two,
+                    dtl_addr_thr = EXCLUDED.dtl_addr_thr,
+                    tlx = EXCLUDED.tlx;
+                """.formatted(TARGET_SCHEMA, targetTable);
+    }
+
+}
diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipRepository.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipRepository.java
new file mode 100644
index 0000000..bfe8ea9
--- /dev/null
+++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipRepository.java
@@ -0,0 +1,64 @@
+package com.snp.batch.jobs.datasync.batch.ship.repository;
+
+import com.snp.batch.jobs.datasync.batch.ship.entity.BareboatCharterHistoryEntity;
+import com.snp.batch.jobs.datasync.batch.ship.entity.CallsignAndMmsiHistoryEntity;
+import com.snp.batch.jobs.datasync.batch.ship.entity.ClassHistoryEntity;
+import com.snp.batch.jobs.datasync.batch.ship.entity.CompanyVesselRelationshipsEntity;
+import 
com.snp.batch.jobs.datasync.batch.ship.entity.CrewListEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.DarkActivityConfirmedEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.FlagHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.GroupBeneficialOwnerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.IceClassEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.NameHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.OperatorHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.OwnerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.PandIHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SafetyManagementCertificateHistEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipAddInfoEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipManagerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SisterShipLinksEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SpecialFeatureEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.StatusHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.StowageCommodityEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesHistoryUniqueEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.TbCompanyDetailEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.TechnicalManagerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ThrustersEntity; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipInfoMstEntity; + +import java.util.List; + +/** + * ShipEntity Repository 인터페이스 + * 구현체: ShipRepositoryImpl (JdbcTemplate 기반) + */ +public interface ShipRepository { + void saveShipInfoMst(List shipInfoMstEntityList); + void saveShipMainInfo(List shipInfoMstEntityList); + void saveShipAddInfo(List shipAddInfoEntityList); 
+ void saveBareboatCharterHistory(List bareboatCharterHistoryEntityList); + void saveCallsignAndMmsiHistory(List callsignAndMmsiHistoryEntityList); + void saveClassHistory(List classHistoryEntityList); + void saveCompanyVesselRelationships(List companyVesselRelationshipsEntityList); + void saveCrewList(List crewListEntityList); + void saveDarkActivityConfirmed(List darkActivityConfirmedEntityList); + void saveFlagHistory(List flagHistoryEntityList); + void saveGroupBeneficialOwnerHistory(List groupBeneficialOwnerHistoryEntityList); + void saveIceClass(List iceClassEntityList); + void saveNameHistory(List nameHistoryEntityList); + void saveOperatorHistory(List operatorHistoryEntityList); + void saveOwnerHistory(List ownerHistoryEntityList); + void savePandIHistory(List pandIHistoryEntityList); + void saveSafetyManagementCertificateHist(List safetyManagementCertificateHistEntityList); + void saveShipManagerHistory(List shipManagerHistoryEntityList); + void saveSisterShipLinks(List sisterShipLinksEntityList); + void saveSpecialFeature(List specialFeatureEntityList); + void saveStatusHistory(List statusHistoryEntityList); + void saveStowageCommodity(List stowageCommodityEntityList); + void saveSurveyDates(List surveyDatesEntityList); + void saveSurveyDatesHistoryUnique(List surveyDatesHistoryUniqueEntityList); + void saveTechnicalManagerHistory(List technicalManagerHistoryEntityList); + void saveThrusters(List thrustersEntityList); + void saveTbCompanyDetail(List tbCompanyDetailEntityList); +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipRepositoryImpl.java new file mode 100644 index 0000000..4ab0ee8 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/repository/ShipRepositoryImpl.java @@ -0,0 +1,1128 @@ +package com.snp.batch.jobs.datasync.batch.ship.repository; + +import 
com.snp.batch.common.batch.repository.MultiDataSourceJdbcRepository; +import com.snp.batch.common.util.TableMetaInfo; +import com.snp.batch.jobs.datasync.batch.ship.entity.*; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +import javax.sql.DataSource; +import java.sql.PreparedStatement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.List; + +/** + * ShipEntity Repository (JdbcTemplate 기반) + */ +@Slf4j +@Repository("shipRepository") +public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository implements ShipRepository { + + // (필요한 경우 DataSource는 필드로 유지) + private DataSource batchDataSource; + private DataSource businessDataSource; + private final TableMetaInfo tableMetaInfo; + + + public ShipRepositoryImpl(@Qualifier("batchDataSource") DataSource batchDataSource, + @Qualifier("businessDataSource") DataSource businessDataSource, + TableMetaInfo tableMetaInfo) { + + super(new JdbcTemplate(batchDataSource), new JdbcTemplate(businessDataSource)); + + this.batchDataSource = batchDataSource; + this.businessDataSource = businessDataSource; + this.tableMetaInfo = tableMetaInfo; + } + + @Override + protected String getTableName() { + return null; + } + + @Override + protected RowMapper getRowMapper() { + return null; + } + + @Override + protected Long extractId(ShipInfoMstEntity entity) { + return null; + } + + @Override + protected String getInsertSql() { + return null; + } + + @Override + protected String getUpdateSql() { + return null; + } + + @Override + protected void setInsertParameters(PreparedStatement ps, ShipInfoMstEntity entity) throws Exception { + + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, ShipInfoMstEntity entity) throws Exception { + + } + + @Override + protected String getEntityName() { + 
        return null;
    }

    /**
     * Batch-upserts full ship master rows into the target TB_SHIP_INFO_MST table.
     * No-op when the list is null or empty.
     * NOTE(review): unlike the other save* methods, this builds the SQL before the
     * null/empty check and has no start/finish debug logging — consider aligning.
     */
    @Override
    public void saveShipInfoMst(List shipInfoMstEntityList) {
        String sql = ShipDataSql.getShipInfoMstUpsertSql(tableMetaInfo.targetTbShipInfoMst);
        if (shipInfoMstEntityList == null || shipInfoMstEntityList.isEmpty()) {
            return;
        }
        batchJdbcTemplate.batchUpdate(sql, shipInfoMstEntityList, shipInfoMstEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindShipInfoMst(ps, (ShipInfoMstEntity) entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });
    }

    /**
     * Batch-upserts the condensed "main info" projection of the ship master rows
     * into the target TB_SHIP_MAIN_INFO table. No-op when the list is null or empty.
     */
    @Override
    public void saveShipMainInfo(List shipInfoMstEntityList) {
        String core20Sql = ShipDataSql.getShipMainInfoUpsertSql(tableMetaInfo.targetTbShipMainInfo);
        if (shipInfoMstEntityList == null || shipInfoMstEntityList.isEmpty()) {
            return;
        }
        batchJdbcTemplate.batchUpdate(core20Sql, shipInfoMstEntityList, shipInfoMstEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindShipMainInfo(ps, (ShipInfoMstEntity) entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });
    }

    /**
     * Binds every column of the full ship-master upsert, all as strings.
     * Parameter order must match ShipDataSql.getShipInfoMstUpsertSql exactly.
     */
    public void bindShipInfoMst(PreparedStatement pstmt, ShipInfoMstEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, entity.getDatasetVer());
        pstmt.setString(idx++, entity.getImoNo());
        pstmt.setString(idx++, entity.getMmsiNo());
        pstmt.setString(idx++, entity.getShipNm());
        pstmt.setString(idx++, entity.getClsgnNo());
        pstmt.setString(idx++, entity.getFrmlaRegNo());
        pstmt.setString(idx++, entity.getFshrPrmtNo());
        pstmt.setString(idx++, entity.getShipNtnlty());
        pstmt.setString(idx++, entity.getNtnltyCd());
        pstmt.setString(idx++, entity.getLoadPort());
        pstmt.setString(idx++, entity.getClfic());
        pstmt.setString(idx++, entity.getClficDesc());
        pstmt.setString(idx++, entity.getShipStatus());
        pstmt.setString(idx++, entity.getShipTypeGroup());
        pstmt.setString(idx++, entity.getShipTypeLvTwo());
        pstmt.setString(idx++, entity.getShipTypeLvThr());
        pstmt.setString(idx++, entity.getShipTypeLvFour());
        pstmt.setString(idx++, entity.getShipTypeLvFive());
        pstmt.setString(idx++, entity.getShipTypeLvFiveDtldType());
        pstmt.setString(idx++, entity.getShipTypeLvFiveHullType());
        pstmt.setString(idx++, entity.getShipTypeLvFiveLwrnkGroup());
        pstmt.setString(idx++, entity.getBuildYy());
        pstmt.setString(idx++, entity.getBuildYmd());
        pstmt.setString(idx++, entity.getShpyrd());
        pstmt.setString(idx++, entity.getShpyrdOffclNm());
        pstmt.setString(idx++, entity.getShpyrdBuildNo());
        pstmt.setString(idx++, entity.getBuildDesc());
        pstmt.setString(idx++, entity.getModfHstryDesc());
        pstmt.setString(idx++, entity.getWhlnthLoa());
        pstmt.setString(idx++, entity.getRegLength());
        pstmt.setString(idx++, entity.getLbp());
        pstmt.setString(idx++, entity.getFormnBreadth());
        pstmt.setString(idx++, entity.getMaxBreadth());
        pstmt.setString(idx++, entity.getDepth());
        pstmt.setString(idx++, entity.getDraft());
        pstmt.setString(idx++, entity.getKeelMastHg());
        pstmt.setString(idx++, entity.getBulbBow());
        pstmt.setString(idx++, entity.getGt());
        pstmt.setString(idx++, entity.getNtTon());
        pstmt.setString(idx++, entity.getDwt());
        pstmt.setString(idx++, entity.getDisplacement());
        pstmt.setString(idx++, entity.getLightDisplacementTon());
        pstmt.setString(idx++, entity.getCgt());
        pstmt.setString(idx++, entity.getFldngOneCmPerTonTpci());
        pstmt.setString(idx++, entity.getTonEfectDay());
        pstmt.setString(idx++, entity.getCalcfrmDwt());
        pstmt.setString(idx++, entity.getTeuCnt());
        pstmt.setString(idx++, entity.getTeuCapacity());
        pstmt.setString(idx++, entity.getGrainCapacityM3());
        pstmt.setString(idx++, entity.getBaleCapacity());
        pstmt.setString(idx++, entity.getLiquidCapacity());
        pstmt.setString(idx++, entity.getGasM3());
        pstmt.setString(idx++, entity.getInsulatedM3());
        pstmt.setString(idx++, entity.getPassengerCapacity());
        pstmt.setString(idx++, entity.getBollardPull());
        pstmt.setString(idx++, entity.getSvcSpd());
        pstmt.setString(idx++, entity.getMainEngineType());
        pstmt.setString(idx++, entity.getFuelCnsmpSpdOne());
        pstmt.setString(idx++, entity.getFuelCnsmpamtValOne());
        pstmt.setString(idx++, entity.getFuelCnsmpSpdTwo());
        pstmt.setString(idx++, entity.getFuelCnsmpamtValTwo());
        pstmt.setString(idx++, entity.getTotalFuelCapacityM3());
        pstmt.setString(idx++, entity.getBlrMftr());
        pstmt.setString(idx++, entity.getProplrMftr());
        pstmt.setString(idx++, entity.getCargoCapacityM3Desc());
        pstmt.setString(idx++, entity.getEqpmntDesc());
        pstmt.setString(idx++, entity.getHdn());
        pstmt.setString(idx++, entity.getHatcheDesc());
        pstmt.setString(idx++, entity.getLaneDoorRampDesc());
        pstmt.setString(idx++, entity.getSpcTankDesc());
        pstmt.setString(idx++, entity.getTankDesc());
        pstmt.setString(idx++, entity.getPrmovrDesc());
        pstmt.setString(idx++, entity.getPrmovrOvrvwDesc());
        pstmt.setString(idx++, entity.getAuxDesc());
        pstmt.setString(idx++, entity.getAsstGnrtrDesc());
        pstmt.setString(idx++, entity.getFuelDesc());
        pstmt.setString(idx++, entity.getDocCompanyCd());
        pstmt.setString(idx++, entity.getGroupActlOwnrCompanyCd());
        pstmt.setString(idx++, entity.getOperator());
        pstmt.setString(idx++, entity.getOperatorCompanyCd());
        pstmt.setString(idx++, entity.getShipMngrCompanyCd());
        pstmt.setString(idx++, entity.getTechMngrCd());
        pstmt.setString(idx++, entity.getRegShponrCd());
        pstmt.setString(idx++, entity.getLastMdfcnDt());
    }

    /**
     * Binds the reduced main-info column set.
     * Numeric-looking columns are bound via setObject with explicit java.sql.Types.
     * NOTE(review): the same getters (e.g. getWhlnthLoa) are bound as strings in
     * bindShipInfoMst but as Types.DOUBLE here — this relies on driver-side
     * conversion; confirm the entity field types against the target schema.
     */
    public void bindShipMainInfo(PreparedStatement pstmt, ShipInfoMstEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, entity.getImoNo());
        pstmt.setString(idx++, entity.getMmsiNo());
        pstmt.setString(idx++, entity.getShipNm());
        pstmt.setString(idx++, entity.getClsgnNo());
        pstmt.setString(idx++, entity.getShipNtnlty());
        pstmt.setString(idx++, entity.getLoadPort());
        pstmt.setString(idx++, entity.getClfic());
        pstmt.setString(idx++, entity.getShipTypeLvFive());
        pstmt.setString(idx++, entity.getShipTypeLvFiveDtldType());
        pstmt.setString(idx++, entity.getBuildYy());
        pstmt.setString(idx++, entity.getShpyrd());
        pstmt.setObject(idx++, entity.getWhlnthLoa(), Types.DOUBLE);
        pstmt.setObject(idx++, entity.getFormnBreadth(), Types.DOUBLE);
        pstmt.setObject(idx++, entity.getDepth(), Types.DOUBLE);
        pstmt.setObject(idx++, entity.getDraft(), Types.DOUBLE);
        pstmt.setString(idx++, entity.getGt());
        pstmt.setString(idx++, entity.getDwt());
        pstmt.setString(idx++, entity.getTeuCnt());
        pstmt.setObject(idx++, entity.getSvcSpd(), Types.DOUBLE);
        pstmt.setString(idx++, entity.getMainEngineType());
        pstmt.setString(idx++, entity.getShipStatus());
        pstmt.setString(idx++, entity.getOperator());
        pstmt.setString(idx++, entity.getNtnltyCd());
        pstmt.setString(idx++, entity.getShipTypeLvTwo());
        pstmt.setString(idx++, entity.getShipTypeLvThr());
        pstmt.setString(idx++, entity.getLastMdfcnDt());
    }

    /** Batch-upserts ship additional-info rows. No-op when the list is null or empty. */
    @Override
    public void saveShipAddInfo(List shipAddInfoEntityList) {
        String sql = ShipDataSql.getShipAddInfoUpsertSql(tableMetaInfo.targetTbShipAddInfo);
        if (shipAddInfoEntityList == null || shipAddInfoEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, shipAddInfoEntityList, shipAddInfoEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindShipAddInfo(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
    }

    /** Binds the 14 ship-additional-info columns in SQL parameter order. */
    public void bindShipAddInfo(PreparedStatement pstmt, ShipAddInfoEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                      // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());        // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());             // 3. 
imo_no
        pstmt.setString(idx++, entity.getShipEml());           // 4. ship_eml
        pstmt.setString(idx++, entity.getMaxDpwt());           // 5. max_dpwt
        pstmt.setString(idx++, entity.getMaxDrillDepth());     // 6. max_drill_depth
        pstmt.setString(idx++, entity.getDrillBrg());          // 7. drill_brg
        pstmt.setString(idx++, entity.getOceanProdFacility()); // 8. ocean_prod_facility
        pstmt.setString(idx++, entity.getDeckHeatExch());      // 9. deck_heat_exch
        pstmt.setString(idx++, entity.getDehtexMatral());      // 10. dehtex_matral
        pstmt.setString(idx++, entity.getPortblTwinDeck());    // 11. portbl_twin_deck
        pstmt.setString(idx++, entity.getFixedTwinDeck());     // 12. fixed_twin_deck
        pstmt.setString(idx++, entity.getShipSatlitCommId());  // 13. ship_satlit_comm_id
        pstmt.setString(idx++, entity.getShipSatlitCmrspCd()); // 14. ship_satlit_cmrsp_cd
    }

    /** Batch-upserts bareboat-charter history rows. No-op when the list is null or empty. */
    @Override
    public void saveBareboatCharterHistory(List bareboatCharterHistoryEntityList) {
        String sql = ShipDataSql.getBareboatCharterHistoryUpsertSql(tableMetaInfo.targetTbShipBbctrHstry);
        if (bareboatCharterHistoryEntityList == null || bareboatCharterHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, bareboatCharterHistoryEntityList, bareboatCharterHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindBareboatCharterHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
    }

    /** Binds the 7 bareboat-charter-history columns in SQL parameter order. */
    public void bindBareboatCharterHistory(PreparedStatement pstmt, BareboatCharterHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                   // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());     // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());          // 3. imo_no
        pstmt.setString(idx++, entity.getBbctrSeq());       // 4. bbctr_seq
        pstmt.setString(idx++, entity.getEfectStaDay());    // 5. efect_sta_day
        pstmt.setString(idx++, entity.getBbctrCompanyCd()); // 6. bbctr_company_cd
        pstmt.setString(idx++, entity.getBbctrCompany());   // 7. bbctr_company
    }

    /** Batch-upserts callsign/MMSI identification history rows. No-op when null or empty. */
    @Override
    public void saveCallsignAndMmsiHistory(List callsignAndMmsiHistoryEntityList) {
        String sql = ShipDataSql.getCallsignAndMmsiHistoryUpsertSql(tableMetaInfo.targetTbShipIdntfInfoHstry);
        if (callsignAndMmsiHistoryEntityList == null || callsignAndMmsiHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, callsignAndMmsiHistoryEntityList, callsignAndMmsiHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindCallsignAndMmsiHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
    }

    /** Binds the 7 identification-history columns in SQL parameter order. */
    public void bindCallsignAndMmsiHistory(PreparedStatement pstmt, CallsignAndMmsiHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                  // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());    // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());         // 3. imo_no
        pstmt.setString(idx++, entity.getShipIdntfSeq());  // 4. ship_idntf_seq
        pstmt.setString(idx++, entity.getEfectStaDay());   // 5. efect_sta_day
        pstmt.setString(idx++, entity.getClsgnNo());       // 6. clsgn_no
        pstmt.setString(idx++, entity.getMmsiNo());        // 7. mmsi_no
    }

    /** Batch-upserts classification-society history rows. No-op when null or empty. */
    @Override
    public void saveClassHistory(List classHistoryEntityList) {
        String sql = ShipDataSql.getClassHistoryUpsertSql(tableMetaInfo.targetTbShipClficHstry);
        if (classHistoryEntityList == null || classHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, classHistoryEntityList, classHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindClassHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
    }

    /** Binds the 10 class-history columns in SQL parameter order. */
    public void bindClassHistory(PreparedStatement pstmt, ClassHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                  // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());    // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());         // 3. imo_no
        pstmt.setString(idx++, entity.getClficHstrySeq()); // 4. clfic_hstry_seq
        pstmt.setString(idx++, entity.getEfectStaDay());   // 5. efect_sta_day
        pstmt.setString(idx++, entity.getClficCd());       // 6. clfic_cd
        pstmt.setString(idx++, entity.getClficId());       // 7. clfic_id
        pstmt.setString(idx++, entity.getClficAstnNm());   // 8. clfic_asctn_nm
        pstmt.setString(idx++, entity.getClficHasYn());    // 9. clfic_has_yn
        pstmt.setString(idx++, entity.getNowYn());         // 10. 
now_yn
    }

    /** Batch-upserts company–vessel relationship rows. No-op when the list is null or empty. */
    @Override
    public void saveCompanyVesselRelationships(List companyVesselRelationshipsEntityList) {
        String sql = ShipDataSql.getCompanyVesselRelationshipsUpsertSql(tableMetaInfo.targetTbShipCompanyRel);
        if (companyVesselRelationshipsEntityList == null || companyVesselRelationshipsEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, companyVesselRelationshipsEntityList, companyVesselRelationshipsEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindCompanyVesselRelationships(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
    }

    /** Binds the 23 company–vessel relationship columns in SQL parameter order. */
    public void bindCompanyVesselRelationships(PreparedStatement pstmt, CompanyVesselRelationshipsEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                          // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());            // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());                 // 3. imo_no
        pstmt.setString(idx++, entity.getDoccHasCompanyCd());      // 4. docc_has_company_cd
        pstmt.setString(idx++, entity.getDoccHasCompany());        // 5. docc_has_company
        pstmt.setString(idx++, entity.getGroupActlOwnr());         // 6. group_actl_ownr
        pstmt.setString(idx++, entity.getGroupActlOwnrCd());       // 7. group_actl_ownr_cd
        pstmt.setString(idx++, entity.getShipOperator());          // 8. ship_operator
        pstmt.setString(idx++, entity.getShipOperatorCd());        // 9. ship_operator_cd
        pstmt.setString(idx++, entity.getRgOwnr());                // 10. rg_ownr
        pstmt.setString(idx++, entity.getRgOwnrCd());              // 11. rg_ownr_cd
        pstmt.setString(idx++, entity.getShipMngCompany());        // 12. ship_mng_company
        pstmt.setString(idx++, entity.getShipMngCompanyCd());      // 13. ship_mng_company_cd
        pstmt.setString(idx++, entity.getTechMngCompany());        // 14. tech_mng_company
        pstmt.setString(idx++, entity.getTechMngCompanyCd());      // 15. tech_mng_company_cd
        pstmt.setString(idx++, entity.getDoccGroup());             // 16. docc_group
        pstmt.setString(idx++, entity.getDoccGroupCd());           // 17. docc_group_cd
        pstmt.setString(idx++, entity.getShipOperatorGroup());     // 18. ship_operator_group
        pstmt.setString(idx++, entity.getShipOperatorGroupCd());   // 19. ship_operator_group_cd
        pstmt.setString(idx++, entity.getShipMngCompanyGroup());   // 20. ship_mng_company_group
        pstmt.setString(idx++, entity.getShipMngCompanyGroupCd()); // 21. ship_mng_company_group_cd
        pstmt.setString(idx++, entity.getTechMngCompanyGroup());   // 22. tech_mng_company_group
        pstmt.setString(idx++, entity.getTechMngCompanyGroupCd()); // 23. tech_mng_company_group_cd
    }

    /** Batch-upserts crew-list rows. No-op when the list is null or empty. */
    @Override
    public void saveCrewList(List crewListEntityList) {
        String sql = ShipDataSql.getCrewListUpsertSql(tableMetaInfo.targetTbShipCrewList);
        if (crewListEntityList == null || crewListEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "CrewListEntity", crewListEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, crewListEntityList, crewListEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindCrewList(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "CrewListEntity", crewListEntityList.size());
    }

    /** Binds the 14 crew-list columns; head-count columns are bound as BigDecimal. */
    public void bindCrewList(PreparedStatement pstmt, CrewListEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                          // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());            // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());                 // 3. imo_no
        pstmt.setString(idx++, entity.getCrewId());                // 4. crew_id
        pstmt.setString(idx++, entity.getShipNm());                // 5. ship_nm
        pstmt.setString(idx++, entity.getNtnlty());                // 6. ntnlty
        pstmt.setString(idx++, entity.getCrewRstrYmd());           // 7. crew_rstr_ymd
        pstmt.setBigDecimal(idx++, entity.getOaCrewCnt());         // 8. oa_crew_cnt
        pstmt.setBigDecimal(idx++, entity.getGenCrewCnt());        // 9. gen_crew_cnt
        pstmt.setBigDecimal(idx++, entity.getOffcrCnt());          // 10. offcr_cnt
        pstmt.setBigDecimal(idx++, entity.getApprOffcrCnt());      // 11. appr_offcr_cnt
        pstmt.setBigDecimal(idx++, entity.getTrneCnt());           // 12. trne_cnt
        pstmt.setBigDecimal(idx++, entity.getEmbrkMntncCrewCnt()); // 13. embrk_mntnc_crew_cnt
        pstmt.setBigDecimal(idx++, entity.getUnrprtCnt());         // 14. unrprt_cnt
    }

    /** Batch-upserts confirmed dark-activity (AIS-off) rows. No-op when null or empty. */
    @Override
    public void saveDarkActivityConfirmed(List darkActivityConfirmedEntityList) {
        String sql = ShipDataSql.getDarkActivityConfirmedUpsertSql(tableMetaInfo.targetTbShipDarkActvIdnty);
        if (darkActivityConfirmedEntityList == null || darkActivityConfirmedEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, darkActivityConfirmedEntityList, darkActivityConfirmedEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindDarkActivityConfirmed(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
    }

    /**
     * Binds the 28 dark-activity columns; numerics use setObject with explicit
     * java.sql.Types (null-safe), timestamps are null-guarded before conversion.
     */
    public void bindDarkActivityConfirmed(PreparedStatement pstmt, DarkActivityConfirmedEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                               // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());                 // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());                      // 3. imo_no
        pstmt.setString(idx++, entity.getMmsiNo());                     // 4. mmsi_no
        pstmt.setObject(idx++, entity.getDarkHr(), Types.BIGINT);       // 5. dark_hr
        pstmt.setObject(idx++, entity.getDarkActvStatus(), Types.BIGINT); // 6. dark_actv_status
        pstmt.setString(idx++, entity.getShipNm());                     // 7. 
ship_nm
        pstmt.setString(idx++, entity.getDarkActv());                   // 8. dark_actv
        pstmt.setObject(idx++, entity.getZoneId(), Types.BIGINT);       // 9. zone_id
        pstmt.setString(idx++, entity.getZoneNm());                     // 10. zone_nm
        pstmt.setString(idx++, entity.getZoneCountry());                // 11. zone_country
        pstmt.setTimestamp(idx++, entity.getDarkTmUtc() != null ? Timestamp.valueOf(entity.getDarkTmUtc()) : null); // 12. dark_tm_utc
        pstmt.setObject(idx++, entity.getDarkLat(), Types.DOUBLE);      // 13. dark_lat
        pstmt.setObject(idx++, entity.getDarkLon(), Types.DOUBLE);      // 14. dark_lon
        pstmt.setObject(idx++, entity.getDarkSpd(), Types.DOUBLE);      // 15. dark_spd
        pstmt.setObject(idx++, entity.getDarkHeading(), Types.DOUBLE);  // 16. dark_heading
        pstmt.setObject(idx++, entity.getDarkDraft(), Types.DOUBLE);    // 17. dark_draft
        pstmt.setTimestamp(idx++, entity.getNxtCptrTmUtc() != null ? Timestamp.valueOf(entity.getNxtCptrTmUtc()) : null); // 18. nxt_cptr_tm_utc
        pstmt.setObject(idx++, entity.getNxtCptrSpd(), Types.DOUBLE);   // 19. nxt_cptr_spd
        pstmt.setObject(idx++, entity.getNxtCptrDraft(), Types.DOUBLE); // 20. nxt_cptr_draft
        pstmt.setObject(idx++, entity.getNxtCptrHeading(), Types.DOUBLE); // 21. nxt_cptr_heading
        pstmt.setString(idx++, entity.getDarkRptDestAis());             // 22. dark_rpt_dest_ais
        pstmt.setString(idx++, entity.getLastPrtcllPort());             // 23. last_prtcll_port
        pstmt.setString(idx++, entity.getLastPoccntryCd());             // 24. last_poccntry_cd
        pstmt.setString(idx++, entity.getLastPoccntry());               // 25. last_poccntry
        pstmt.setObject(idx++, entity.getNxtCptrLat(), Types.DOUBLE);   // 26. nxt_cptr_lat
        pstmt.setObject(idx++, entity.getNxtCptrLon(), Types.DOUBLE);   // 27. nxt_cptr_lon
        pstmt.setString(idx++, entity.getNxtCptrRptDestAis());          // 28. nxt_cptr_rpt_dest_ais
    }

    /** Batch-upserts flag (nationality) history rows. No-op when the list is null or empty. */
    @Override
    public void saveFlagHistory(List flagHistoryEntityList) {
        String sql = ShipDataSql.getFlagHistoryUpsertSql(tableMetaInfo.targetTbShipCountryHstry);
        if (flagHistoryEntityList == null || flagHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, flagHistoryEntityList, flagHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindFlagHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
    }

    /** Binds the 7 flag-history columns in SQL parameter order. */
    public void bindFlagHistory(PreparedStatement pstmt, FlagHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                        // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());          // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());               // 3. imo_no
        pstmt.setString(idx++, entity.getShipCountryHstrySeq()); // 4. ship_country_hstry_seq
        pstmt.setString(idx++, entity.getEfectStaDay());         // 5. efect_sta_day
        pstmt.setString(idx++, entity.getCountryCd());           // 6. country_cd
        pstmt.setString(idx++, entity.getCountry());             // 7. country
    }

    /** Batch-upserts group beneficial-owner history rows. No-op when null or empty. */
    @Override
    public void saveGroupBeneficialOwnerHistory(List groupBeneficialOwnerHistoryEntityList) {
        String sql = ShipDataSql.getGroupBeneficialOwnerHistoryUpsertSql(tableMetaInfo.targetTbShipGroupRevnOwnrHstry);
        if (groupBeneficialOwnerHistoryEntityList == null || groupBeneficialOwnerHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, groupBeneficialOwnerHistoryEntityList, groupBeneficialOwnerHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindGroupBeneficialOwnerHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
    }

    /** Binds the 8 group beneficial-owner history columns in SQL parameter order. */
    public void bindGroupBeneficialOwnerHistory(PreparedStatement pstmt, GroupBeneficialOwnerHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                              // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());                // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());                     // 3. imo_no
        pstmt.setString(idx++, entity.getShipGroupRevnOwnrHstrySeq()); // 4. ship_group_revn_ownr_hstry_seq
        pstmt.setString(idx++, entity.getEfectStaDay());               // 5. efect_sta_day
        pstmt.setString(idx++, entity.getGroupActlOwnrCd());           // 6. group_actl_ownr_cd
        pstmt.setString(idx++, entity.getGroupActlOwnr());             // 7. group_actl_ownr
        pstmt.setString(idx++, entity.getCompanyStatus());             // 8. 
company_status
    }

    /** Batch-upserts ice-class grade rows. No-op when the list is null or empty. */
    @Override
    public void saveIceClass(List iceClassEntityList) {
        String sql = ShipDataSql.getIceClassUpsertSql(tableMetaInfo.targetTbShipIceGrd);
        if (iceClassEntityList == null || iceClassEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "IceClassEntity", iceClassEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, iceClassEntityList, iceClassEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindIceClass(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "IceClassEntity", iceClassEntityList.size());
    }

    /** Binds the 5 ice-class columns in SQL parameter order. */
    public void bindIceClass(PreparedStatement pstmt, IceClassEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");               // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());      // 3. imo_no
        pstmt.setString(idx++, entity.getIceGrdCd());   // 4. ice_grd_cd
        pstmt.setString(idx++, entity.getIceGrd());     // 5. ice_grd
    }

    /** Batch-upserts ship-name change history rows. No-op when the list is null or empty. */
    @Override
    public void saveNameHistory(List nameHistoryEntityList) {
        String sql = ShipDataSql.getNameHistoryUpsertSql(tableMetaInfo.targetTbShipNmChgHstry);
        if (nameHistoryEntityList == null || nameHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, nameHistoryEntityList, nameHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindNameHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
    }

    /** Binds the 6 name-history columns in SQL parameter order. */
    public void bindNameHistory(PreparedStatement pstmt, NameHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                      // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());        // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());             // 3. imo_no
        pstmt.setString(idx++, entity.getShipNmChgHstrySeq()); // 4. ship_nm_chg_hstry_seq
        pstmt.setString(idx++, entity.getEfectStaDay());       // 5. efect_sta_day
        pstmt.setString(idx++, entity.getShipNm());            // 6. ship_nm
    }

    /** Batch-upserts operator history rows. No-op when the list is null or empty. */
    @Override
    public void saveOperatorHistory(List operatorHistoryEntityList) {
        String sql = ShipDataSql.getOperatorHistoryUpsertSql(tableMetaInfo.targetTbShipOperatorHstry);
        if (operatorHistoryEntityList == null || operatorHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, operatorHistoryEntityList, operatorHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindOperatorHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
    }

    /** Binds the 8 operator-history columns in SQL parameter order. */
    public void bindOperatorHistory(PreparedStatement pstmt, OperatorHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                         // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());           // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());                // 3. imo_no
        pstmt.setString(idx++, entity.getShipOperatorHstrySeq()); // 4. ship_operator_hstry_seq
        pstmt.setString(idx++, entity.getEfectStaDay());          // 5. efect_sta_day
        pstmt.setString(idx++, entity.getShipOperatorCd());       // 6. ship_operator_cd
        pstmt.setString(idx++, entity.getShipOperator());         // 7. ship_operator
        pstmt.setString(idx++, entity.getCompanyStatus());        // 8. 
company_status
    }

    /** Batch-upserts registered-owner history rows. No-op when the list is null or empty. */
    @Override
    public void saveOwnerHistory(List ownerHistoryEntityList) {
        String sql = ShipDataSql.getOwnerHistoryUpsertSql(tableMetaInfo.targetTbShipOwnrHstry);
        if (ownerHistoryEntityList == null || ownerHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, ownerHistoryEntityList, ownerHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindOwnerHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
    }

    /** Binds the 8 owner-history columns in SQL parameter order. */
    public void bindOwnerHistory(PreparedStatement pstmt, OwnerHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                     // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());       // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());            // 3. imo_no
        pstmt.setString(idx++, entity.getShipOwnrHstrySeq()); // 4. ship_ownr_hstry_seq
        pstmt.setString(idx++, entity.getEfectStaDay());      // 5. efect_sta_day
        pstmt.setString(idx++, entity.getOwnrCd());           // 6. ownr_cd
        pstmt.setString(idx++, entity.getOwnr());             // 7. ownr
        pstmt.setString(idx++, entity.getCompanyStatus());    // 8. company_status
    }

    /** Batch-upserts P&I (protection & indemnity) club history rows. No-op when null or empty. */
    @Override
    public void savePandIHistory(List pandIHistoryEntityList) {
        String sql = ShipDataSql.getPandIHistoryUpsertSql(tableMetaInfo.targetTbShipPrtcRpnHstry);
        if (pandIHistoryEntityList == null || pandIHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, pandIHistoryEntityList, pandIHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindPandIHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
    }

    /** Binds the 8 P&I-history columns in SQL parameter order. */
    public void bindPandIHistory(PreparedStatement pstmt, PandIHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                        // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());          // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());               // 3. imo_no
        pstmt.setString(idx++, entity.getShipPrtcRpnHstrySeq()); // 4. ship_prtc_rpn_hstry_seq
        pstmt.setString(idx++, entity.getEfectStaDay());         // 5. efect_sta_day
        pstmt.setString(idx++, entity.getPniClubCd());           // 6. pni_club_cd
        pstmt.setString(idx++, entity.getPniClubNm());           // 7. pni_club_nm
        pstmt.setString(idx++, entity.getSrc());                 // 8. src
    }

    /** Batch-upserts safety-management certificate (SMC) history rows. No-op when null or empty. */
    @Override
    public void saveSafetyManagementCertificateHist(List safetyManagementCertificateHistEntityList) {
        String sql = ShipDataSql.getSafetyManagementCertificateHistUpsertSql(tableMetaInfo.targetTbShipSftyMngEvdcHstry);
        if (safetyManagementCertificateHistEntityList == null || safetyManagementCertificateHistEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, safetyManagementCertificateHistEntityList, safetyManagementCertificateHistEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindSafetyManagementCertificateHist(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
    }

    /** Binds the 16 SMC-history columns in SQL parameter order. */
    public void bindSafetyManagementCertificateHist(PreparedStatement pstmt, SafetyManagementCertificateHistEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                        // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());          // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());               // 3. imo_no
        pstmt.setString(idx++, entity.getShipSftyMngEvdcSeq());  // 4. ship_sfty_mng_evdc_seq
        pstmt.setString(idx++, entity.getSmgrcSrngEngines());    // 5. smgrc_srng_engines
        pstmt.setString(idx++, entity.getSmgrcSysCatConvArbt()); // 6. smgrc_sys_cat_conv_arbt
        pstmt.setString(idx++, entity.getSmgrcExpryDay());       // 7. smgrc_expry_day
        pstmt.setString(idx++, entity.getSmgrcIssueDay());       // 8. smgrc_issue_day
        pstmt.setString(idx++, entity.getSmgrcDoccCompany());    // 9. smgrc_docc_company
        pstmt.setString(idx++, entity.getSmgrcNtnlty());         // 10. smgrc_ntnlty
        pstmt.setString(idx++, entity.getSmgrcIssueEngines());   // 11. smgrc_issue_engines
        pstmt.setString(idx++, entity.getSmgrcEtcDesc());        // 12. smgrc_etc_desc
        pstmt.setString(idx++, entity.getSmgrcShipNm());         // 13. smgrc_ship_nm
        pstmt.setString(idx++, entity.getSmgrcShipType());       // 14. smgrc_ship_type
        pstmt.setString(idx++, entity.getSmgrcSrc());            // 15. smgrc_src
        pstmt.setString(idx++, entity.getSmgrcCompanyCd());      // 16. smgrc_company_cd
    }

    /** Batch-upserts ship-manager history rows. No-op when the list is null or empty. */
    @Override
    public void saveShipManagerHistory(List shipManagerHistoryEntityList) {
        String sql = ShipDataSql.getShipManagerHistoryUpsertSql(tableMetaInfo.targetTbShipMngCompanyHstry);
        if (shipManagerHistoryEntityList == null || shipManagerHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, shipManagerHistoryEntityList, shipManagerHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindShipManagerHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
    }

    /** Binds the 8 ship-manager history columns in SQL parameter order. */
    public void bindShipManagerHistory(PreparedStatement pstmt, ShipManagerHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                       // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());         // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());              // 3. imo_no
        pstmt.setString(idx++, entity.getShipMngCompanySeq());  // 4. ship_mng_company_seq
        pstmt.setString(idx++, entity.getEfectStaDay());        // 5. efect_sta_day
        pstmt.setString(idx++, entity.getShipMngrCd());         // 6. ship_mngr_cd
        pstmt.setString(idx++, entity.getShipMngr());           // 7. ship_mngr
        pstmt.setString(idx++, entity.getCompanyStatus());      // 8. 
company_status
    }

    /** Batch-upserts sister-ship link rows. No-op when the list is null or empty. */
    @Override
    public void saveSisterShipLinks(List sisterShipLinksEntityList) {
        String sql = ShipDataSql.getSisterShipLinksUpsertSql(tableMetaInfo.targetTbShipSstrvslRel);
        if (sisterShipLinksEntityList == null || sisterShipLinksEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, sisterShipLinksEntityList, sisterShipLinksEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindSisterShipLinks(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
    }

    /** Binds the 4 sister-ship link columns in SQL parameter order. */
    public void bindSisterShipLinks(PreparedStatement pstmt, SisterShipLinksEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");               // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());      // 3. imo_no
        pstmt.setString(idx++, entity.getLinkImoNo());  // 4. link_imo_no
    }

    /** Batch-upserts special-feature rows. No-op when the list is null or empty. */
    @Override
    public void saveSpecialFeature(List specialFeatureEntityList) {
        String sql = ShipDataSql.getSpecialFeatureUpsertSql(tableMetaInfo.targetTbShipSpcFetr);
        if (specialFeatureEntityList == null || specialFeatureEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, specialFeatureEntityList, specialFeatureEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindSpecialFeature(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
    }

    /** Binds the 6 special-feature columns in SQL parameter order. */
    public void bindSpecialFeature(PreparedStatement pstmt, SpecialFeatureEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                   // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());     // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());          // 3. imo_no
        pstmt.setString(idx++, entity.getShipSpcFetrSeq()); // 4. ship_spc_fetr_seq
        pstmt.setString(idx++, entity.getSpcMttrCd());      // 5. spc_mttr_cd
        pstmt.setString(idx++, entity.getSpcMttr());        // 6. spc_mttr
    }

    /** Batch-upserts ship-status history rows. No-op when the list is null or empty. */
    @Override
    public void saveStatusHistory(List statusHistoryEntityList) {
        String sql = ShipDataSql.getStatusHistoryUpsertSql(tableMetaInfo.targetTbShipStatusHstry);
        if (statusHistoryEntityList == null || statusHistoryEntityList.isEmpty()) {
            return;
        }
        log.debug("{} 배치 삽입 시작: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());

        batchJdbcTemplate.batchUpdate(sql, statusHistoryEntityList, statusHistoryEntityList.size(),
                (ps, entity) -> {
                    try {
                        bindStatusHistory(ps, entity);
                    } catch (Exception e) {
                        log.error("배치 삽입 파라미터 설정 실패", e);
                        throw new RuntimeException(e);
                    }
                });

        log.debug("{} 배치 삽입 완료: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
    }

    /** Binds the 7 status-history columns in SQL parameter order. */
    public void bindStatusHistory(PreparedStatement pstmt, StatusHistoryEntity entity) throws Exception {
        int idx = 1;
        pstmt.setString(idx++, "SYSTEM");                        // 1. creatr_id
        pstmt.setString(idx++, entity.getDatasetVer());          // 2. dataset_ver
        pstmt.setString(idx++, entity.getImoNo());               // 3. imo_no
        pstmt.setString(idx++, entity.getShipStatusHstrySeq());  // 4. ship_status_hstry_seq
        pstmt.setString(idx++, entity.getStatusCd());            // 5. status_cd
        pstmt.setString(idx++, entity.getStatusChgYmd());        // 6. status_chg_ymd
        pstmt.setString(idx++, entity.getStatus());              // 7. 
status + } + + @Override + public void saveStowageCommodity(List stowageCommodityEntityList) { + String sql = ShipDataSql.getStowageCommodityUpsertSql(tableMetaInfo.targetTbShipCargoCapacity); + if (stowageCommodityEntityList == null || stowageCommodityEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, stowageCommodityEntityList, stowageCommodityEntityList.size(), + (ps, entity) -> { + try { + bindStowageCommodity(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size()); + } + + public void bindStowageCommodity(PreparedStatement pstmt, StowageCommodityEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getImoNo()); // 3. imo_no + pstmt.setString(idx++, entity.getShipCargoCapacitySeq()); // 4. ship_cargo_capacity_seq + pstmt.setString(idx++, entity.getCapacityCd()); // 5. capacity_cd + pstmt.setString(idx++, entity.getCapacityCdDesc()); // 6. capacity_cd_desc + pstmt.setString(idx++, entity.getCargoCd()); // 7. cargo_cd + pstmt.setString(idx++, entity.getCargoNm()); // 8. 
cargo_nm + } + + @Override + public void saveSurveyDates(List surveyDatesEntityList) { + String sql = ShipDataSql.getSurveyDatesUpsertSql(tableMetaInfo.targetTbShipInspectionYmd); + if (surveyDatesEntityList == null || surveyDatesEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, surveyDatesEntityList, surveyDatesEntityList.size(), + (ps, entity) -> { + try { + bindSurveyDates(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size()); + } + + public void bindSurveyDates(PreparedStatement pstmt, SurveyDatesEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getImoNo()); // 3. imo_no + pstmt.setString(idx++, entity.getClficCd()); // 4. clfic_cd + pstmt.setString(idx++, entity.getClfic()); // 5. clfic + pstmt.setString(idx++, entity.getDckngInspection()); // 6. dckng_inspection + pstmt.setString(idx++, entity.getFxtmInspection()); // 7. fxtm_inspection + pstmt.setString(idx++, entity.getAnnualInspection()); // 8. annual_inspection + pstmt.setString(idx++, entity.getMchnFxtmInspectionYmd()); // 9. mchn_fxtm_inspection_ymd + pstmt.setString(idx++, entity.getTlsftInspectionYmd()); // 10. 
tlsft_inspection_ymd + } + + @Override + public void saveSurveyDatesHistoryUnique(List surveyDatesHistoryUniqueEntityList) { + String sql = ShipDataSql.getSurveyDatesHistoryUniqueUpsertSql(tableMetaInfo.targetTbShipInspectionYmdHstry); + if (surveyDatesHistoryUniqueEntityList == null || surveyDatesHistoryUniqueEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, surveyDatesHistoryUniqueEntityList, surveyDatesHistoryUniqueEntityList.size(), + (ps, entity) -> { + try { + bindSurveyDatesHistoryUnique(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size()); + } + + public void bindSurveyDatesHistoryUnique(PreparedStatement pstmt, SurveyDatesHistoryUniqueEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getImoNo()); // 3. imo_no + pstmt.setString(idx++, entity.getClficCd()); // 4. clfic_cd + pstmt.setString(idx++, entity.getInspectionType()); // 5. inspection_type + pstmt.setString(idx++, entity.getInspectionYmd()); // 6. inspection_ymd + pstmt.setString(idx++, entity.getClfic()); // 7. 
clfic + } + + @Override + public void saveTechnicalManagerHistory(List technicalManagerHistoryEntityList) { + String sql = ShipDataSql.getTechnicalManagerHistoryUpsertSql(tableMetaInfo.targetTbShipTechMngCompanyHstry); + if (technicalManagerHistoryEntityList == null || technicalManagerHistoryEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, technicalManagerHistoryEntityList, technicalManagerHistoryEntityList.size(), + (ps, entity) -> { + try { + bindTechnicalManagerHistory(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size()); + } + + public void bindTechnicalManagerHistory(PreparedStatement pstmt, TechnicalManagerHistoryEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getImoNo()); // 3. imo_no + pstmt.setString(idx++, entity.getShipTechMngCompanySeq()); // 4. ship_tech_mng_company_seq + pstmt.setString(idx++, entity.getEfectStaDay()); // 5. efect_sta_day + pstmt.setString(idx++, entity.getTechMngrCd()); // 6. tech_mngr_cd + pstmt.setString(idx++, entity.getTechMngr()); // 7. tech_mngr + pstmt.setString(idx++, entity.getCompanyStatus()); // 8. 
company_status + } + + @Override + public void saveThrusters(List thrustersEntityList) { + String sql = ShipDataSql.getThrustersUpsertSql(tableMetaInfo.targetTbThrstrInfo); + if (thrustersEntityList == null || thrustersEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "ThrustersEntity", thrustersEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, thrustersEntityList, thrustersEntityList.size(), + (ps, entity) -> { + try { + bindThrusters(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "ThrustersEntity", thrustersEntityList.size()); + } + + public void bindThrusters(PreparedStatement pstmt, ThrustersEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getImoNo()); // 3. imo_no + pstmt.setString(idx++, entity.getThrstrSeq()); // 4. thrstr_seq + pstmt.setString(idx++, entity.getThrstrTypeCd()); // 5. thrstr_type_cd + pstmt.setString(idx++, entity.getThrstrType()); // 6. thrstr_type + pstmt.setBigDecimal(idx++, entity.getThrstrCnt()); // 7. thrstr_cnt + pstmt.setString(idx++, entity.getThrstrPosition()); // 8. thrstr_position + pstmt.setBigDecimal(idx++, entity.getThrstrPowerBhp()); // 9. thrstr_power_bhp + pstmt.setBigDecimal(idx++, entity.getThrstrPowerKw()); // 10. thrstr_power_kw + pstmt.setString(idx++, entity.getInstlMth()); // 11. 
instl_mth + } + + @Override + public void saveTbCompanyDetail(List tbCompanyDetailEntityList) { + String sql = ShipDataSql.getTbCompanyDetailUpsertSql(tableMetaInfo.targetTbCompanyDtlInfo); + if (tbCompanyDetailEntityList == null || tbCompanyDetailEntityList.isEmpty()) { + return; + } + log.debug("{} 배치 삽입 시작: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size()); + + batchJdbcTemplate.batchUpdate(sql, tbCompanyDetailEntityList, tbCompanyDetailEntityList.size(), + (ps, entity) -> { + try { + bindTbCompanyDetail(ps, entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패", e); + throw new RuntimeException(e); + } + }); + + log.debug("{} 배치 삽입 완료: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size()); + } + + public void bindTbCompanyDetail(PreparedStatement pstmt, TbCompanyDetailEntity entity) throws Exception { + int idx = 1; + pstmt.setString(idx++, "SYSTEM"); // 1. creatr_id + pstmt.setString(idx++, entity.getDatasetVer()); // 2. dataset_ver + pstmt.setString(idx++, entity.getCompanyCd()); // 3. company_cd + pstmt.setString(idx++, entity.getLastUpdYmd()); // 4. last_upd_ymd + pstmt.setString(idx++, entity.getCareCd()); // 5. care_cd + pstmt.setString(idx++, entity.getCompanyStatus()); // 6. company_status + pstmt.setString(idx++, entity.getFullNm()); // 7. full_nm + pstmt.setString(idx++, entity.getCompanyNameAbbr()); // 8. company_name_abbr + pstmt.setString(idx++, entity.getCompanyFndnYmd()); // 9. company_fndn_ymd + pstmt.setString(idx++, entity.getPrntCompanyCd()); // 10. prnt_company_cd + pstmt.setString(idx++, entity.getCountryNm()); // 11. country_nm + pstmt.setString(idx++, entity.getCtyNm()); // 12. cty_nm + pstmt.setString(idx++, entity.getOaAddr()); // 13. oa_addr + pstmt.setString(idx++, entity.getEmlAddr()); // 14. eml_addr + pstmt.setString(idx++, entity.getTel()); // 15. tel + pstmt.setString(idx++, entity.getFaxNo()); // 16. fax_no + pstmt.setString(idx++, entity.getWbstUrl()); // 17. 
wbst_url + pstmt.setString(idx++, entity.getCountryCtrl()); // 18. country_ctrl + pstmt.setString(idx++, entity.getCountryCtrlCd()); // 19. country_ctrl_cd + pstmt.setString(idx++, entity.getCountryReg()); // 20. country_reg + pstmt.setString(idx++, entity.getCountryRegCd()); // 21. country_reg_cd + pstmt.setString(idx++, entity.getRegionCd()); // 22. region_cd + pstmt.setString(idx++, entity.getDistNm()); // 23. dist_nm + pstmt.setString(idx++, entity.getDistNo()); // 24. dist_no + pstmt.setString(idx++, entity.getMailAddrRear()); // 25. mail_addr_rear + pstmt.setString(idx++, entity.getMailAddrFrnt()); // 26. mail_addr_frnt + pstmt.setString(idx++, entity.getPoBox()); // 27. po_box + pstmt.setString(idx++, entity.getDtlAddrOne()); // 28. dtl_addr_one + pstmt.setString(idx++, entity.getDtlAddrTwo()); // 29. dtl_addr_two + pstmt.setString(idx++, entity.getDtlAddrThr()); // 30. dtl_addr_thr + pstmt.setString(idx++, entity.getTlx()); // 31. tlx + } + +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/BareboatCharterHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/BareboatCharterHistoryWriter.java new file mode 100644 index 0000000..411b04a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/BareboatCharterHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.BareboatCharterHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class BareboatCharterHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public BareboatCharterHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int 
subChunkSize) { + super("BareboatCharterHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveBareboatCharterHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CallsignAndMmsiHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CallsignAndMmsiHistoryWriter.java new file mode 100644 index 0000000..4bd0e2b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CallsignAndMmsiHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.CallsignAndMmsiHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class CallsignAndMmsiHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public CallsignAndMmsiHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("CallsignAndMmsiHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveCallsignAndMmsiHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ClassHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ClassHistoryWriter.java new file mode 100644 index 0000000..d7c3c96 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ClassHistoryWriter.java @@ -0,0 +1,27 @@ 
+package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.ClassHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class ClassHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public ClassHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("ClassHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveClassHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CompanyVesselRelationshipsWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CompanyVesselRelationshipsWriter.java new file mode 100644 index 0000000..45d3266 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CompanyVesselRelationshipsWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.CompanyVesselRelationshipsEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class CompanyVesselRelationshipsWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public CompanyVesselRelationshipsWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + 
super("CompanyVesselRelationshipsEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveCompanyVesselRelationships(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CrewListWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CrewListWriter.java new file mode 100644 index 0000000..111df48 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/CrewListWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.CrewListEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class CrewListWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public CrewListWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("CrewListEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveCrewList(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/DarkActivityConfirmedWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/DarkActivityConfirmedWriter.java new file mode 100644 index 0000000..3d33393 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/DarkActivityConfirmedWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import 
com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.DarkActivityConfirmedEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class DarkActivityConfirmedWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public DarkActivityConfirmedWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("DarkActivityConfirmedEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveDarkActivityConfirmed(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/FlagHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/FlagHistoryWriter.java new file mode 100644 index 0000000..78adff1 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/FlagHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.FlagHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class FlagHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public FlagHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("FlagHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + 
protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveFlagHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/GroupBeneficialOwnerHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/GroupBeneficialOwnerHistoryWriter.java new file mode 100644 index 0000000..be57150 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/GroupBeneficialOwnerHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.GroupBeneficialOwnerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class GroupBeneficialOwnerHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public GroupBeneficialOwnerHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("GroupBeneficialOwnerHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveGroupBeneficialOwnerHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/IceClassWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/IceClassWriter.java new file mode 100644 index 0000000..743ee8e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/IceClassWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import 
com.snp.batch.jobs.datasync.batch.ship.entity.IceClassEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class IceClassWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public IceClassWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("IceClassEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveIceClass(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/NameHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/NameHistoryWriter.java new file mode 100644 index 0000000..f8a6981 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/NameHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.NameHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class NameHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public NameHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("NameHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + 
shipRepository.saveNameHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/OperatorHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/OperatorHistoryWriter.java new file mode 100644 index 0000000..8991ca3 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/OperatorHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.OperatorHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class OperatorHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public OperatorHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("OperatorHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveOperatorHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/OwnerHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/OwnerHistoryWriter.java new file mode 100644 index 0000000..ea56d6a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/OwnerHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.OwnerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class OwnerHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public OwnerHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("OwnerHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveOwnerHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/PandIHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/PandIHistoryWriter.java new file mode 100644 index 0000000..f7d6889 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/PandIHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.PandIHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class PandIHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public PandIHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("PandIHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.savePandIHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SafetyManagementCertificateHistWriter.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SafetyManagementCertificateHistWriter.java new file mode 100644 index 0000000..55e5547 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SafetyManagementCertificateHistWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.SafetyManagementCertificateHistEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class SafetyManagementCertificateHistWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public SafetyManagementCertificateHistWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("SafetyManagementCertificateHistEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveSafetyManagementCertificateHist(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipAddInfoWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipAddInfoWriter.java new file mode 100644 index 0000000..86675a2 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipAddInfoWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipAddInfoEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class ShipAddInfoWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public ShipAddInfoWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("ShipAddInfoEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveShipAddInfo(items); // 선박_부가_정보 [ID: S&P-SHIP-003] + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipDataWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipDataWriter.java new file mode 100644 index 0000000..bfa118c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipDataWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipInfoMstEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class ShipDataWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public ShipDataWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("ShipInfoMstEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + @Override + protected void writeItems(List items) throws Exception { + if(items.isEmpty()){ + return; + } + shipRepository.saveShipInfoMst(items); // 선박_정보_마스터 [ID: S&P-SHIP-001] + shipRepository.saveShipMainInfo(items); // 선박_주요_정보 [ID: S&P-SHIP-002] + } +} diff --git 
a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipManagerHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipManagerHistoryWriter.java new file mode 100644 index 0000000..e096431 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ShipManagerHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.ShipManagerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class ShipManagerHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public ShipManagerHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("ShipManagerHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveShipManagerHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SisterShipLinksWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SisterShipLinksWriter.java new file mode 100644 index 0000000..967ec91 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SisterShipLinksWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.SisterShipLinksEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class SisterShipLinksWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public SisterShipLinksWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("SisterShipLinksEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveSisterShipLinks(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SpecialFeatureWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SpecialFeatureWriter.java new file mode 100644 index 0000000..4867546 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SpecialFeatureWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.SpecialFeatureEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class SpecialFeatureWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public SpecialFeatureWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("SpecialFeatureEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveSpecialFeature(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/StatusHistoryWriter.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/StatusHistoryWriter.java new file mode 100644 index 0000000..88de0b7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/StatusHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.StatusHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class StatusHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public StatusHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("StatusHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveStatusHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/StowageCommodityWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/StowageCommodityWriter.java new file mode 100644 index 0000000..1871916 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/StowageCommodityWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.StowageCommodityEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class StowageCommodityWriter extends 
BaseChunkedWriter { + private final ShipRepository shipRepository; + + public StowageCommodityWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("StowageCommodityEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveStowageCommodity(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SurveyDatesHistoryUniqueWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SurveyDatesHistoryUniqueWriter.java new file mode 100644 index 0000000..689c1b8 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SurveyDatesHistoryUniqueWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesHistoryUniqueEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class SurveyDatesHistoryUniqueWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public SurveyDatesHistoryUniqueWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("SurveyDatesHistoryUniqueEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveSurveyDatesHistoryUnique(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SurveyDatesWriter.java 
b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SurveyDatesWriter.java new file mode 100644 index 0000000..5e3b32e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/SurveyDatesWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.SurveyDatesEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class SurveyDatesWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public SurveyDatesWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("SurveyDatesEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveSurveyDates(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/TbCompanyDetailWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/TbCompanyDetailWriter.java new file mode 100644 index 0000000..682ab91 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/TbCompanyDetailWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.TbCompanyDetailEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class TbCompanyDetailWriter extends BaseChunkedWriter { + private 
final ShipRepository shipRepository; + + public TbCompanyDetailWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("TbCompanyDetailEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveTbCompanyDetail(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/TechnicalManagerHistoryWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/TechnicalManagerHistoryWriter.java new file mode 100644 index 0000000..9932ce9 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/TechnicalManagerHistoryWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.TechnicalManagerHistoryEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class TechnicalManagerHistoryWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public TechnicalManagerHistoryWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("TechnicalManagerHistoryEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveTechnicalManagerHistory(items); + } +} diff --git a/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ThrustersWriter.java b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ThrustersWriter.java new file mode 100644 index 
0000000..9bd361a --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/datasync/batch/ship/writer/ThrustersWriter.java @@ -0,0 +1,27 @@ +package com.snp.batch.jobs.datasync.batch.ship.writer; + +import com.snp.batch.common.batch.writer.BaseChunkedWriter; +import com.snp.batch.jobs.datasync.batch.ship.entity.ThrustersEntity; +import com.snp.batch.jobs.datasync.batch.ship.repository.ShipRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.transaction.PlatformTransactionManager; + +import java.util.List; + +@Slf4j +public class ThrustersWriter extends BaseChunkedWriter { + private final ShipRepository shipRepository; + + public ThrustersWriter(ShipRepository shipRepository, PlatformTransactionManager transactionManager, int subChunkSize) { + super("ThrustersEntity", transactionManager, subChunkSize); + this.shipRepository = shipRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + if (items.isEmpty()) { + return; + } + shipRepository.saveThrusters(items); + } +} diff --git a/src/main/java/com/snp/batch/scheduler/QuartzBatchJob.java b/src/main/java/com/snp/batch/scheduler/QuartzBatchJob.java new file mode 100644 index 0000000..d723614 --- /dev/null +++ b/src/main/java/com/snp/batch/scheduler/QuartzBatchJob.java @@ -0,0 +1,52 @@ +package com.snp.batch.scheduler; + +import com.snp.batch.service.QuartzJobService; +import lombok.extern.slf4j.Slf4j; +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Quartz Job 구현체 + * Quartz 스케줄러에 의해 실행되어 실제 Spring Batch Job을 호출 + */ +@Slf4j +@Component +public class QuartzBatchJob implements Job { + + private static final long serialVersionUID = 1L; + + @Autowired + private transient QuartzJobService quartzJobService; + + /** + * Quartz 스케줄러에 의해 호출되는 메서드 + * + * @param context JobExecutionContext + * 
@throws JobExecutionException 실행 중 발생한 예외 + */ + @Override + public void execute(JobExecutionContext context) throws JobExecutionException { + // JobDataMap에서 배치 작업 이름 가져오기 + String jobName = context.getJobDetail().getJobDataMap().getString("jobName"); + + log.info("========================================"); + log.info("Quartz 스케줄러 트리거 발생"); + log.info("실행할 배치 작업: {}", jobName); + log.info("트리거 시간: {}", context.getFireTime()); + log.info("다음 실행 시간: {}", context.getNextFireTime()); + log.info("========================================"); + + try { + // QuartzJobService를 통해 실제 Spring Batch Job 실행 + quartzJobService.executeBatchJob(jobName); + + } catch (Exception e) { + log.error("Quartz Job 실행 중 에러 발생", e); + // JobExecutionException으로 래핑하여 Quartz에 에러 전파 + throw new JobExecutionException("Failed to execute batch job: " + jobName, e); + } + } +} diff --git a/src/main/java/com/snp/batch/scheduler/SchedulerInitializer.java b/src/main/java/com/snp/batch/scheduler/SchedulerInitializer.java new file mode 100644 index 0000000..0fc863f --- /dev/null +++ b/src/main/java/com/snp/batch/scheduler/SchedulerInitializer.java @@ -0,0 +1,120 @@ +package com.snp.batch.scheduler; + +import com.snp.batch.global.model.JobScheduleEntity; +import com.snp.batch.global.repository.JobScheduleRepository; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.quartz.*; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Component; + +import java.util.List; + +/** + * 애플리케이션 시작 시 DB에 저장된 스케줄을 Quartz에 자동 로드 + * ApplicationReadyEvent를 수신하여 모든 빈 초기화 후 실행 + */ +@Slf4j +@Component +@RequiredArgsConstructor +public class SchedulerInitializer { + + private final JobScheduleRepository scheduleRepository; + private final Scheduler scheduler; + + /** + * 애플리케이션 준비 완료 시 호출 + * DB의 활성화된 스케줄을 Quartz에 로드 + */ + @EventListener(ApplicationReadyEvent.class) + public 
void initializeSchedules() { + log.info("========================================"); + log.info("스케줄러 초기화 시작"); + log.info("========================================"); + + try { + // DB에서 활성화된 스케줄 조회 + List activeSchedules = scheduleRepository.findAllActive(); + + if (activeSchedules.isEmpty()) { + log.info("활성화된 스케줄이 없습니다."); + return; + } + + log.info("총 {}개의 활성 스케줄을 로드합니다.", activeSchedules.size()); + + int successCount = 0; + int failCount = 0; + + // 각 스케줄을 Quartz에 등록 + for (JobScheduleEntity schedule : activeSchedules) { + try { + registerSchedule(schedule); + successCount++; + log.info("✓ 스케줄 로드 성공: {} (Cron: {})", + schedule.getJobName(), schedule.getCronExpression()); + + } catch (Exception e) { + failCount++; + log.error("✗ 스케줄 로드 실패: {}", schedule.getJobName(), e); + } + } + + log.info("========================================"); + log.info("스케줄러 초기화 완료"); + log.info("성공: {}개, 실패: {}개", successCount, failCount); + log.info("========================================"); + + // Quartz 스케줄러 시작 + if (!scheduler.isStarted()) { + scheduler.start(); + log.info("Quartz 스케줄러 시작됨"); + } + + } catch (Exception e) { + log.error("스케줄러 초기화 중 에러 발생", e); + } + } + + /** + * 개별 스케줄을 Quartz에 등록 + * + * @param schedule JobScheduleEntity + * @throws SchedulerException Quartz 스케줄러 예외 + */ + private void registerSchedule(JobScheduleEntity schedule) throws SchedulerException { + String jobName = schedule.getJobName(); + JobKey jobKey = new JobKey(jobName, "batch-jobs"); + TriggerKey triggerKey = new TriggerKey(jobName + "-trigger", "batch-triggers"); + + // 기존 스케줄 확인 및 삭제 + if (scheduler.checkExists(jobKey)) { + scheduler.deleteJob(jobKey); + log.debug("기존 Quartz Job 삭제: {}", jobName); + } + + // JobDetail 생성 + JobDetail jobDetail = JobBuilder.newJob(QuartzBatchJob.class) + .withIdentity(jobKey) + .usingJobData("jobName", jobName) + .withDescription(schedule.getDescription()) + .storeDurably(true) + .build(); + + // CronTrigger 생성 + CronTrigger trigger = 
TriggerBuilder.newTrigger() + .withIdentity(triggerKey) + .withSchedule(CronScheduleBuilder.cronSchedule(schedule.getCronExpression())) + .forJob(jobKey) + .build(); + + // Quartz에 스케줄 등록 + scheduler.scheduleJob(jobDetail, trigger); + + // 다음 실행 시간 로깅 + if (trigger.getNextFireTime() != null) { + log.debug(" → 다음 실행 예정: {}", trigger.getNextFireTime()); + } + } +} diff --git a/src/main/java/com/snp/batch/service/BatchService.java b/src/main/java/com/snp/batch/service/BatchService.java new file mode 100644 index 0000000..7570881 --- /dev/null +++ b/src/main/java/com/snp/batch/service/BatchService.java @@ -0,0 +1,675 @@ +package com.snp.batch.service; + +import com.snp.batch.global.dto.JobExecutionDto; +import com.snp.batch.global.repository.TimelineRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Lazy; +import org.springframework.stereotype.Service; + +import java.util.*; +import java.util.stream.Collectors; + +@Slf4j +@Service +public class BatchService { + + private final JobLauncher jobLauncher; + private final JobExplorer jobExplorer; + private final JobOperator jobOperator; + private final Map jobMap; + private final ScheduleService scheduleService; + private final TimelineRepository timelineRepository; + + @Autowired + public BatchService(JobLauncher jobLauncher, + JobExplorer jobExplorer, + JobOperator jobOperator, + Map jobMap, + @Lazy ScheduleService scheduleService, + TimelineRepository timelineRepository) { + 
this.jobLauncher = jobLauncher; + this.jobExplorer = jobExplorer; + this.jobOperator = jobOperator; + this.jobMap = jobMap; + this.scheduleService = scheduleService; + this.timelineRepository = timelineRepository; + } + + public Long executeJob(String jobName) throws Exception { + return executeJob(jobName, null); + } + + public Long executeJob(String jobName, Map params) throws Exception { + Job job = jobMap.get(jobName); + if (job == null) { + throw new IllegalArgumentException("Job not found: " + jobName); + } + + JobParametersBuilder builder = new JobParametersBuilder() + .addLong("timestamp", System.currentTimeMillis()); + + // 동적 파라미터 추가 + if (params != null && !params.isEmpty()) { + params.forEach((key, value) -> { + // timestamp는 자동 생성되므로 무시 + if (!"timestamp".equals(key)) { + builder.addString(key, value); + } + }); + } + + JobParameters jobParameters = builder.toJobParameters(); + JobExecution jobExecution = jobLauncher.run(job, jobParameters); + return jobExecution.getId(); + } + + public List listAllJobs() { + return new ArrayList<>(jobMap.keySet()); + } + + public List getJobExecutions(String jobName) { + List jobInstances = jobExplorer.findJobInstancesByJobName(jobName, 0, 100); + + return jobInstances.stream() + .flatMap(instance -> jobExplorer.getJobExecutions(instance).stream()) + .map(this::convertToDto) + .sorted(Comparator.comparing(JobExecutionDto::getExecutionId).reversed()) + .collect(Collectors.toList()); + } + + public List getRecentExecutions(int limit) { + List> rows = timelineRepository.findRecentExecutionsWithDetail(limit); + return rows.stream() + .map(this::convertMapToDto) + .collect(Collectors.toList()); + } + + public JobExecutionDto getExecutionDetails(Long executionId) { + JobExecution jobExecution = jobExplorer.getJobExecution(executionId); + if (jobExecution == null) { + throw new IllegalArgumentException("Job execution not found: " + executionId); + } + return convertToDto(jobExecution); + } + + public 
com.snp.batch.global.dto.JobExecutionDetailDto getExecutionDetailWithSteps(Long executionId) { + JobExecution jobExecution = jobExplorer.getJobExecution(executionId); + if (jobExecution == null) { + throw new IllegalArgumentException("Job execution not found: " + executionId); + } + return convertToDetailDto(jobExecution); + } + + public void stopExecution(Long executionId) throws Exception { + jobOperator.stop(executionId); + } + + + private JobExecutionDto convertToDto(JobExecution jobExecution) { + return JobExecutionDto.builder() + .executionId(jobExecution.getId()) + .jobName(jobExecution.getJobInstance().getJobName()) + .status(jobExecution.getStatus().name()) + .startTime(jobExecution.getStartTime()) + .endTime(jobExecution.getEndTime()) + .exitCode(jobExecution.getExitStatus().getExitCode()) + .exitMessage(jobExecution.getExitStatus().getExitDescription()) + .build(); + } + + private JobExecutionDto convertMapToDto(Map data) { + java.sql.Timestamp startTs = (java.sql.Timestamp) data.get("startTime"); + java.sql.Timestamp endTs = (java.sql.Timestamp) data.get("endTime"); + return JobExecutionDto.builder() + .executionId(((Number) data.get("executionId")).longValue()) + .jobName((String) data.get("jobName")) + .status((String) data.get("status")) + .startTime(startTs != null ? startTs.toLocalDateTime() : null) + .endTime(endTs != null ? 
endTs.toLocalDateTime() : null) + .exitCode((String) data.get("exitCode")) + .exitMessage((String) data.get("exitMessage")) + .build(); + } + + private com.snp.batch.global.dto.JobExecutionDetailDto convertToDetailDto(JobExecution jobExecution) { + // 실행 시간 계산 + Long duration = null; + if (jobExecution.getStartTime() != null && jobExecution.getEndTime() != null) { + duration = java.time.Duration.between( + jobExecution.getStartTime(), + jobExecution.getEndTime() + ).toMillis(); + } + + // Job Parameters 변환 (timestamp는 포맷팅) + Map params = new java.util.LinkedHashMap<>(); + jobExecution.getJobParameters().getParameters().forEach((key, value) -> { + Object paramValue = value.getValue(); + + // timestamp 파라미터는 포맷팅된 문자열도 함께 표시 + if ("timestamp".equals(key) && paramValue instanceof Long) { + Long timestamp = (Long) paramValue; + java.time.LocalDateTime dateTime = java.time.LocalDateTime.ofInstant( + java.time.Instant.ofEpochMilli(timestamp), + java.time.ZoneId.systemDefault() + ); + String formatted = dateTime.format(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + params.put(key, timestamp + " (" + formatted + ")"); + } else { + params.put(key, paramValue); + } + }); + + // Step Executions 변환 + List stepDtos = + jobExecution.getStepExecutions().stream() + .map(this::convertStepToDto) + .collect(Collectors.toList()); + + // 전체 통계 계산 + int totalReadCount = stepDtos.stream().mapToInt(s -> s.getReadCount() != null ? s.getReadCount() : 0).sum(); + int totalWriteCount = stepDtos.stream().mapToInt(s -> s.getWriteCount() != null ? s.getWriteCount() : 0).sum(); + int totalSkipCount = stepDtos.stream().mapToInt(s -> + (s.getReadSkipCount() != null ? s.getReadSkipCount() : 0) + + (s.getProcessSkipCount() != null ? s.getProcessSkipCount() : 0) + + (s.getWriteSkipCount() != null ? s.getWriteSkipCount() : 0) + ).sum(); + int totalFilterCount = stepDtos.stream().mapToInt(s -> s.getFilterCount() != null ? 
s.getFilterCount() : 0).sum(); + + return com.snp.batch.global.dto.JobExecutionDetailDto.builder() + .executionId(jobExecution.getId()) + .jobName(jobExecution.getJobInstance().getJobName()) + .status(jobExecution.getStatus().name()) + .startTime(jobExecution.getStartTime()) + .endTime(jobExecution.getEndTime()) + .exitCode(jobExecution.getExitStatus().getExitCode()) + .exitMessage(jobExecution.getExitStatus().getExitDescription()) + .jobParameters(params) + .jobInstanceId(jobExecution.getJobInstance().getInstanceId()) + .duration(duration) + .readCount(totalReadCount) + .writeCount(totalWriteCount) + .skipCount(totalSkipCount) + .filterCount(totalFilterCount) + .stepExecutions(stepDtos) + .build(); + } + + private com.snp.batch.global.dto.JobExecutionDetailDto.StepExecutionDto convertStepToDto( + org.springframework.batch.core.StepExecution stepExecution) { + + Long duration = null; + if (stepExecution.getStartTime() != null && stepExecution.getEndTime() != null) { + duration = java.time.Duration.between( + stepExecution.getStartTime(), + stepExecution.getEndTime() + ).toMillis(); + } + + // StepExecutionContext에서 API 정보 추출 + com.snp.batch.global.dto.JobExecutionDetailDto.ApiCallInfo apiCallInfo = extractApiCallInfo(stepExecution); + + return com.snp.batch.global.dto.JobExecutionDetailDto.StepExecutionDto.builder() + .stepExecutionId(stepExecution.getId()) + .stepName(stepExecution.getStepName()) + .status(stepExecution.getStatus().name()) + .startTime(stepExecution.getStartTime()) + .endTime(stepExecution.getEndTime()) + .readCount((int) stepExecution.getReadCount()) + .writeCount((int) stepExecution.getWriteCount()) + .commitCount((int) stepExecution.getCommitCount()) + .rollbackCount((int) stepExecution.getRollbackCount()) + .readSkipCount((int) stepExecution.getReadSkipCount()) + .processSkipCount((int) stepExecution.getProcessSkipCount()) + .writeSkipCount((int) stepExecution.getWriteSkipCount()) + .filterCount((int) stepExecution.getFilterCount()) + 
.exitCode(stepExecution.getExitStatus().getExitCode()) + .exitMessage(stepExecution.getExitStatus().getExitDescription()) + .duration(duration) + .apiCallInfo(apiCallInfo) // API 정보 추가 + .build(); + } + + /** + * StepExecutionContext에서 API 호출 정보 추출 + * + * @param stepExecution Step 실행 정보 + * @return API 호출 정보 (없으면 null) + */ + private com.snp.batch.global.dto.JobExecutionDetailDto.ApiCallInfo extractApiCallInfo( + org.springframework.batch.core.StepExecution stepExecution) { + + org.springframework.batch.item.ExecutionContext context = stepExecution.getExecutionContext(); + + // API URL이 없으면 API를 사용하지 않는 Step + if (!context.containsKey("apiUrl")) { + return null; + } + + // API 정보 추출 + String apiUrl = context.getString("apiUrl"); + String method = context.getString("apiMethod", "GET"); + Integer totalCalls = context.getInt("totalApiCalls", 0); + Integer completedCalls = context.getInt("completedApiCalls", 0); + String lastCallTime = context.getString("lastCallTime", ""); + + // API Parameters 추출 + Map parameters = null; + if (context.containsKey("apiParameters")) { + Object paramsObj = context.get("apiParameters"); + if (paramsObj instanceof Map) { + parameters = (Map) paramsObj; + } + } + + return com.snp.batch.global.dto.JobExecutionDetailDto.ApiCallInfo.builder() + .apiUrl(apiUrl) + .method(method) + .parameters(parameters) + .totalCalls(totalCalls) + .completedCalls(completedCalls) + .lastCallTime(lastCallTime) + .build(); + } + + public com.snp.batch.global.dto.TimelineResponse getTimeline(String view, String dateStr) { + try { + java.time.LocalDate date = java.time.LocalDate.parse(dateStr.substring(0, 10)); + java.util.List periods = new ArrayList<>(); + String periodLabel = ""; + + // 조회 범위 설정 + java.time.LocalDateTime rangeStart; + java.time.LocalDateTime rangeEnd; + + if ("day".equals(view)) { + // 일별: 24시간 + periodLabel = date.format(java.time.format.DateTimeFormatter.ofPattern("yyyy년 MM월 dd일")); + rangeStart = date.atStartOfDay(); + rangeEnd = 
rangeStart.plusDays(1); + + for (int hour = 0; hour < 24; hour++) { + periods.add(com.snp.batch.global.dto.TimelineResponse.PeriodInfo.builder() + .key(date.toString() + "-" + String.format("%02d", hour)) + .label(String.format("%02d:00", hour)) + .build()); + } + } else if ("week".equals(view)) { + // 주별: 7일 + java.time.LocalDate startOfWeek = date.with(java.time.DayOfWeek.MONDAY); + java.time.LocalDate endOfWeek = startOfWeek.plusDays(6); + periodLabel = String.format("%s ~ %s", + startOfWeek.format(java.time.format.DateTimeFormatter.ofPattern("MM/dd")), + endOfWeek.format(java.time.format.DateTimeFormatter.ofPattern("MM/dd"))); + + rangeStart = startOfWeek.atStartOfDay(); + rangeEnd = endOfWeek.plusDays(1).atStartOfDay(); + + for (int day = 0; day < 7; day++) { + java.time.LocalDate current = startOfWeek.plusDays(day); + periods.add(com.snp.batch.global.dto.TimelineResponse.PeriodInfo.builder() + .key(current.toString()) + .label(current.format(java.time.format.DateTimeFormatter.ofPattern("MM/dd (E)", java.util.Locale.KOREAN))) + .build()); + } + } else if ("month".equals(view)) { + // 월별: 해당 월의 모든 날 + java.time.YearMonth yearMonth = java.time.YearMonth.from(date); + periodLabel = date.format(java.time.format.DateTimeFormatter.ofPattern("yyyy년 MM월")); + + rangeStart = yearMonth.atDay(1).atStartOfDay(); + rangeEnd = yearMonth.atEndOfMonth().plusDays(1).atStartOfDay(); + + for (int day = 1; day <= yearMonth.lengthOfMonth(); day++) { + java.time.LocalDate current = yearMonth.atDay(day); + periods.add(com.snp.batch.global.dto.TimelineResponse.PeriodInfo.builder() + .key(current.toString()) + .label(String.format("%d일", day)) + .build()); + } + } else { + throw new IllegalArgumentException("Invalid view type: " + view); + } + + // 활성 스케줄 조회 + java.util.List activeSchedules = scheduleService.getAllActiveSchedules(); + Map scheduleMap = activeSchedules.stream() + .collect(Collectors.toMap( + com.snp.batch.global.dto.ScheduleResponse::getJobName, + s -> s + )); + + // 
모든 Job의 실행 이력을 한 번의 쿼리로 조회 (경량화) + List> allExecutions = timelineRepository.findAllExecutionsByDateRange(rangeStart, rangeEnd); + + // Job별로 그룹화 + Map>> executionsByJob = allExecutions.stream() + .collect(Collectors.groupingBy(exec -> (String) exec.get("jobName"))); + + // 타임라인 스케줄 구성 + java.util.List schedules = new ArrayList<>(); + + // 실행 이력이 있거나 스케줄이 있는 모든 Job 처리 + Set allJobNames = new HashSet<>(executionsByJob.keySet()); + allJobNames.addAll(scheduleMap.keySet()); + + for (String jobName : allJobNames) { + if (!jobMap.containsKey(jobName)) { + continue; // 현재 존재하지 않는 Job은 스킵 + } + + List> jobExecutions = executionsByJob.getOrDefault(jobName, Collections.emptyList()); + Map executions = new HashMap<>(); + + // 각 period에 대해 실행 이력 또는 예정 상태 매핑 + for (com.snp.batch.global.dto.TimelineResponse.PeriodInfo period : periods) { + Map matchedExecution = findExecutionForPeriodFromMap(jobExecutions, period, view); + + if (matchedExecution != null) { + // 과거 실행 이력이 있는 경우 + java.sql.Timestamp startTimestamp = (java.sql.Timestamp) matchedExecution.get("startTime"); + java.sql.Timestamp endTimestamp = (java.sql.Timestamp) matchedExecution.get("endTime"); + + executions.put(period.getKey(), com.snp.batch.global.dto.TimelineResponse.ExecutionInfo.builder() + .executionId(((Number) matchedExecution.get("executionId")).longValue()) + .status((String) matchedExecution.get("status")) + .startTime(startTimestamp != null ? startTimestamp.toLocalDateTime().toString() : null) + .endTime(endTimestamp != null ? 
endTimestamp.toLocalDateTime().toString() : null) + .build()); + } else if (scheduleMap.containsKey(jobName)) { + // 스케줄이 있고, 실행 이력이 없는 경우 - 미래 예정 시간 체크 + com.snp.batch.global.dto.ScheduleResponse schedule = scheduleMap.get(jobName); + if (isScheduledForPeriod(schedule, period, view)) { + executions.put(period.getKey(), com.snp.batch.global.dto.TimelineResponse.ExecutionInfo.builder() + .status("SCHEDULED") + .startTime(null) + .endTime(null) + .build()); + } + } + } + + if (!executions.isEmpty()) { + schedules.add(com.snp.batch.global.dto.TimelineResponse.ScheduleTimeline.builder() + .jobName(jobName) + .executions(executions) + .build()); + } + } + + return com.snp.batch.global.dto.TimelineResponse.builder() + .periodLabel(periodLabel) + .periods(periods) + .schedules(schedules) + .build(); + + } catch (Exception e) { + log.error("Error generating timeline", e); + throw new RuntimeException("Failed to generate timeline", e); + } + } + + /** + * Map 기반 실행 이력에서 특정 Period에 해당하는 실행 찾기 + */ + private Map findExecutionForPeriodFromMap( + List> executions, + com.snp.batch.global.dto.TimelineResponse.PeriodInfo period, + String view) { + + return executions.stream() + .filter(exec -> exec.get("startTime") != null) + .filter(exec -> { + java.sql.Timestamp timestamp = (java.sql.Timestamp) exec.get("startTime"); + java.time.LocalDateTime startTime = timestamp.toLocalDateTime(); + String periodKey = period.getKey(); + + if ("day".equals(view)) { + // 시간별 매칭 (key format: "2025-10-14-00") + int lastDashIndex = periodKey.lastIndexOf('-'); + String dateStr = periodKey.substring(0, lastDashIndex); + int hour = Integer.parseInt(periodKey.substring(lastDashIndex + 1)); + + java.time.LocalDate periodDate = java.time.LocalDate.parse(dateStr); + + return startTime.toLocalDate().equals(periodDate) && + startTime.getHour() == hour; + } else { + // 일별 매칭 + java.time.LocalDate periodDate = java.time.LocalDate.parse(periodKey); + return startTime.toLocalDate().equals(periodDate); + } + }) 
+ .max(Comparator.comparing(exec -> ((java.sql.Timestamp) exec.get("startTime")).toLocalDateTime())) + .orElse(null); + } + + private boolean isJobScheduled(String jobName) { + // 스케줄이 있는지 확인 + try { + scheduleService.getScheduleByJobName(jobName); + return true; + } catch (Exception e) { + return false; + } + } + + private boolean isScheduledForPeriod(com.snp.batch.global.dto.ScheduleResponse schedule, + com.snp.batch.global.dto.TimelineResponse.PeriodInfo period, + String view) { + if (schedule.getNextFireTime() == null) { + return false; + } + + java.time.LocalDateTime nextFireTime = schedule.getNextFireTime() + .toInstant() + .atZone(java.time.ZoneId.systemDefault()) + .toLocalDateTime(); + + String periodKey = period.getKey(); + + if ("day".equals(view)) { + // 시간별 매칭 (key format: "2025-10-14-00") + int lastDashIndex = periodKey.lastIndexOf('-'); + String dateStr = periodKey.substring(0, lastDashIndex); + int hour = Integer.parseInt(periodKey.substring(lastDashIndex + 1)); + + java.time.LocalDate periodDate = java.time.LocalDate.parse(dateStr); + java.time.LocalDateTime periodStart = periodDate.atTime(hour, 0); + java.time.LocalDateTime periodEnd = periodStart.plusHours(1); + + return !nextFireTime.isBefore(periodStart) && nextFireTime.isBefore(periodEnd); + } else { + // 일별 매칭 + java.time.LocalDate periodDate = java.time.LocalDate.parse(periodKey); + java.time.LocalDateTime periodStart = periodDate.atStartOfDay(); + java.time.LocalDateTime periodEnd = periodStart.plusDays(1); + + return !nextFireTime.isBefore(periodStart) && nextFireTime.isBefore(periodEnd); + } + } + + public List getPeriodExecutions(String jobName, String view, String periodKey) { + List jobInstances = jobExplorer.findJobInstancesByJobName(jobName, 0, 1000); + + return jobInstances.stream() + .flatMap(instance -> jobExplorer.getJobExecutions(instance).stream()) + .filter(exec -> exec.getStartTime() != null) + .filter(exec -> matchesPeriod(exec, view, periodKey)) + 
.sorted(Comparator.comparing(JobExecution::getStartTime).reversed()) + .map(this::convertToDto) + .collect(Collectors.toList()); + } + + private boolean matchesPeriod(JobExecution execution, String view, String periodKey) { + java.time.LocalDateTime startTime = execution.getStartTime(); + + if ("day".equals(view)) { + // 시간별 매칭 (key format: "2025-10-14-00") + int lastDashIndex = periodKey.lastIndexOf('-'); + String dateStr = periodKey.substring(0, lastDashIndex); + int hour = Integer.parseInt(periodKey.substring(lastDashIndex + 1)); + + java.time.LocalDate periodDate = java.time.LocalDate.parse(dateStr); + + return startTime.toLocalDate().equals(periodDate) && + startTime.getHour() == hour; + } else { + // 일별 매칭 + java.time.LocalDate periodDate = java.time.LocalDate.parse(periodKey); + return startTime.toLocalDate().equals(periodDate); + } + } + + /** + * 대시보드 데이터 조회 (한 번의 호출로 모든 데이터 반환) + */ + public com.snp.batch.global.dto.DashboardResponse getDashboardData() { + // 1. 스케줄 통계 + java.util.List allSchedules = scheduleService.getAllSchedules(); + int totalSchedules = allSchedules.size(); + int activeSchedules = (int) allSchedules.stream().filter(com.snp.batch.global.dto.ScheduleResponse::getActive).count(); + int inactiveSchedules = totalSchedules - activeSchedules; + int totalJobs = jobMap.size(); + + com.snp.batch.global.dto.DashboardResponse.Stats stats = com.snp.batch.global.dto.DashboardResponse.Stats.builder() + .totalSchedules(totalSchedules) + .activeSchedules(activeSchedules) + .inactiveSchedules(inactiveSchedules) + .totalJobs(totalJobs) + .build(); + + // 2. 
실행 중인 Job (한 번의 쿼리) + List> runningData = timelineRepository.findRunningExecutions(); + List runningJobs = runningData.stream() + .map(data -> { + java.sql.Timestamp startTimestamp = (java.sql.Timestamp) data.get("startTime"); + return com.snp.batch.global.dto.DashboardResponse.RunningJob.builder() + .jobName((String) data.get("jobName")) + .executionId(((Number) data.get("executionId")).longValue()) + .status((String) data.get("status")) + .startTime(startTimestamp != null ? startTimestamp.toLocalDateTime() : null) + .build(); + }) + .collect(Collectors.toList()); + + // 3. 최근 실행 이력 (한 번의 쿼리로 상위 10개) + List> recentData = timelineRepository.findRecentExecutions(10); + List recentExecutions = recentData.stream() + .map(data -> { + java.sql.Timestamp startTimestamp = (java.sql.Timestamp) data.get("startTime"); + java.sql.Timestamp endTimestamp = (java.sql.Timestamp) data.get("endTime"); + return com.snp.batch.global.dto.DashboardResponse.RecentExecution.builder() + .executionId(((Number) data.get("executionId")).longValue()) + .jobName((String) data.get("jobName")) + .status((String) data.get("status")) + .startTime(startTimestamp != null ? startTimestamp.toLocalDateTime() : null) + .endTime(endTimestamp != null ? 
endTimestamp.toLocalDateTime() : null) + .build(); + }) + .collect(Collectors.toList()); + + return com.snp.batch.global.dto.DashboardResponse.builder() + .stats(stats) + .runningJobs(runningJobs) + .recentExecutions(recentExecutions) + .build(); + } + + // ── F7: Job 상세 목록 ──────────────────────────────────────── + + public List getJobsWithDetail() { + List> lastExecutions = timelineRepository.findLastExecutionPerJob(); + Map> lastExecMap = lastExecutions.stream() + .collect(Collectors.toMap( + data -> (String) data.get("jobName"), + data -> data + )); + + List schedules = scheduleService.getAllSchedules(); + Map cronMap = schedules.stream() + .collect(Collectors.toMap( + com.snp.batch.global.dto.ScheduleResponse::getJobName, + com.snp.batch.global.dto.ScheduleResponse::getCronExpression, + (a, b) -> a + )); + + return jobMap.keySet().stream() + .sorted() + .map(jobName -> { + com.snp.batch.global.dto.JobDetailDto.LastExecution lastExec = null; + Map execData = lastExecMap.get(jobName); + if (execData != null) { + java.sql.Timestamp startTs = (java.sql.Timestamp) execData.get("startTime"); + java.sql.Timestamp endTs = (java.sql.Timestamp) execData.get("endTime"); + lastExec = com.snp.batch.global.dto.JobDetailDto.LastExecution.builder() + .executionId(((Number) execData.get("executionId")).longValue()) + .status((String) execData.get("status")) + .startTime(startTs != null ? startTs.toLocalDateTime() : null) + .endTime(endTs != null ? 
endTs.toLocalDateTime() : null) + .build(); + } + + return com.snp.batch.global.dto.JobDetailDto.builder() + .jobName(jobName) + .lastExecution(lastExec) + .scheduleCron(cronMap.get(jobName)) + .build(); + }) + .collect(Collectors.toList()); + } + + // ── F8: 실행 통계 ────────────────────────────────────────── + + public com.snp.batch.global.dto.ExecutionStatisticsDto getStatistics(int days) { + List> dailyData = timelineRepository.findDailyStatistics(days); + return buildStatisticsDto(dailyData); + } + + public com.snp.batch.global.dto.ExecutionStatisticsDto getJobStatistics(String jobName, int days) { + List> dailyData = timelineRepository.findDailyStatisticsForJob(jobName, days); + return buildStatisticsDto(dailyData); + } + + private com.snp.batch.global.dto.ExecutionStatisticsDto buildStatisticsDto(List> dailyData) { + List dailyStats = dailyData.stream() + .map(data -> { + Object dateObj = data.get("execDate"); + String dateStr = dateObj != null ? dateObj.toString() : ""; + Number avgMs = (Number) data.get("avgDurationMs"); + return com.snp.batch.global.dto.ExecutionStatisticsDto.DailyStat.builder() + .date(dateStr) + .successCount(((Number) data.get("successCount")).intValue()) + .failedCount(((Number) data.get("failedCount")).intValue()) + .otherCount(((Number) data.get("otherCount")).intValue()) + .avgDurationMs(avgMs != null ? 
avgMs.doubleValue() : 0) + .build(); + }) + .collect(Collectors.toList()); + + int totalSuccess = dailyStats.stream().mapToInt(com.snp.batch.global.dto.ExecutionStatisticsDto.DailyStat::getSuccessCount).sum(); + int totalFailed = dailyStats.stream().mapToInt(com.snp.batch.global.dto.ExecutionStatisticsDto.DailyStat::getFailedCount).sum(); + int totalOther = dailyStats.stream().mapToInt(com.snp.batch.global.dto.ExecutionStatisticsDto.DailyStat::getOtherCount).sum(); + double avgDuration = dailyStats.stream() + .mapToDouble(com.snp.batch.global.dto.ExecutionStatisticsDto.DailyStat::getAvgDurationMs) + .average() + .orElse(0); + + return com.snp.batch.global.dto.ExecutionStatisticsDto.builder() + .dailyStats(dailyStats) + .totalExecutions(totalSuccess + totalFailed + totalOther) + .totalSuccess(totalSuccess) + .totalFailed(totalFailed) + .avgDurationMs(avgDuration) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/service/QuartzJobService.java b/src/main/java/com/snp/batch/service/QuartzJobService.java new file mode 100644 index 0000000..b94315f --- /dev/null +++ b/src/main/java/com/snp/batch/service/QuartzJobService.java @@ -0,0 +1,68 @@ +package com.snp.batch.service; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.stereotype.Service; + +import java.util.Map; + +/** + * Quartz Job과 Spring Batch Job을 연동하는 서비스 + * Quartz 스케줄러에서 호출되어 실제 배치 작업을 실행 + */ +@Slf4j +@Service +@RequiredArgsConstructor +public class QuartzJobService { + + private final JobLauncher jobLauncher; + private final Map jobMap; + + /** + * 배치 작업 실행 + * + * @param jobName 실행할 Job 이름 + * @throws Exception Job 실행 중 발생한 예외 + */ + public void executeBatchJob(String jobName) throws Exception { + log.info("스케줄러에 의해 배치 작업 실행 시작: 
{}", jobName); + + // Job Bean 조회 + Job job = jobMap.get(jobName); + if (job == null) { + log.error("배치 작업을 찾을 수 없습니다: {}", jobName); + throw new IllegalArgumentException("Job not found: " + jobName); + } + + // JobParameters 생성 (timestamp를 포함하여 매번 다른 JobInstance 생성) + JobParameters jobParameters = new JobParametersBuilder() + .addLong("timestamp", System.currentTimeMillis()) + .addString("triggeredBy", "SCHEDULER") + .toJobParameters(); + + try { + // 배치 작업 실행 + var jobExecution = jobLauncher.run(job, jobParameters); + log.info("배치 작업 실행 완료: {} (Execution ID: {})", jobName, jobExecution.getId()); + log.info("실행 상태: {}", jobExecution.getStatus()); + + } catch (Exception e) { + log.error("배치 작업 실행 중 에러 발생: {}", jobName, e); + throw e; + } + } + + /** + * Job 이름 유효성 검사 + * + * @param jobName 검사할 Job 이름 + * @return boolean Job 존재 여부 + */ + public boolean isValidJob(String jobName) { + return jobMap.containsKey(jobName); + } +} diff --git a/src/main/java/com/snp/batch/service/ScheduleService.java b/src/main/java/com/snp/batch/service/ScheduleService.java new file mode 100644 index 0000000..d92ed5b --- /dev/null +++ b/src/main/java/com/snp/batch/service/ScheduleService.java @@ -0,0 +1,354 @@ +package com.snp.batch.service; + +import com.snp.batch.global.dto.ScheduleRequest; +import com.snp.batch.global.dto.ScheduleResponse; +import com.snp.batch.global.model.JobScheduleEntity; +import com.snp.batch.global.repository.JobScheduleRepository; +import com.snp.batch.scheduler.QuartzBatchJob; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.quartz.*; +import org.springframework.batch.core.Job; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * DB 영속화를 지원하는 스케줄 관리 서비스 + * Quartz 스케줄러와 DB를 동기화하여 재시작 후에도 스케줄 유지 + */ +@Slf4j +@Service +@RequiredArgsConstructor 
+public class ScheduleService { + + private final JobScheduleRepository scheduleRepository; + private final Scheduler scheduler; + private final Map jobMap; + private final QuartzJobService quartzJobService; + + /** + * 스케줄 생성 (DB 저장 + Quartz 등록) + * + * @param request 스케줄 요청 정보 + * @return ScheduleResponse 생성된 스케줄 정보 + * @throws Exception 스케줄 생성 중 발생한 예외 + */ + @Transactional + public ScheduleResponse createSchedule(ScheduleRequest request) throws Exception { + String jobName = request.getJobName(); + + log.info("스케줄 생성 시작: {}", jobName); + + // 1. Job 이름 유효성 검사 + if (!quartzJobService.isValidJob(jobName)) { + throw new IllegalArgumentException("Invalid job name: " + jobName + ". Job does not exist."); + } + + // 2. 중복 체크 + if (scheduleRepository.existsByJobName(jobName)) { + throw new IllegalArgumentException("Schedule already exists for job: " + jobName); + } + + // 3. Cron 표현식 유효성 검사 + try { + CronScheduleBuilder.cronSchedule(request.getCronExpression()); + } catch (Exception e) { + throw new IllegalArgumentException("Invalid cron expression: " + request.getCronExpression(), e); + } + + // 4. DB에 저장 + JobScheduleEntity entity = JobScheduleEntity.builder() + .jobName(jobName) + .cronExpression(request.getCronExpression()) + .description(request.getDescription()) + .active(request.getActive() != null ? request.getActive() : true) + .build(); + + // BaseEntity 필드는 setter로 설정하거나 PrePersist에서 자동 설정됨 + // (PrePersist가 자동으로 SYSTEM으로 설정) + + entity = scheduleRepository.save(entity); + log.info("DB에 스케줄 저장 완료: ID={}, Job={}", entity.getId(), jobName); + + // 5. Quartz에 등록 (active=true인 경우만) + if (entity.getActive()) { + try { + registerQuartzJob(entity); + log.info("Quartz 등록 완료: {}", jobName); + } catch (Exception e) { + log.error("Quartz 등록 실패 (DB 저장은 완료됨): {}", jobName, e); + } + } + + // 6. 
응답 생성 + return convertToResponse(entity); + } + + /** + * 스케줄 수정 (Cron 표현식과 설명 업데이트) + * + * @param jobName Job 이름 + * @param cronExpression 새로운 Cron 표현식 + * @param description 새로운 설명 + * @return ScheduleResponse 수정된 스케줄 정보 + * @throws Exception 스케줄 수정 중 발생한 예외 + */ + @Transactional + public ScheduleResponse updateSchedule(String jobName, String cronExpression, String description) throws Exception { + log.info("스케줄 수정 시작: {} -> {}", jobName, cronExpression); + + // 1. 기존 스케줄 조회 + JobScheduleEntity entity = scheduleRepository.findByJobName(jobName) + .orElseThrow(() -> new IllegalArgumentException("Schedule not found for job: " + jobName)); + + // 2. Cron 표현식 유효성 검사 + try { + CronScheduleBuilder.cronSchedule(cronExpression); + } catch (Exception e) { + throw new IllegalArgumentException("Invalid cron expression: " + cronExpression, e); + } + + // 3. DB 업데이트 + entity.setCronExpression(cronExpression); + if (description != null) { + entity.setDescription(description); + } + entity = scheduleRepository.save(entity); + log.info("DB 스케줄 업데이트 완료: {}", jobName); + + // 4. Quartz 스케줄 재등록 + if (entity.getActive()) { + try { + unregisterQuartzJob(jobName); + registerQuartzJob(entity); + log.info("Quartz 재등록 완료: {}", jobName); + } catch (Exception e) { + log.error("Quartz 재등록 실패 (DB 업데이트는 완료됨): {}", jobName, e); + } + } + + // 5. 
응답 생성 + return convertToResponse(entity); + } + + /** + * 스케줄 수정 (Cron 표현식만 업데이트) + * + * @param jobName Job 이름 + * @param cronExpression 새로운 Cron 표현식 + * @return ScheduleResponse 수정된 스케줄 정보 + * @throws Exception 스케줄 수정 중 발생한 예외 + * @deprecated updateSchedule(jobName, cronExpression, description) 사용 권장 + */ + @Deprecated + @Transactional + public ScheduleResponse updateScheduleByCron(String jobName, String cronExpression) throws Exception { + return updateSchedule(jobName, cronExpression, null); + } + + /** + * 스케줄 삭제 (DB + Quartz) + * + * @param jobName Job 이름 + * @throws Exception 스케줄 삭제 중 발생한 예외 + */ + @Transactional + public void deleteSchedule(String jobName) throws Exception { + log.info("스케줄 삭제 시작: {}", jobName); + + // 1. Quartz에서 제거 + try { + unregisterQuartzJob(jobName); + log.info("Quartz 스케줄 제거 완료: {}", jobName); + } catch (Exception e) { + log.warn("Quartz에서 스케줄 제거 실패 (무시하고 계속): {}", jobName, e); + } + + // 2. DB에서 삭제 + scheduleRepository.deleteByJobName(jobName); + log.info("DB에서 스케줄 삭제 완료: {}", jobName); + } + + /** + * 특정 Job의 스케줄 조회 + * + * @param jobName Job 이름 + * @return ScheduleResponse 스케줄 정보 + */ + @Transactional(readOnly = true) + public ScheduleResponse getScheduleByJobName(String jobName) { + JobScheduleEntity entity = scheduleRepository.findByJobName(jobName) + .orElseThrow(() -> new IllegalArgumentException("Schedule not found for job: " + jobName)); + + return convertToResponse(entity); + } + + /** + * 전체 스케줄 목록 조회 + * + * @return List 스케줄 목록 + */ + @Transactional(readOnly = true) + public List getAllSchedules() { + return scheduleRepository.findAll().stream() + .map(this::convertToResponse) + .collect(Collectors.toList()); + } + + /** + * 활성화된 스케줄 목록 조회 + * + * @return List 활성 스케줄 목록 + */ + @Transactional(readOnly = true) + public List getAllActiveSchedules() { + return scheduleRepository.findAllActive().stream() + .map(this::convertToResponse) + .collect(Collectors.toList()); + } + + /** + * 스케줄 활성화/비활성화 토글 + * + * @param jobName Job 
이름 + * @param active 활성화 여부 + * @return ScheduleResponse 수정된 스케줄 정보 + * @throws Exception 스케줄 토글 중 발생한 예외 + */ + @Transactional + public ScheduleResponse toggleScheduleActive(String jobName, boolean active) throws Exception { + log.info("스케줄 활성화 상태 변경: {} -> {}", jobName, active); + + // 1. 기존 스케줄 조회 + JobScheduleEntity entity = scheduleRepository.findByJobName(jobName) + .orElseThrow(() -> new IllegalArgumentException("Schedule not found for job: " + jobName)); + + // 2. DB 업데이트 + entity.setActive(active); + entity = scheduleRepository.save(entity); + + // 3. Quartz 동기화 + try { + if (active) { + // 활성화: Quartz에 등록 + registerQuartzJob(entity); + log.info("Quartz 활성화 완료: {}", jobName); + } else { + // 비활성화: Quartz에서 제거 + unregisterQuartzJob(jobName); + log.info("Quartz 비활성화 완료: {}", jobName); + } + } catch (Exception e) { + log.error("Quartz 동기화 중 예외 발생 (DB 업데이트는 완료됨): {}", jobName, e); + } + + // 4. 응답 생성 + return convertToResponse(entity); + } + + /** + * Quartz에 Job 등록 + * + * @param entity JobScheduleEntity + * @throws SchedulerException Quartz 스케줄러 예외 + */ + private void registerQuartzJob(JobScheduleEntity entity) throws SchedulerException { + String jobName = entity.getJobName(); + JobKey jobKey = new JobKey(jobName, "batch-jobs"); + TriggerKey triggerKey = new TriggerKey(jobName + "-trigger", "batch-triggers"); + + // JobDetail 생성 + JobDetail jobDetail = JobBuilder.newJob(QuartzBatchJob.class) + .withIdentity(jobKey) + .usingJobData("jobName", jobName) + .storeDurably(true) + .build(); + + // CronTrigger 생성 + CronTrigger trigger = TriggerBuilder.newTrigger() + .withIdentity(triggerKey) + .withSchedule(CronScheduleBuilder.cronSchedule(entity.getCronExpression())) + .forJob(jobKey) + .build(); + + // 기존 Job 삭제 후 등록 + try { + scheduler.deleteJob(jobKey); + } catch (Exception e) { + log.debug("기존 Job 삭제 시도: {}", jobName); + } + + // Job 등록 + try { + scheduler.scheduleJob(jobDetail, trigger); + log.info("Quartz에 스케줄 등록 완료: {} (Cron: {})", jobName, 
entity.getCronExpression()); + } catch (ObjectAlreadyExistsException e) { + log.warn("Job이 이미 존재함, 재시도: {}", jobName); + scheduler.deleteJob(jobKey); + scheduler.scheduleJob(jobDetail, trigger); + log.info("Quartz에 스케줄 재등록 완료: {} (Cron: {})", jobName, entity.getCronExpression()); + } + } + + /** + * Quartz에서 Job 제거 + * + * @param jobName Job 이름 + * @throws SchedulerException Quartz 스케줄러 예외 + */ + private void unregisterQuartzJob(String jobName) throws SchedulerException { + JobKey jobKey = new JobKey(jobName, "batch-jobs"); + + if (scheduler.checkExists(jobKey)) { + scheduler.deleteJob(jobKey); + log.info("Quartz에서 스케줄 제거 완료: {}", jobName); + } + } + + /** + * Entity를 Response DTO로 변환 + * + * @param entity JobScheduleEntity + * @return ScheduleResponse + */ + private ScheduleResponse convertToResponse(JobScheduleEntity entity) { + ScheduleResponse.ScheduleResponseBuilder builder = ScheduleResponse.builder() + .id(entity.getId()) + .jobName(entity.getJobName()) + .cronExpression(entity.getCronExpression()) + .description(entity.getDescription()) + .active(entity.getActive()) + .createdAt(entity.getCreatedAt()) + .updatedAt(entity.getUpdatedAt()) + .createdBy(entity.getCreatedBy()) + .updatedBy(entity.getUpdatedBy()); + + // 다음 실행 시간 계산 (Cron 표현식 기반) + if (entity.getActive() && entity.getCronExpression() != null) { + try { + // Cron 표현식으로 임시 트리거 생성 (DB 조회 없이 계산) + CronTrigger tempTrigger = TriggerBuilder.newTrigger() + .withSchedule(CronScheduleBuilder.cronSchedule(entity.getCronExpression())) + .build(); + + Date nextFireTime = tempTrigger.getFireTimeAfter(new Date()); + if (nextFireTime != null) { + builder.nextFireTime(nextFireTime); + } + + // Trigger 상태는 active인 경우 NORMAL로 설정 + builder.triggerState("NORMAL"); + + } catch (Exception e) { + log.debug("Cron 표현식 기반 다음 실행 시간 계산 실패: {}", entity.getJobName(), e); + } + } + + return builder.build(); + } +} diff --git a/src/main/resources/application-dev.yml b/src/main/resources/application-dev.yml new file mode 100644 
index 0000000..fe56683 --- /dev/null +++ b/src/main/resources/application-dev.yml @@ -0,0 +1,115 @@ +spring: + application: + name: snp-sync-batch + +# 정의한 DataSource만 사용하도록 합니다. (필수) + autoconfigure: + exclude: org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration + + # =============================================== + # 1. 배치 메타데이터 데이터소스 (Primary) + # Spring Batch의 JobRepository가 사용하는 DB입니다. + # =============================================== + batch-meta-datasource: + jdbc-url: jdbc:postgresql://211.208.115.83:5432/snpdb?currentSchema=snp_batch + username: snp + password: snp#8932 + driver-class-name: org.postgresql.Driver + # HikariCP 풀 설정 (선택적) + hikari: + maximum-pool-size: 10 + minimum-idle: 5 + connection-timeout: 30000 + + # =============================================== + # 2. 비즈니스 데이터 데이터소스 + # Spring Batch Step(ItemReader/Writer)이 데이터를 읽고 쓰는 DB입니다. + # =============================================== + business-datasource: + jdbc-url: jdbc:postgresql://211.208.115.83:5432/snpdb?currentSchema=std_snp_data + username: snp + password: snp#8932 + driver-class-name: org.postgresql.Driver + # HikariCP 풀 설정 (선택적) + hikari: + maximum-pool-size: 10 + minimum-idle: 5 + connection-timeout: 30000 + + # JPA Configuration + jpa: + hibernate: + ddl-auto: update + show-sql: false + properties: + hibernate: + dialect: org.hibernate.dialect.PostgreSQLDialect + format_sql: true + default_schema: snp_batch + + # Batch Configuration + batch: + jdbc: + table-prefix: "snp_batch.batch_" + initialize-schema: never # Changed to 'never' as tables already exist + job: + enabled: false # Prevent auto-run on startup + + # Quartz Scheduler Configuration - Using JDBC Store for persistence + quartz: + job-store-type: jdbc # JDBC store for schedule persistence + jdbc: + initialize-schema: always # Create Quartz tables if not exist + properties: + org.quartz.scheduler.instanceName: SNPBatchScheduler + org.quartz.scheduler.instanceId: AUTO + 
org.quartz.threadPool.threadCount: 10 + org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX + org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate + org.quartz.jobStore.tablePrefix: QRTZ_ + org.quartz.jobStore.isClustered: false + org.quartz.jobStore.misfireThreshold: 60000 + +# Server Configuration +server: + port: 8051 + servlet: + context-path: /snp-sync + +# Actuator Configuration +management: + endpoints: + web: + exposure: + include: health,info,metrics,prometheus,batch + endpoint: + health: + show-details: always + +# Logging Configuration +logging: + level: + root: INFO + com.snp.batch: DEBUG + com.snp.batch.common.util: INFO + org.springframework.batch: WARN + org.springframework.jdbc: WARN + pattern: + console: "%d{yyyy-MM-dd HH:mm:ss} - %msg%n" + file: "%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n" + file: + name: logs/snp-sync-batch.log + +# Custom Application Properties +app: + batch: + api: + url: https://api.example.com/data + timeout: 30000 + ship-api: + url: https://shipsapi.maritime.spglobal.com + username: 7cc0517d-5ed6-452e-a06f-5bbfd6ab6ade + password: 2LLzSJNqtxWVD8zC + schedule: + enabled: true + cron: "0 0 * * * ?" # Every hour diff --git a/src/main/resources/application-prod.yml b/src/main/resources/application-prod.yml new file mode 100644 index 0000000..f75ae4d --- /dev/null +++ b/src/main/resources/application-prod.yml @@ -0,0 +1,116 @@ +spring: + application: + name: snp-sync-batch + +# 정의한 DataSource만 사용하도록 합니다. (필수) + autoconfigure: + exclude: org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration + + # =============================================== + # 1. 배치 메타데이터 데이터소스 (Primary) + # Spring Batch의 JobRepository가 사용하는 DB입니다. 
+ # =============================================== + batch-meta-datasource: + jdbc-url: jdbc:postgresql://211.208.115.83:5432/snpdb?currentSchema=snp_batch + username: snp + password: snp#8932 + driver-class-name: org.postgresql.Driver + # HikariCP 풀 설정 (선택적) + hikari: + maximum-pool-size: 10 + minimum-idle: 5 + connection-timeout: 30000 + + # =============================================== + # 2. 비즈니스 데이터 데이터소스 + # Spring Batch Step(ItemReader/Writer)이 데이터를 읽고 쓰는 DB입니다. + # =============================================== + business-datasource: + jdbc-url: jdbc:postgresql://211.208.115.83:5432/snpdb?currentSchema=std_snp_data + username: snp + password: snp#8932 + driver-class-name: org.postgresql.Driver + # HikariCP 풀 설정 (선택적) + hikari: + maximum-pool-size: 10 + minimum-idle: 5 + connection-timeout: 30000 + + # JPA Configuration + jpa: + hibernate: + ddl-auto: update + show-sql: false + properties: + hibernate: + dialect: org.hibernate.dialect.PostgreSQLDialect + format_sql: false + default_schema: snp_batch + + # Batch Configuration + batch: + jdbc: + table-prefix: "snp_batch.batch_" + initialize-schema: never # Changed to 'never' as tables already exist + job: + enabled: false # Prevent auto-run on startup + + # Quartz Scheduler Configuration - Using JDBC Store for persistence + quartz: + job-store-type: jdbc # JDBC store for schedule persistence + jdbc: + initialize-schema: always # Create Quartz tables if not exist + properties: + org.quartz.scheduler.instanceName: SNPBatchScheduler + org.quartz.scheduler.instanceId: AUTO + org.quartz.threadPool.threadCount: 10 + org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX + org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate + org.quartz.jobStore.tablePrefix: QRTZ_ + org.quartz.jobStore.isClustered: false + org.quartz.jobStore.misfireThreshold: 60000 + +# Server Configuration +server: + port: 8051 + servlet: + context-path: /snp-sync + +# Actuator Configuration 
+management: + endpoints: + web: + exposure: + include: health,info,metrics,prometheus,batch + endpoint: + health: + show-details: always + +# Logging Configuration +logging: + level: + root: INFO + com.snp.batch: INFO + org.springframework.batch: WARN + org.springframework.jdbc: WARN + pattern: + console: "%d{yyyy-MM-dd HH:mm:ss} - %msg%n" + file: "%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n" + file: + name: logs/snp-sync-batch.log + +# Custom Application Properties +app: + batch: + chunk-size: 10000 + sub-chunk-size: 5000 # Writer Sub-Chunk 분할 크기 + api: + url: https://api.example.com/data + timeout: 30000 + ship-api: + url: https://shipsapi.maritime.spglobal.com + username: 7cc0517d-5ed6-452e-a06f-5bbfd6ab6ade + password: 2LLzSJNqtxWVD8zC + schedule: + enabled: true + cron: "0 0 * * * ?" # Every hour diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml new file mode 100644 index 0000000..11faca0 --- /dev/null +++ b/src/main/resources/application.yml @@ -0,0 +1,223 @@ +spring: + application: + name: snp-sync-batch + +# 정의한 DataSource만 사용하도록 합니다. (필수) + autoconfigure: + exclude: org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration + + # =============================================== + # 1. 배치 메타데이터 데이터소스 (Primary) + # Spring Batch의 JobRepository가 사용하는 DB입니다. + # =============================================== + batch-meta-datasource: + jdbc-url: jdbc:postgresql://211.208.115.83:5432/snpdb?currentSchema=snp_batch + username: snp + password: snp#8932 + driver-class-name: org.postgresql.Driver + # HikariCP 풀 설정 (선택적) + hikari: + maximum-pool-size: 10 + minimum-idle: 5 + connection-timeout: 30000 + + # =============================================== + # 2. 비즈니스 데이터 데이터소스 + # Spring Batch Step(ItemReader/Writer)이 데이터를 읽고 쓰는 DB입니다. 
+ # =============================================== + business-datasource: + jdbc-url: jdbc:postgresql://211.208.115.83:5432/snpdb?currentSchema=std_snp_data + username: snp + password: snp#8932 + driver-class-name: org.postgresql.Driver + # HikariCP 풀 설정 (선택적) + hikari: + maximum-pool-size: 10 + minimum-idle: 5 + connection-timeout: 30000 + + # JPA Configuration + jpa: + hibernate: + ddl-auto: update + show-sql: false + properties: + hibernate: + dialect: org.hibernate.dialect.PostgreSQLDialect + format_sql: true + default_schema: snp_batch + + # Batch Configuration + batch: + jdbc: + table-prefix: "snp_batch.batch_" + initialize-schema: always # Changed to 'never' as tables already exist + job: + enabled: false # Prevent auto-run on startup + + # Quartz Scheduler Configuration - Using JDBC Store for persistence + quartz: + job-store-type: jdbc # JDBC store for schedule persistence + jdbc: + initialize-schema: always # Create Quartz tables if not exist + properties: + org.quartz.scheduler.instanceName: SNPBatchScheduler + org.quartz.scheduler.instanceId: AUTO + org.quartz.threadPool.threadCount: 10 + org.quartz.jobStore.class: org.quartz.impl.jdbcjobstore.JobStoreTX + org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate + org.quartz.jobStore.tablePrefix: QRTZ_ + org.quartz.jobStore.isClustered: false + org.quartz.jobStore.misfireThreshold: 60000 + +# Server Configuration +server: + port: 8051 + servlet: + context-path: /snp-sync + +# Actuator Configuration +management: + endpoints: + web: + exposure: + include: health,info,metrics,prometheus,batch + endpoint: + health: + show-details: always + +# Logging Configuration +logging: + level: + root: INFO + com.snp.batch: DEBUG + org.springframework.batch: WARN + org.springframework.jdbc: WARN + pattern: + console: "%d{yyyy-MM-dd HH:mm:ss} - %msg%n" + file: "%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n" + file: + name: logs/snp-sync-batch.log + +# Custom Application 
Properties +app: + batch: + chunk-size: 10000 + sub-chunk-size: 5000 # Writer Sub-Chunk 분할 크기 + source-schema: + name: std_snp_data + tables: + ship-001: tb_ship_default_info + ship-002: tb_ship_info_mst + ship-003: tb_ship_add_info + ship-004: tb_ship_bbctr_hstry + ship-005: tb_ship_idntf_info_hstry + ship-006: tb_ship_clfic_hstry + ship-007: tb_ship_company_rel + ship-008: tb_ship_crew_list + ship-009: tb_ship_dark_actv_idnty + ship-010: tb_ship_country_hstry + ship-011: tb_ship_group_revn_ownr_hstry + ship-012: tb_ship_ice_grd + ship-013: tb_ship_nm_chg_hstry + ship-014: tb_ship_operator_hstry + ship-015: tb_ship_ownr_hstry + ship-016: tb_ship_prtc_rpn_hstry + ship-017: tb_ship_sfty_mng_evdc_hstry + ship-018: tb_ship_mng_company_hstry + ship-019: tb_ship_sstrvsl_rel + ship-020: tb_ship_spc_fetr + ship-021: tb_ship_status_hstry + ship-022: tb_ship_cargo_capacity + ship-023: tb_ship_inspection_ymd + ship-024: tb_ship_inspection_ymd_hstry + ship-025: tb_ship_tech_mng_company_hstry + ship-026: tb_ship_thrstr_info + company-001: tb_company_dtl_info + event-001: tb_event_mst + event-002: tb_event_cargo + event-003: tb_event_humn_acdnt + event-004: tb_event_rel + facility-001: tb_port_facility_info + psc-001: tb_psc_mst + psc-002: tb_psc_defect + psc-003: tb_psc_oa_certf + movements-001: tb_ship_anchrgcall_hstry + movements-002: tb_ship_berthcall_hstry + movements-003: tb_ship_now_status_hstry + movements-004: tb_ship_dest_hstry + movements-005: tb_ship_prtcll_hstry + movements-006: tb_ship_sts_opert_hstry + movements-007: tb_ship_teminalcall_hstry + movements-008: tb_ship_trnst_hstry + code-001: tb_ship_type_cd + code-002: tb_ship_country_cd + risk-compliance-001: tb_ship_risk_info + risk-compliance-002: tb_ship_compliance_info + risk-compliance-003: tb_company_compliance_info + target-schema: + name: std_snp_svc + tables: + ship-001: tb_ship_info_mst + ship-002: tb_ship_main_info + ship-003: tb_ship_add_info + ship-004: tb_ship_bbctr_hstry + ship-005: 
tb_ship_idntf_info_hstry + ship-006: tb_ship_clfic_hstry + ship-007: tb_ship_company_rel + ship-008: tb_ship_crew_list + ship-009: tb_ship_dark_actv_idnty + ship-010: tb_ship_country_hstry + ship-011: tb_ship_group_revn_ownr_hstry + ship-012: tb_ship_ice_grd + ship-013: tb_ship_nm_chg_hstry + ship-014: tb_ship_operator_hstry + ship-015: tb_ship_ownr_hstry + ship-016: tb_ship_prtc_rpn_hstry + ship-017: tb_ship_sfty_mng_evdc_hstry + ship-018: tb_ship_mng_company_hstry + ship-019: tb_ship_sstrvsl_rel + ship-020: tb_ship_spc_fetr + ship-021: tb_ship_status_hstry + ship-022: tb_ship_cargo_capacity + ship-023: tb_ship_inspection_ymd + ship-024: tb_ship_inspection_ymd_hstry + ship-025: tb_ship_tech_mng_company_hstry + ship-026: tb_ship_thrstr_info + company-001: tb_company_dtl_info + event-001: tb_event_mst + event-002: tb_event_cargo + event-003: tb_event_humn_acdnt + event-004: tb_event_rel + facility-001: tb_port_facility_info + psc-001: tb_psc_mst + psc-002: tb_psc_defect + psc-003: tb_psc_oa_certf + movements-001: tb_ship_anchrgcall_hstry + movements-002: tb_ship_berthcall_hstry + movements-003: tb_ship_now_status_hstry + movements-004: tb_ship_dest_hstry + movements-005: tb_ship_prtcll_hstry + movements-006: tb_ship_sts_opert_hstry + movements-007: tb_ship_teminalcall_hstry + movements-008: tb_ship_trnst_hstry + code-001: tb_ship_type_cd + code-002: tb_ship_country_cd + risk-compliance-001: tb_ship_risk_info + risk-compliance-002: tb_ship_risk_hstry + risk-compliance-003: tb_ship_compliance_info + risk-compliance-004: tb_ship_compliance_hstry + risk-compliance-005: tb_ship_compliance_info_hstry + risk-compliance-006: tb_company_compliance_info + risk-compliance-007: tb_company_compliance_hstry + risk-compliance-008: tb_company_compliance_info_hstry + api: + url: https://api.example.com/data + timeout: 30000 + ship-api: + url: https://shipsapi.maritime.spglobal.com + username: 7cc0517d-5ed6-452e-a06f-5bbfd6ab6ade + password: 2LLzSJNqtxWVD8zC + schedule: + enabled: 
true + cron: "0 0 * * * ?" # Every hour diff --git a/src/main/resources/db/migration/V3__Create_Sample_Products_Table.sql b/src/main/resources/db/migration/V3__Create_Sample_Products_Table.sql new file mode 100644 index 0000000..43a5b2f --- /dev/null +++ b/src/main/resources/db/migration/V3__Create_Sample_Products_Table.sql @@ -0,0 +1,114 @@ +-- ======================================== +-- 샘플 제품 테이블 생성 +-- 다양한 데이터 타입 테스트용 +-- ======================================== + +-- 기존 테이블 삭제 (개발 환경에서만) +DROP TABLE IF EXISTS sample_products CASCADE; + +-- 샘플 제품 테이블 생성 +CREATE TABLE sample_products ( + -- 기본 키 (자동 증가) + id BIGSERIAL PRIMARY KEY, + + -- 제품 ID (비즈니스 키, 유니크) + product_id VARCHAR(50) NOT NULL UNIQUE, + + -- 제품명 + product_name VARCHAR(200) NOT NULL, + + -- 카테고리 + category VARCHAR(100), + + -- 가격 (DECIMAL 타입: 정밀한 소수점 계산) + price DECIMAL(10, 2), + + -- 재고 수량 (INTEGER 타입) + stock_quantity INTEGER, + + -- 활성 여부 (BOOLEAN 타입) + is_active BOOLEAN DEFAULT TRUE, + + -- 평점 (DOUBLE PRECISION 타입) + rating DOUBLE PRECISION, + + -- 제조일자 (DATE 타입) + manufacture_date DATE, + + -- 무게 (REAL/FLOAT 타입) + weight REAL, + + -- 판매 횟수 (BIGINT 타입) + sales_count BIGINT DEFAULT 0, + + -- 설명 (TEXT 타입: 긴 텍스트) + description TEXT, + + -- 태그 (JSON 문자열 저장) + tags VARCHAR(500), + + -- 감사 필드 (BaseEntity에서 상속) + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + created_by VARCHAR(100) DEFAULT 'SYSTEM', + updated_by VARCHAR(100) DEFAULT 'SYSTEM' +); + +-- ======================================== +-- 인덱스 생성 (성능 최적화) +-- ======================================== + +-- 제품 ID 인덱스 (이미 UNIQUE로 자동 생성되지만 명시적 표시) +CREATE INDEX IF NOT EXISTS idx_sample_products_product_id + ON sample_products(product_id); + +-- 카테고리 인덱스 (카테고리별 검색 최적화) +CREATE INDEX IF NOT EXISTS idx_sample_products_category + ON sample_products(category); + +-- 활성 여부 인덱스 (활성 제품 필터링 최적화) +CREATE INDEX IF NOT EXISTS idx_sample_products_is_active + ON sample_products(is_active); + 
+-- 제조일자 인덱스 (날짜 범위 검색 최적화) +CREATE INDEX IF NOT EXISTS idx_sample_products_manufacture_date + ON sample_products(manufacture_date); + +-- 복합 인덱스: 카테고리 + 활성 여부 (자주 함께 검색되는 조건) +CREATE INDEX IF NOT EXISTS idx_sample_products_category_active + ON sample_products(category, is_active); + +-- 생성일시 인덱스 (최신 데이터 조회 최적화) +CREATE INDEX IF NOT EXISTS idx_sample_products_created_at + ON sample_products(created_at DESC); + +-- ======================================== +-- 코멘트 추가 (테이블 및 컬럼 설명) +-- ======================================== + +COMMENT ON TABLE sample_products IS '샘플 제품 테이블 - 다양한 데이터 타입 테스트용'; + +COMMENT ON COLUMN sample_products.id IS '기본 키 (자동 증가)'; +COMMENT ON COLUMN sample_products.product_id IS '제품 ID (비즈니스 키)'; +COMMENT ON COLUMN sample_products.product_name IS '제품명'; +COMMENT ON COLUMN sample_products.category IS '카테고리'; +COMMENT ON COLUMN sample_products.price IS '가격 (DECIMAL 타입, 정밀 소수점)'; +COMMENT ON COLUMN sample_products.stock_quantity IS '재고 수량 (INTEGER)'; +COMMENT ON COLUMN sample_products.is_active IS '활성 여부 (BOOLEAN)'; +COMMENT ON COLUMN sample_products.rating IS '평점 (DOUBLE PRECISION)'; +COMMENT ON COLUMN sample_products.manufacture_date IS '제조일자 (DATE)'; +COMMENT ON COLUMN sample_products.weight IS '무게 kg (REAL/FLOAT)'; +COMMENT ON COLUMN sample_products.sales_count IS '판매 횟수 (BIGINT)'; +COMMENT ON COLUMN sample_products.description IS '설명 (TEXT, 긴 텍스트)'; +COMMENT ON COLUMN sample_products.tags IS '태그 (JSON 문자열)'; +COMMENT ON COLUMN sample_products.created_at IS '생성일시'; +COMMENT ON COLUMN sample_products.updated_at IS '수정일시'; +COMMENT ON COLUMN sample_products.created_by IS '생성자'; +COMMENT ON COLUMN sample_products.updated_by IS '수정자'; + +-- ======================================== +-- 테이블 통계 정보 +-- ======================================== + +-- 테이블 통계 업데이트 (쿼리 최적화를 위한 통계 수집) +ANALYZE sample_products; diff --git a/src/main/resources/db/schema/001_create_job_execution_lock.sql b/src/main/resources/db/schema/001_create_job_execution_lock.sql new file 
mode 100644 index 0000000..5e297d0 --- /dev/null +++ b/src/main/resources/db/schema/001_create_job_execution_lock.sql @@ -0,0 +1,61 @@ +-- ============================================================ +-- Job Execution Lock 테이블 생성 +-- ============================================================ +-- 목적: Job 동시 실행 방지 (분산 환경 지원) +-- 작성일: 2025-10-17 +-- 버전: 1.0.0 +-- ============================================================ + +-- 테이블 삭제 (재생성 시) +DROP TABLE IF EXISTS job_execution_lock CASCADE; + +-- 테이블 생성 +CREATE TABLE job_execution_lock ( + -- Job 이름 (Primary Key) + job_name VARCHAR(100) PRIMARY KEY, + + -- Lock 상태 (true: 실행 중, false: 대기) + locked BOOLEAN NOT NULL DEFAULT FALSE, + + -- Lock 획득 시간 + locked_at TIMESTAMP, + + -- Lock 소유자 (hostname:pid 형식) + locked_by VARCHAR(255), + + -- 현재 실행 중인 Execution ID + execution_id BIGINT, + + -- 감사 필드 + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- 인덱스 생성 +CREATE INDEX idx_job_execution_lock_locked ON job_execution_lock(locked); +CREATE INDEX idx_job_execution_lock_locked_at ON job_execution_lock(locked_at); +CREATE INDEX idx_job_execution_lock_execution_id ON job_execution_lock(execution_id); + +-- 테이블 및 컬럼 주석 +COMMENT ON TABLE job_execution_lock IS 'Job 실행 Lock 관리 테이블 (동시 실행 방지)'; +COMMENT ON COLUMN job_execution_lock.job_name IS 'Job 이름 (Primary Key)'; +COMMENT ON COLUMN job_execution_lock.locked IS 'Lock 상태 (true: 실행 중, false: 대기)'; +COMMENT ON COLUMN job_execution_lock.locked_at IS 'Lock 획득 시간'; +COMMENT ON COLUMN job_execution_lock.locked_by IS 'Lock 소유자 (hostname:pid)'; +COMMENT ON COLUMN job_execution_lock.execution_id IS '현재 실행 중인 Execution ID'; +COMMENT ON COLUMN job_execution_lock.created_at IS '생성 시간'; +COMMENT ON COLUMN job_execution_lock.updated_at IS '수정 시간'; + +-- 샘플 데이터 삽입 (선택사항) +-- INSERT INTO job_execution_lock (job_name, locked, locked_at, locked_by, execution_id) +-- VALUES ('sampleProductImportJob', FALSE, NULL, NULL, NULL); +-- INSERT INTO 
job_execution_lock (job_name, locked, locked_at, locked_by, execution_id) +-- VALUES ('shipDataImportJob', FALSE, NULL, NULL, NULL); +-- INSERT INTO job_execution_lock (job_name, locked, locked_at, locked_by, execution_id) +-- VALUES ('shipDetailImportJob', FALSE, NULL, NULL, NULL); + +-- 권한 부여 (필요 시) +-- GRANT SELECT, INSERT, UPDATE, DELETE ON job_execution_lock TO snp; + +-- 완료 메시지 +SELECT 'job_execution_lock 테이블 생성 완료' AS status; diff --git a/src/main/resources/db/schema/ship_detail.sql b/src/main/resources/db/schema/ship_detail.sql new file mode 100644 index 0000000..8bb9aed --- /dev/null +++ b/src/main/resources/db/schema/ship_detail.sql @@ -0,0 +1,64 @@ +-- 선박 상세 정보 테이블 +CREATE TABLE IF NOT EXISTS ship_detail ( + -- 기본 키 + id BIGSERIAL PRIMARY KEY, + + -- 비즈니스 키 + imo_number VARCHAR(20) UNIQUE NOT NULL, + + -- 선박 기본 정보 + ship_name VARCHAR(200), + ship_type VARCHAR(100), + classification VARCHAR(100), + build_year INTEGER, + shipyard VARCHAR(200), + + -- 소유/운영 정보 + owner VARCHAR(200), + operator VARCHAR(200), + flag VARCHAR(100), + + -- 선박 제원 + gross_tonnage DOUBLE PRECISION, + net_tonnage DOUBLE PRECISION, + deadweight DOUBLE PRECISION, + length_overall DOUBLE PRECISION, + breadth DOUBLE PRECISION, + depth DOUBLE PRECISION, + + -- 기술 정보 + hull_material VARCHAR(100), + engine_type VARCHAR(100), + engine_power DOUBLE PRECISION, + speed DOUBLE PRECISION, + + -- 식별 정보 + mmsi VARCHAR(20), + call_sign VARCHAR(20), + + -- 상태 정보 + status VARCHAR(50), + last_updated VARCHAR(100), + + -- 감사 필드 (BaseEntity) + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + created_by VARCHAR(100) DEFAULT 'SYSTEM', + updated_by VARCHAR(100) DEFAULT 'SYSTEM' +); + +-- 인덱스 +CREATE UNIQUE INDEX IF NOT EXISTS idx_ship_detail_imo ON ship_detail(imo_number); +CREATE INDEX IF NOT EXISTS idx_ship_detail_ship_name ON ship_detail(ship_name); +CREATE INDEX IF NOT EXISTS idx_ship_detail_ship_type ON ship_detail(ship_type); +CREATE INDEX IF NOT 
EXISTS idx_ship_detail_flag ON ship_detail(flag); +CREATE INDEX IF NOT EXISTS idx_ship_detail_status ON ship_detail(status); + +-- 주석 +COMMENT ON TABLE ship_detail IS '선박 상세 정보'; +COMMENT ON COLUMN ship_detail.imo_number IS 'IMO 번호 (비즈니스 키)'; +COMMENT ON COLUMN ship_detail.ship_name IS '선박명'; +COMMENT ON COLUMN ship_detail.ship_type IS '선박 타입'; +COMMENT ON COLUMN ship_detail.gross_tonnage IS '총톤수'; +COMMENT ON COLUMN ship_detail.deadweight IS '재화중량톤수'; +COMMENT ON COLUMN ship_detail.length_overall IS '전체 길이 (meters)';