feat(동기화현황): 동기화 현황 메뉴 추가 및 배치 Reader 리팩토링 (#1) #5

병합
HYOJIN feature/ISSUE-1-sync-status-menu 에서 develop 로 5 commits 를 머지했습니다 2026-03-24 17:29:07 +09:00
64개의 변경된 파일, 1526줄 추가 그리고 3121줄 삭제
Showing only changes of commit edef10e4bc - Show all commits

파일 보기

@ -0,0 +1,127 @@
package com.snp.batch.common.batch.reader;

import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.JobExecutionGroupable;
import com.snp.batch.common.util.TableMetaInfo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
/**
* 동기화 Reader 추상 클래스
*
* job_execution_id 단위로 청크를 분리하는 2단계 read 방식:
* 1단계 (peek): 다음 그룹 ID만 조회, 현재 그룹과 다르면 null 반환 (청크 종료)
* 2단계 (fetch): 데이터 로드 + batch_flag NP 전환
*
* GroupByExecutionIdPolicy를 대체하여 Reader 자체에서 청크 경계를 제어한다.
*
* @param <T> DTO 타입 (JobExecutionGroupable 구현 필요)
*/
@Slf4j
public abstract class BaseSyncReader<T extends JobExecutionGroupable> implements ItemReader<T> {
protected final TableMetaInfo tableMetaInfo;
protected final JdbcTemplate businessJdbcTemplate;
private List<T> allDataBuffer = new ArrayList<>();
private Long currentGroupId = null;
private Long pendingGroupId = null;
protected BaseSyncReader(DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
}
/**
* 소스 테이블명 반환 (: tableMetaInfo.sourceIceClass)
*/
protected abstract String getSourceTable();
/**
* ResultSet DTO 매핑
*
* @param rs ResultSet (현재 row)
* @param targetId job_execution_id (DTO의 jobExecutionId 필드에 설정)
* @return 매핑된 DTO 객체
*/
protected abstract T mapRow(ResultSet rs, Long targetId) throws SQLException;
/**
* 로그 접두사 (: "IceClassReader")
*/
protected String getLogPrefix() {
return getClass().getSimpleName();
}
@Override
public T read() throws Exception {
// 1. buffer가 비어있으면 다음 그룹 확인
if (allDataBuffer.isEmpty()) {
// pending이 있으면 (이전 청크에서 감지된 다음 그룹) 바로 로드
if (pendingGroupId != null) {
fetchAndTransition(pendingGroupId);
currentGroupId = pendingGroupId;
pendingGroupId = null;
} else {
// 다음 그룹 ID peek
Long nextId = peekNextGroupId();
if (nextId == null) {
// 이상 처리할 데이터 없음
currentGroupId = null;
return null;
}
if (currentGroupId != null && !currentGroupId.equals(nextId)) {
// 다른 그룹 발견 현재 청크 종료, 다음 청크에서 처리
pendingGroupId = nextId;
currentGroupId = null;
return null;
}
// 같은 그룹이거나 호출 로드
fetchAndTransition(nextId);
currentGroupId = nextId;
}
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
/**
* 다음 처리 대상 job_execution_id 조회 (데이터 로드/전환 없음)
*/
private Long peekNextGroupId() {
try {
return businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(getSourceTable()), Long.class);
} catch (Exception e) {
return null;
}
}
/**
* 데이터 로드 + batch_flag NP 전환
*/
private void fetchAndTransition(Long targetId) {
log.info("[{}] 다음 처리 대상 ID 발견: {}", getLogPrefix(), targetId);
String sql = CommonSql.getTargetDataQuery(getSourceTable());
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) ->
mapRow(rs, targetId), targetId);
// NP 전환
String updateSql = CommonSql.getProcessBatchQuery(getSourceTable());
businessJdbcTemplate.update(updateSql, targetId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.code.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto; import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto; import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
@ -129,12 +126,10 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
public Step flagCodeSyncStep() { public Step flagCodeSyncStep() {
log.info("Step 생성: flagCodeSyncStep"); log.info("Step 생성: flagCodeSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<FlagCodeDto, FlagCodeEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<FlagCodeDto, FlagCodeEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<FlagCodeDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(flagCodeWriteListener()) .listener(flagCodeWriteListener())
.build(); .build();
} }
@ -143,12 +138,10 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
public Step stat5CodeSyncStep() { public Step stat5CodeSyncStep() {
log.info("Step 생성: stat5CodeSyncStep"); log.info("Step 생성: stat5CodeSyncStep");
return new StepBuilder("stat5CodeSyncStep", jobRepository) return new StepBuilder("stat5CodeSyncStep", jobRepository)
.<Stat5CodeDto, Stat5CodeEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<Stat5CodeDto, Stat5CodeEntity>chunk(getChunkSize(), transactionManager)
.reader(stat5CodeReader(businessDataSource, tableMetaInfo)) .reader(stat5CodeReader(businessDataSource, tableMetaInfo))
.processor(new Stat5CodeProcessor()) .processor(new Stat5CodeProcessor())
.writer(new Stat5CodeWriter(codeRepository)) .writer(new Stat5CodeWriter(codeRepository))
.listener(new GroupByExecutionIdReadListener<Stat5CodeDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(stat5CodeWriteListener()) .listener(stat5CodeWriteListener())
.build(); .build();
} }

파일 보기

@ -1,68 +1,36 @@
package com.snp.batch.jobs.datasync.batch.code.reader; package com.snp.batch.jobs.datasync.batch.code.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto; import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class FlagCodeReader implements ItemReader<FlagCodeDto> { public class FlagCodeReader extends BaseSyncReader<FlagCodeDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<FlagCodeDto> allDataBuffer = new ArrayList<>();
public FlagCodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public FlagCodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public FlagCodeDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceFlagCode;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected FlagCodeDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return FlagCodeDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFlagCode), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .shipCountryCd(rs.getString("ship_country_cd"))
return; .cdNm(rs.getString("cd_nm"))
} .isoTwoCd(rs.getString("iso_two_cd"))
.isoThrCd(rs.getString("iso_thr_cd"))
if (nextTargetId != null) { .build();
log.info("[FlagCodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFlagCode);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return FlagCodeDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.shipCountryCd(rs.getString("ship_country_cd"))
.cdNm(rs.getString("cd_nm"))
.isoTwoCd(rs.getString("iso_two_cd"))
.isoThrCd(rs.getString("iso_thr_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFlagCode);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,75 +1,43 @@
package com.snp.batch.jobs.datasync.batch.code.reader; package com.snp.batch.jobs.datasync.batch.code.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto; import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class Stat5CodeReader implements ItemReader<Stat5CodeDto> { public class Stat5CodeReader extends BaseSyncReader<Stat5CodeDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<Stat5CodeDto> allDataBuffer = new ArrayList<>();
public Stat5CodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public Stat5CodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public Stat5CodeDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceStat5Code;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected Stat5CodeDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return Stat5CodeDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStat5Code), Long.class); .lvOne(rs.getString("lv_one"))
} catch (Exception e) { .lvOneDesc(rs.getString("lv_one_desc"))
return; .lvTwo(rs.getString("lv_two"))
} .lvTwoDesc(rs.getString("lv_two_desc"))
.lvThr(rs.getString("lv_thr"))
if (nextTargetId != null) { .lvThrDesc(rs.getString("lv_thr_desc"))
log.info("[Stat5CodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .lvFour(rs.getString("lv_four"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStat5Code); .lvFourDesc(rs.getString("lv_four_desc"))
final Long targetId = nextTargetId; .lvFive(rs.getString("lv_five"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .lvFiveDesc(rs.getString("lv_five_desc"))
return Stat5CodeDto.builder() .dtlDesc(rs.getString("dtl_desc"))
.jobExecutionId(targetId) .rlsIem(rs.getString("rls_iem"))
.lvOne(rs.getString("lv_one")) .build();
.lvOneDesc(rs.getString("lv_one_desc"))
.lvTwo(rs.getString("lv_two"))
.lvTwoDesc(rs.getString("lv_two_desc"))
.lvThr(rs.getString("lv_thr"))
.lvThrDesc(rs.getString("lv_thr_desc"))
.lvFour(rs.getString("lv_four"))
.lvFourDesc(rs.getString("lv_four_desc"))
.lvFive(rs.getString("lv_five"))
.lvFiveDesc(rs.getString("lv_five_desc"))
.dtlDesc(rs.getString("dtl_desc"))
.rlsIem(rs.getString("rls_iem"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStat5Code);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto; import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity; import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity;
@ -109,12 +106,10 @@ public class CompanyComplianceSyncJobConfig extends BaseJobConfig<CompanyComplia
public Step companyComplianceSyncStep() { public Step companyComplianceSyncStep() {
log.info("Step 생성: companyComplianceSyncStep"); log.info("Step 생성: companyComplianceSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<CompanyComplianceDto, CompanyComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<CompanyComplianceDto, CompanyComplianceEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<CompanyComplianceDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(companyComplianceWriteListener()) .listener(companyComplianceWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto; import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity; import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity;
@ -109,12 +106,10 @@ public class ShipComplianceSyncJobConfig extends BaseJobConfig<ShipComplianceDto
public Step shipComplianceSyncStep() { public Step shipComplianceSyncStep() {
log.info("Step 생성: shipComplianceSyncStep"); log.info("Step 생성: shipComplianceSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<ShipComplianceDto, ShipComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<ShipComplianceDto, ShipComplianceEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<ShipComplianceDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(shipComplianceWriteListener()) .listener(shipComplianceWriteListener())
.build(); .build();
} }

파일 보기

@ -1,82 +1,50 @@
package com.snp.batch.jobs.datasync.batch.compliance.reader; package com.snp.batch.jobs.datasync.batch.compliance.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto; import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class CompanyComplianceReader implements ItemReader<CompanyComplianceDto> { public class CompanyComplianceReader extends BaseSyncReader<CompanyComplianceDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<CompanyComplianceDto> allDataBuffer = new ArrayList<>();
public CompanyComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public CompanyComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public CompanyComplianceDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTbCompanyComplianceInfo;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected CompanyComplianceDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp lstMdfcnDtTs = rs.getTimestamp("lst_mdfcn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyComplianceInfo), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return CompanyComplianceDto.builder()
log.info("[CompanyComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyComplianceInfo); .companyCd(rs.getString("company_cd"))
final Long targetId = nextTargetId; .lstMdfcnDt(lstMdfcnDtTs != null ? lstMdfcnDtTs.toLocalDateTime() : null)
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .companySnthsComplianceStatus(rs.getObject("company_snths_compliance_status") != null ? rs.getLong("company_snths_compliance_status") : null)
Timestamp lstMdfcnDtTs = rs.getTimestamp("lst_mdfcn_dt"); .companyAusSanctionList(rs.getObject("company_aus_sanction_list") != null ? rs.getLong("company_aus_sanction_list") : null)
.companyBesSanctionList(rs.getObject("company_bes_sanction_list") != null ? rs.getLong("company_bes_sanction_list") : null)
return CompanyComplianceDto.builder() .companyCanSanctionList(rs.getObject("company_can_sanction_list") != null ? rs.getLong("company_can_sanction_list") : null)
.jobExecutionId(targetId) .companyOfacSanctionCountry(rs.getObject("company_ofac_sanction_country") != null ? rs.getLong("company_ofac_sanction_country") : null)
.companyCd(rs.getString("company_cd")) .companyFatfCmptncCountry(rs.getObject("company_fatf_cmptnc_country") != null ? rs.getLong("company_fatf_cmptnc_country") : null)
.lstMdfcnDt(lstMdfcnDtTs != null ? lstMdfcnDtTs.toLocalDateTime() : null) .companyEuSanctionList(rs.getObject("company_eu_sanction_list") != null ? rs.getLong("company_eu_sanction_list") : null)
.companySnthsComplianceStatus(rs.getObject("company_snths_compliance_status") != null ? rs.getLong("company_snths_compliance_status") : null) .companyOfacSanctionList(rs.getObject("company_ofac_sanction_list") != null ? rs.getLong("company_ofac_sanction_list") : null)
.companyAusSanctionList(rs.getObject("company_aus_sanction_list") != null ? rs.getLong("company_aus_sanction_list") : null) .companyOfacNonSdnSanctionList(rs.getObject("company_ofac_non_sdn_sanction_list") != null ? rs.getLong("company_ofac_non_sdn_sanction_list") : null)
.companyBesSanctionList(rs.getObject("company_bes_sanction_list") != null ? rs.getLong("company_bes_sanction_list") : null) .companyOfacssiSanctionList(rs.getObject("company_ofacssi_sanction_list") != null ? rs.getLong("company_ofacssi_sanction_list") : null)
.companyCanSanctionList(rs.getObject("company_can_sanction_list") != null ? rs.getLong("company_can_sanction_list") : null) .companySwissSanctionList(rs.getObject("company_swiss_sanction_list") != null ? rs.getLong("company_swiss_sanction_list") : null)
.companyOfacSanctionCountry(rs.getObject("company_ofac_sanction_country") != null ? rs.getLong("company_ofac_sanction_country") : null) .companyUaeSanctionList(rs.getObject("company_uae_sanction_list") != null ? rs.getLong("company_uae_sanction_list") : null)
.companyFatfCmptncCountry(rs.getObject("company_fatf_cmptnc_country") != null ? rs.getLong("company_fatf_cmptnc_country") : null) .companyUnSanctionList(rs.getObject("company_un_sanction_list") != null ? rs.getLong("company_un_sanction_list") : null)
.companyEuSanctionList(rs.getObject("company_eu_sanction_list") != null ? rs.getLong("company_eu_sanction_list") : null) .prntCompanyComplianceRisk(rs.getObject("prnt_company_compliance_risk") != null ? rs.getLong("prnt_company_compliance_risk") : null)
.companyOfacSanctionList(rs.getObject("company_ofac_sanction_list") != null ? rs.getLong("company_ofac_sanction_list") : null) .build();
.companyOfacNonSdnSanctionList(rs.getObject("company_ofac_non_sdn_sanction_list") != null ? rs.getLong("company_ofac_non_sdn_sanction_list") : null)
.companyOfacssiSanctionList(rs.getObject("company_ofacssi_sanction_list") != null ? rs.getLong("company_ofacssi_sanction_list") : null)
.companySwissSanctionList(rs.getObject("company_swiss_sanction_list") != null ? rs.getLong("company_swiss_sanction_list") : null)
.companyUaeSanctionList(rs.getObject("company_uae_sanction_list") != null ? rs.getLong("company_uae_sanction_list") : null)
.companyUnSanctionList(rs.getObject("company_un_sanction_list") != null ? rs.getLong("company_un_sanction_list") : null)
.prntCompanyComplianceRisk(rs.getObject("prnt_company_compliance_risk") != null ? rs.getLong("prnt_company_compliance_risk") : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,101 +1,69 @@
package com.snp.batch.jobs.datasync.batch.compliance.reader; package com.snp.batch.jobs.datasync.batch.compliance.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto; import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class ShipComplianceReader implements ItemReader<ShipComplianceDto> { public class ShipComplianceReader extends BaseSyncReader<ShipComplianceDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipComplianceDto> allDataBuffer = new ArrayList<>();
public ShipComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public ShipComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public ShipComplianceDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceCompliance;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected ShipComplianceDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCompliance), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return ShipComplianceDto.builder()
log.info("[ShipComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCompliance); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null)
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .lglSnthsSanction(rs.getString("lgl_snths_sanction"))
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt"); .shipBesSanctionList(rs.getString("ship_bes_sanction_list"))
.shipDarkActvInd(rs.getString("ship_dark_actv_ind"))
return ShipComplianceDto.builder() .shipDtldInfoNtmntd(rs.getString("ship_dtld_info_ntmntd"))
.jobExecutionId(targetId) .shipEuSanctionList(rs.getString("ship_eu_sanction_list"))
.imoNo(rs.getString("imo_no")) .shipFlgDspt(rs.getString("ship_flg_dspt"))
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null) .shipFlgSanctionCountry(rs.getString("ship_flg_sanction_country"))
.lglSnthsSanction(rs.getString("lgl_snths_sanction")) .shipFlgSanctionCountryHstry(rs.getString("ship_flg_sanction_country_hstry"))
.shipBesSanctionList(rs.getString("ship_bes_sanction_list")) .shipOfacNonSdnSanctionList(rs.getString("ship_ofac_non_sdn_sanction_list"))
.shipDarkActvInd(rs.getString("ship_dark_actv_ind")) .shipOfacSanctionList(rs.getString("ship_ofac_sanction_list"))
.shipDtldInfoNtmntd(rs.getString("ship_dtld_info_ntmntd")) .shipOfacCutnList(rs.getString("ship_ofac_cutn_list"))
.shipEuSanctionList(rs.getString("ship_eu_sanction_list")) .shipOwnrOfcsSanctionList(rs.getString("ship_ownr_ofcs_sanction_list"))
.shipFlgDspt(rs.getString("ship_flg_dspt")) .shipOwnrAusSanctionList(rs.getString("ship_ownr_aus_sanction_list"))
.shipFlgSanctionCountry(rs.getString("ship_flg_sanction_country")) .shipOwnrBesSanctionList(rs.getString("ship_ownr_bes_sanction_list"))
.shipFlgSanctionCountryHstry(rs.getString("ship_flg_sanction_country_hstry")) .shipOwnrCanSanctionList(rs.getString("ship_ownr_can_sanction_list"))
.shipOfacNonSdnSanctionList(rs.getString("ship_ofac_non_sdn_sanction_list")) .shipOwnrEuSanctionList(rs.getString("ship_ownr_eu_sanction_list"))
.shipOfacSanctionList(rs.getString("ship_ofac_sanction_list")) .shipOwnrFatfRglZone(rs.getString("ship_ownr_fatf_rgl_zone"))
.shipOfacCutnList(rs.getString("ship_ofac_cutn_list")) .shipOwnrOfacSanctionHstry(rs.getString("ship_ownr_ofac_sanction_hstry"))
.shipOwnrOfcsSanctionList(rs.getString("ship_ownr_ofcs_sanction_list")) .shipOwnrOfacSanctionList(rs.getString("ship_ownr_ofac_sanction_list"))
.shipOwnrAusSanctionList(rs.getString("ship_ownr_aus_sanction_list")) .shipOwnrOfacSanctionCountry(rs.getString("ship_ownr_ofac_sanction_country"))
.shipOwnrBesSanctionList(rs.getString("ship_ownr_bes_sanction_list")) .shipOwnrPrntCompanyNcmplnc(rs.getString("ship_ownr_prnt_company_ncmplnc"))
.shipOwnrCanSanctionList(rs.getString("ship_ownr_can_sanction_list")) .shipOwnrPrntCompanyFatfRglZone(rs.getString("ship_ownr_prnt_company_fatf_rgl_zone"))
.shipOwnrEuSanctionList(rs.getString("ship_ownr_eu_sanction_list")) .shipOwnrPrntCompanyOfacSanctionCountry(rs.getString("ship_ownr_prnt_company_ofac_sanction_country"))
.shipOwnrFatfRglZone(rs.getString("ship_ownr_fatf_rgl_zone")) .shipOwnrSwiSanctionList(rs.getString("ship_ownr_swi_sanction_list"))
.shipOwnrOfacSanctionHstry(rs.getString("ship_ownr_ofac_sanction_hstry")) .shipOwnrUaeSanctionList(rs.getString("ship_ownr_uae_sanction_list"))
.shipOwnrOfacSanctionList(rs.getString("ship_ownr_ofac_sanction_list")) .shipOwnrUnSanctionList(rs.getString("ship_ownr_un_sanction_list"))
.shipOwnrOfacSanctionCountry(rs.getString("ship_ownr_ofac_sanction_country")) .shipSanctionCountryPrtcllLastTwelveM(rs.getString("ship_sanction_country_prtcll_last_twelve_m"))
.shipOwnrPrntCompanyNcmplnc(rs.getString("ship_ownr_prnt_company_ncmplnc")) .shipSanctionCountryPrtcllLastThrM(rs.getString("ship_sanction_country_prtcll_last_thr_m"))
.shipOwnrPrntCompanyFatfRglZone(rs.getString("ship_ownr_prnt_company_fatf_rgl_zone")) .shipSanctionCountryPrtcllLastSixM(rs.getString("ship_sanction_country_prtcll_last_six_m"))
.shipOwnrPrntCompanyOfacSanctionCountry(rs.getString("ship_ownr_prnt_company_ofac_sanction_country")) .shipScrtyLglDsptEvent(rs.getString("ship_scrty_lgl_dspt_event"))
.shipOwnrSwiSanctionList(rs.getString("ship_ownr_swi_sanction_list")) .shipStsPrtnrNonComplianceTwelveM(rs.getString("ship_sts_prtnr_non_compliance_twelve_m"))
.shipOwnrUaeSanctionList(rs.getString("ship_ownr_uae_sanction_list")) .shipSwiSanctionList(rs.getString("ship_swi_sanction_list"))
.shipOwnrUnSanctionList(rs.getString("ship_ownr_un_sanction_list")) .shipUnSanctionList(rs.getString("ship_un_sanction_list"))
.shipSanctionCountryPrtcllLastTwelveM(rs.getString("ship_sanction_country_prtcll_last_twelve_m")) .build();
.shipSanctionCountryPrtcllLastThrM(rs.getString("ship_sanction_country_prtcll_last_thr_m"))
.shipSanctionCountryPrtcllLastSixM(rs.getString("ship_sanction_country_prtcll_last_six_m"))
.shipScrtyLglDsptEvent(rs.getString("ship_scrty_lgl_dspt_event"))
.shipStsPrtnrNonComplianceTwelveM(rs.getString("ship_sts_prtnr_non_compliance_twelve_m"))
.shipSwiSanctionList(rs.getString("ship_swi_sanction_list"))
.shipUnSanctionList(rs.getString("ship_un_sanction_list"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCompliance);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.event.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto; import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto; import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
@ -175,12 +172,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventSyncStep() { public Step eventSyncStep() {
log.info("Step 생성: eventSyncStep"); log.info("Step 생성: eventSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<EventDto, EventEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<EventDto, EventEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<EventDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventWriteListener()) .listener(eventWriteListener())
.build(); .build();
} }
@ -189,12 +184,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventCargoSyncStep() { public Step eventCargoSyncStep() {
log.info("Step 생성: eventCargoSyncStep"); log.info("Step 생성: eventCargoSyncStep");
return new StepBuilder("eventCargoSyncStep", jobRepository) return new StepBuilder("eventCargoSyncStep", jobRepository)
.<EventCargoDto, EventCargoEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<EventCargoDto, EventCargoEntity>chunk(getChunkSize(), transactionManager)
.reader(eventCargoReader(businessDataSource, tableMetaInfo)) .reader(eventCargoReader(businessDataSource, tableMetaInfo))
.processor(new EventCargoProcessor()) .processor(new EventCargoProcessor())
.writer(new EventCargoWriter(eventRepository, transactionManager, subChunkSize)) .writer(new EventCargoWriter(eventRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<EventCargoDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventCargoWriteListener()) .listener(eventCargoWriteListener())
.build(); .build();
} }
@ -203,12 +196,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventHumanCasualtySyncStep() { public Step eventHumanCasualtySyncStep() {
log.info("Step 생성: eventHumanCasualtySyncStep"); log.info("Step 생성: eventHumanCasualtySyncStep");
return new StepBuilder("eventHumanCasualtySyncStep", jobRepository) return new StepBuilder("eventHumanCasualtySyncStep", jobRepository)
.<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(getChunkSize(), transactionManager)
.reader(eventHumanCasualtyReader(businessDataSource, tableMetaInfo)) .reader(eventHumanCasualtyReader(businessDataSource, tableMetaInfo))
.processor(new EventHumanCasualtyProcessor()) .processor(new EventHumanCasualtyProcessor())
.writer(new EventHumanCasualtyWriter(eventRepository, transactionManager, subChunkSize)) .writer(new EventHumanCasualtyWriter(eventRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<EventHumanCasualtyDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventHumanCasualtyWriteListener()) .listener(eventHumanCasualtyWriteListener())
.build(); .build();
} }
@ -217,12 +208,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventRelationshipSyncStep() { public Step eventRelationshipSyncStep() {
log.info("Step 생성: eventRelationshipSyncStep"); log.info("Step 생성: eventRelationshipSyncStep");
return new StepBuilder("eventRelationshipSyncStep", jobRepository) return new StepBuilder("eventRelationshipSyncStep", jobRepository)
.<EventRelationshipDto, EventRelationshipEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<EventRelationshipDto, EventRelationshipEntity>chunk(getChunkSize(), transactionManager)
.reader(eventRelationshipReader(businessDataSource, tableMetaInfo)) .reader(eventRelationshipReader(businessDataSource, tableMetaInfo))
.processor(new EventRelationshipProcessor()) .processor(new EventRelationshipProcessor())
.writer(new EventRelationshipWriter(eventRepository, transactionManager, subChunkSize)) .writer(new EventRelationshipWriter(eventRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<EventRelationshipDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventRelationshipWriteListener()) .listener(eventRelationshipWriteListener())
.build(); .build();
} }

파일 보기

@ -1,73 +1,41 @@
package com.snp.batch.jobs.datasync.batch.event.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Event Cargo 동기화 Reader.
 *
 * <p>Group fetch / batch_flag transition logic lives in {@link BaseSyncReader};
 * this class only supplies the source table name and the row-to-DTO mapping.
 */
@Slf4j
public class EventCargoReader extends BaseSyncReader<EventCargoDto> {

    public EventCargoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table for event cargo rows. */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceEventCargo;
    }

    /**
     * Maps the current result-set row to an {@link EventCargoDto}.
     *
     * @param rs       result set positioned on the current row
     * @param targetId job_execution_id assigned to this group of rows
     */
    @Override
    protected EventCargoDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return EventCargoDto.builder()
                .jobExecutionId(targetId)
                // getObject null-check first: getInt/getLong would silently return 0 for SQL NULL
                .eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
                .imoNo(rs.getString("imo_no"))
                .type(rs.getString("type"))
                .eventSeq(rs.getString("event_seq"))
                .cnt(rs.getObject("cnt") != null ? rs.getLong("cnt") : null)
                .unitAbbr(rs.getString("unit_abbr"))
                .unit(rs.getString("unit"))
                .cargoDamg(rs.getString("cargo_damg"))
                .riskYn(rs.getString("risk_yn"))
                .text(rs.getString("text"))
                .build();
    }
}

파일 보기

@ -1,68 +1,36 @@
package com.snp.batch.jobs.datasync.batch.event.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventHumanCasualtyDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Event Human Casualty 동기화 Reader.
 *
 * <p>Group fetch / batch_flag transition logic lives in {@link BaseSyncReader};
 * this class only supplies the source table name and the row-to-DTO mapping.
 */
@Slf4j
public class EventHumanCasualtyReader extends BaseSyncReader<EventHumanCasualtyDto> {

    public EventHumanCasualtyReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table for event human-casualty rows. */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceEventHumanCasualty;
    }

    /**
     * Maps the current result-set row to an {@link EventHumanCasualtyDto}.
     *
     * @param rs       result set positioned on the current row
     * @param targetId job_execution_id assigned to this group of rows
     */
    @Override
    protected EventHumanCasualtyDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return EventHumanCasualtyDto.builder()
                .jobExecutionId(targetId)
                // getObject null-check first: getLong would silently return 0 for SQL NULL
                .eventId(rs.getObject("event_id") != null ? rs.getLong("event_id") : null)
                .type(rs.getString("type"))
                .scope(rs.getString("scope"))
                .qualfr(rs.getString("qualfr"))
                .cnt(rs.getObject("cnt") != null ? rs.getLong("cnt") : null)
                .build();
    }
}

파일 보기

@ -1,110 +1,79 @@
package com.snp.batch.jobs.datasync.batch.event.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.ZoneId;

/**
 * Event 동기화 Reader.
 *
 * <p>Group fetch / batch_flag transition logic lives in {@link BaseSyncReader};
 * this class only supplies the source table name and the row-to-DTO mapping.
 */
@Slf4j
public class EventReader extends BaseSyncReader<EventDto> {

    public EventReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table for event rows. */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceEvent;
    }

    /**
     * Maps the current result-set row to an {@link EventDto}.
     *
     * <p>Timestamps are converted to zoned date-times using the JVM default
     * zone — NOTE(review): confirm the system zone matches the source data's zone.
     *
     * @param rs       result set positioned on the current row
     * @param targetId job_execution_id assigned to this group of rows
     */
    @Override
    protected EventDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        Timestamp pstgYmdTs = rs.getTimestamp("pstg_ymd");
        Timestamp eventStartDayTs = rs.getTimestamp("event_start_day");
        Timestamp eventEndDayTs = rs.getTimestamp("event_end_day");

        return EventDto.builder()
                .jobExecutionId(targetId)
                // getObject null-check first: primitive getters would return 0 for SQL NULL
                .eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
                .acdntId(rs.getString("acdnt_id"))
                .imoNo(rs.getString("imo_no"))
                .pstgYmd(pstgYmdTs != null ? pstgYmdTs.toInstant().atZone(ZoneId.systemDefault()) : null)
                .eventStartDay(eventStartDayTs != null ? eventStartDayTs.toInstant().atZone(ZoneId.systemDefault()) : null)
                .eventEndDay(eventEndDayTs != null ? eventEndDayTs.toInstant().atZone(ZoneId.systemDefault()) : null)
                .embrkTryYn(rs.getString("embrk_try_yn"))
                .cargoCapacityStatusCd(rs.getString("cargo_capacity_status_cd"))
                .acdntActn(rs.getString("acdnt_actn"))
                .acdntZone(rs.getString("acdnt_zone"))
                .acdntZoneCd(rs.getString("acdnt_zone_cd"))
                .cfgCmpntTwo(rs.getString("cfg_cmpnt_two"))
                .countryCd(rs.getString("country_cd"))
                .buildYmd(rs.getString("build_ymd"))
                .desc(rs.getString("desc"))
                .envPosition(rs.getString("env_position"))
                .positionNm(rs.getString("position_nm"))
                .masdGridRef(rs.getObject("masd_grid_ref") != null ? rs.getLong("masd_grid_ref") : null)
                .ctyNm(rs.getString("cty_nm"))
                .eventType(rs.getString("event_type"))
                .eventTypeDtl(rs.getString("event_type_dtl"))
                .eventTypeDtlId(rs.getObject("event_type_dtl_id") != null ? rs.getLong("event_type_dtl_id") : null)
                .eventTypeId(rs.getObject("event_type_id") != null ? rs.getLong("event_type_id") : null)
                .fireduponYn(rs.getString("firedupon_yn"))
                .title(rs.getString("title"))
                .ldtTimpt(rs.getObject("ldt_timpt") != null ? rs.getLong("ldt_timpt") : null)
                .signfct(rs.getString("signfct"))
                .wethr(rs.getString("wethr"))
                .pltnMatral(rs.getString("pltn_matral"))
                .pltnMatralCnt(rs.getObject("pltn_matral_cnt") != null ? rs.getLong("pltn_matral_cnt") : null)
                .pltnMatralUnit(rs.getString("pltn_matral_unit"))
                .regShponrCdHr(rs.getString("reg_shponr_cd_hr"))
                .regShponrHr(rs.getString("reg_shponr_hr"))
                .regShponrCountryCdHr(rs.getString("reg_shponr_country_cd_hr"))
                .regShponrCountryHr(rs.getString("reg_shponr_country_hr"))
                .shipDwt(rs.getObject("ship_dwt") != null ? rs.getLong("ship_dwt") : null)
                .shipFlgCd(rs.getString("ship_flg_cd"))
                .shipFlgDecd(rs.getString("ship_flg_decd"))
                .shipGt(rs.getObject("ship_gt") != null ? rs.getLong("ship_gt") : null)
                .shipNm(rs.getString("ship_nm"))
                .shipType(rs.getString("ship_type"))
                .shipTypeNm(rs.getString("ship_type_nm"))
                .build();
    }
}

파일 보기

@ -1,70 +1,38 @@
package com.snp.batch.jobs.datasync.batch.event.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventRelationshipDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Event Relationship 동기화 Reader.
 *
 * <p>Group fetch / batch_flag transition logic lives in {@link BaseSyncReader};
 * this class only supplies the source table name and the row-to-DTO mapping.
 */
@Slf4j
public class EventRelationshipReader extends BaseSyncReader<EventRelationshipDto> {

    public EventRelationshipReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table for event relationship rows. */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceEventRelationship;
    }

    /**
     * Maps the current result-set row to an {@link EventRelationshipDto}.
     *
     * @param rs       result set positioned on the current row
     * @param targetId job_execution_id assigned to this group of rows
     */
    @Override
    protected EventRelationshipDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return EventRelationshipDto.builder()
                .jobExecutionId(targetId)
                .acdntId(rs.getString("acdnt_id"))
                // getObject null-check first: getLong would silently return 0 for SQL NULL
                .eventId(rs.getObject("event_id") != null ? rs.getLong("event_id") : null)
                .eventIdTwo(rs.getObject("event_id_two") != null ? rs.getLong("event_id_two") : null)
                .eventTypeCd(rs.getString("event_type_cd"))
                .eventType(rs.getString("event_type"))
                .relTypeCd(rs.getString("rel_type_cd"))
                .relType(rs.getString("rel_type"))
                .build();
    }
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.facility.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto; import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity; import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
@ -112,12 +109,10 @@ public class FacilitySyncJobConfig extends BaseJobConfig<FacilityPortDto, Facili
public Step facilityPortSyncStep() { public Step facilityPortSyncStep() {
log.info("Step 생성: facilityPortSyncStep"); log.info("Step 생성: facilityPortSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<FacilityPortDto, FacilityPortEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<FacilityPortDto, FacilityPortEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<FacilityPortDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(facilityPortWriteListener()) .listener(facilityPortWriteListener())
.build(); .build();
} }

파일 보기

@ -1,117 +1,86 @@
package com.snp.batch.jobs.datasync.batch.facility.reader; package com.snp.batch.jobs.datasync.batch.facility.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto; import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList; import java.time.ZoneId;
import java.util.List;
@Slf4j @Slf4j
public class FacilityPortReader implements ItemReader<FacilityPortDto> { public class FacilityPortReader extends BaseSyncReader<FacilityPortDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<FacilityPortDto> allDataBuffer = new ArrayList<>();
public FacilityPortReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public FacilityPortReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public FacilityPortDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceFacilityPort;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected FacilityPortDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
nextTargetId = businessJdbcTemplate.queryForObject( Timestamp regYmdTs = rs.getTimestamp("reg_ymd");
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFacilityPort), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return FacilityPortDto.builder()
log.info("[FacilityPortReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFacilityPort); .portId(rs.getObject("port_id") != null ? rs.getLong("port_id") : null)
final Long targetId = nextTargetId; .bfrId(rs.getString("bfr_id"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .status(rs.getString("status"))
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt"); .portNm(rs.getString("port_nm"))
Timestamp regYmdTs = rs.getTimestamp("reg_ymd"); .unPortCd(rs.getString("un_port_cd"))
.countryCd(rs.getString("country_cd"))
return FacilityPortDto.builder() .countryNm(rs.getString("country_nm"))
.jobExecutionId(targetId) .areanm(rs.getString("areanm"))
.portId(rs.getObject("port_id") != null ? rs.getLong("port_id") : null) .cntntnm(rs.getString("cntntnm"))
.bfrId(rs.getString("bfr_id")) .mstPortId(rs.getString("mst_port_id"))
.status(rs.getString("status")) .latDecml(rs.getObject("lat_decml") != null ? rs.getDouble("lat_decml") : null)
.portNm(rs.getString("port_nm")) .lonDecml(rs.getObject("lon_decml") != null ? rs.getDouble("lon_decml") : null)
.unPortCd(rs.getString("un_port_cd")) .positionLat(rs.getObject("position_lat") != null ? rs.getDouble("position_lat") : null)
.countryCd(rs.getString("country_cd")) .positionLon(rs.getObject("position_lon") != null ? rs.getDouble("position_lon") : null)
.countryNm(rs.getString("country_nm")) .positionZVal(rs.getObject("position_z_val") != null ? rs.getDouble("position_z_val") : null)
.areanm(rs.getString("areanm")) .positionMvalVal(rs.getObject("position_mval_val") != null ? rs.getDouble("position_mval_val") : null)
.cntntnm(rs.getString("cntntnm")) .zValHasYn(rs.getObject("z_val_has_yn") != null ? rs.getBoolean("z_val_has_yn") : null)
.mstPortId(rs.getString("mst_port_id")) .mvalValHasYn(rs.getObject("mval_val_has_yn") != null ? rs.getBoolean("mval_val_has_yn") : null)
.latDecml(rs.getObject("lat_decml") != null ? rs.getDouble("lat_decml") : null) .positionNulYn(rs.getObject("position_nul_yn") != null ? rs.getBoolean("position_nul_yn") : null)
.lonDecml(rs.getObject("lon_decml") != null ? rs.getDouble("lon_decml") : null) .positionStsId(rs.getObject("position_sts_id") != null ? rs.getLong("position_sts_id") : null)
.positionLat(rs.getObject("position_lat") != null ? rs.getDouble("position_lat") : null) .hrZone(rs.getString("hr_zone"))
.positionLon(rs.getObject("position_lon") != null ? rs.getDouble("position_lon") : null) .daylgtSaveHr(rs.getObject("daylgt_save_hr") != null ? rs.getBoolean("daylgt_save_hr") : null)
.positionZVal(rs.getObject("position_z_val") != null ? rs.getDouble("position_z_val") : null) .maxDraft(rs.getObject("max_draft") != null ? rs.getDouble("max_draft") : null)
.positionMvalVal(rs.getObject("position_mval_val") != null ? rs.getDouble("position_mval_val") : null) .maxWhlnth(rs.getObject("max_whlnth") != null ? rs.getDouble("max_whlnth") : null)
.zValHasYn(rs.getObject("z_val_has_yn") != null ? rs.getBoolean("z_val_has_yn") : null) .maxBeam(rs.getObject("max_beam") != null ? rs.getDouble("max_beam") : null)
.mvalValHasYn(rs.getObject("mval_val_has_yn") != null ? rs.getBoolean("mval_val_has_yn") : null) .maxDwt(rs.getObject("max_dwt") != null ? rs.getDouble("max_dwt") : null)
.positionNulYn(rs.getObject("position_nul_yn") != null ? rs.getBoolean("position_nul_yn") : null) .maxSeaDraft(rs.getObject("max_sea_draft") != null ? rs.getDouble("max_sea_draft") : null)
.positionStsId(rs.getObject("position_sts_id") != null ? rs.getLong("position_sts_id") : null) .maxSeaWhlnth(rs.getObject("max_sea_whlnth") != null ? rs.getDouble("max_sea_whlnth") : null)
.hrZone(rs.getString("hr_zone")) .maxSeaBcm(rs.getObject("max_sea_bcm") != null ? rs.getDouble("max_sea_bcm") : null)
.daylgtSaveHr(rs.getObject("daylgt_save_hr") != null ? rs.getBoolean("daylgt_save_hr") : null) .maxSeaDwt(rs.getObject("max_sea_dwt") != null ? rs.getDouble("max_sea_dwt") : null)
.maxDraft(rs.getObject("max_draft") != null ? rs.getDouble("max_draft") : null) .baleCargoFacility(rs.getObject("bale_cargo_facility") != null ? rs.getBoolean("bale_cargo_facility") : null)
.maxWhlnth(rs.getObject("max_whlnth") != null ? rs.getDouble("max_whlnth") : null) .cntnrFacility(rs.getObject("cntnr_facility") != null ? rs.getBoolean("cntnr_facility") : null)
.maxBeam(rs.getObject("max_beam") != null ? rs.getDouble("max_beam") : null) .caseCargoFacility(rs.getObject("case_cargo_facility") != null ? rs.getBoolean("case_cargo_facility") : null)
.maxDwt(rs.getObject("max_dwt") != null ? rs.getDouble("max_dwt") : null) .liquidCargoFacility(rs.getObject("liquid_cargo_facility") != null ? rs.getBoolean("liquid_cargo_facility") : null)
.maxSeaDraft(rs.getObject("max_sea_draft") != null ? rs.getDouble("max_sea_draft") : null) .roroFacility(rs.getObject("roro_facility") != null ? rs.getBoolean("roro_facility") : null)
.maxSeaWhlnth(rs.getObject("max_sea_whlnth") != null ? rs.getDouble("max_sea_whlnth") : null) .paxfclty(rs.getObject("paxfclty") != null ? rs.getBoolean("paxfclty") : null)
.maxSeaBcm(rs.getObject("max_sea_bcm") != null ? rs.getDouble("max_sea_bcm") : null) .drydkfclty(rs.getObject("drydkfclty") != null ? rs.getBoolean("drydkfclty") : null)
.maxSeaDwt(rs.getObject("max_sea_dwt") != null ? rs.getDouble("max_sea_dwt") : null) .lpgFacility(rs.getObject("lpg_facility") != null ? rs.getLong("lpg_facility") : null)
.baleCargoFacility(rs.getObject("bale_cargo_facility") != null ? rs.getBoolean("bale_cargo_facility") : null) .lngFacility(rs.getObject("lng_facility") != null ? rs.getLong("lng_facility") : null)
.cntnrFacility(rs.getObject("cntnr_facility") != null ? rs.getBoolean("cntnr_facility") : null) .lngBnkr(rs.getObject("lng_bnkr") != null ? rs.getBoolean("lng_bnkr") : null)
.caseCargoFacility(rs.getObject("case_cargo_facility") != null ? rs.getBoolean("case_cargo_facility") : null) .doBnkr(rs.getObject("do_bnkr") != null ? rs.getBoolean("do_bnkr") : null)
.liquidCargoFacility(rs.getObject("liquid_cargo_facility") != null ? rs.getBoolean("liquid_cargo_facility") : null) .foBnkr(rs.getObject("fo_bnkr") != null ? rs.getBoolean("fo_bnkr") : null)
.roroFacility(rs.getObject("roro_facility") != null ? rs.getBoolean("roro_facility") : null) .ispsComplianceYn(rs.getObject("isps_compliance_yn") != null ? rs.getBoolean("isps_compliance_yn") : null)
.paxfclty(rs.getObject("paxfclty") != null ? rs.getBoolean("paxfclty") : null) .csiComplianceYn(rs.getObject("csi_compliance_yn") != null ? rs.getBoolean("csi_compliance_yn") : null)
.drydkfclty(rs.getObject("drydkfclty") != null ? rs.getBoolean("drydkfclty") : null) .freeTrdZone(rs.getObject("free_trd_zone") != null ? rs.getBoolean("free_trd_zone") : null)
.lpgFacility(rs.getObject("lpg_facility") != null ? rs.getLong("lpg_facility") : null) .ecfrdPort(rs.getObject("ecfrd_port") != null ? rs.getBoolean("ecfrd_port") : null)
.lngFacility(rs.getObject("lng_facility") != null ? rs.getLong("lng_facility") : null) .emsnCtrlArea(rs.getObject("emsn_ctrl_area") != null ? rs.getBoolean("emsn_ctrl_area") : null)
.lngBnkr(rs.getObject("lng_bnkr") != null ? rs.getBoolean("lng_bnkr") : null) .wsPort(rs.getObject("ws_port") != null ? rs.getLong("ws_port") : null)
.doBnkr(rs.getObject("do_bnkr") != null ? rs.getBoolean("do_bnkr") : null) .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toInstant().atZone(ZoneId.systemDefault()) : null)
.foBnkr(rs.getObject("fo_bnkr") != null ? rs.getBoolean("fo_bnkr") : null) .regYmd(regYmdTs != null ? regYmdTs.toInstant().atZone(ZoneId.systemDefault()) : null)
.ispsComplianceYn(rs.getObject("isps_compliance_yn") != null ? rs.getBoolean("isps_compliance_yn") : null) .build();
.csiComplianceYn(rs.getObject("csi_compliance_yn") != null ? rs.getBoolean("csi_compliance_yn") : null)
.freeTrdZone(rs.getObject("free_trd_zone") != null ? rs.getBoolean("free_trd_zone") : null)
.ecfrdPort(rs.getObject("ecfrd_port") != null ? rs.getBoolean("ecfrd_port") : null)
.emsnCtrlArea(rs.getObject("emsn_ctrl_area") != null ? rs.getBoolean("emsn_ctrl_area") : null)
.wsPort(rs.getObject("ws_port") != null ? rs.getLong("ws_port") : null)
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
.regYmd(regYmdTs != null ? regYmdTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFacilityPort);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity;
@ -103,12 +100,10 @@ public class AnchorageCallSyncJobConfig extends BaseJobConfig<AnchorageCallDto,
public Step anchorageCallSyncStep() { public Step anchorageCallSyncStep() {
log.info("Step 생성: anchorageCallSyncStep"); log.info("Step 생성: anchorageCallSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<AnchorageCallDto, AnchorageCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<AnchorageCallDto, AnchorageCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<AnchorageCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(anchorageCallWriteListener()) .listener(anchorageCallWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity;
@ -103,12 +100,10 @@ public class BerthCallSyncJobConfig extends BaseJobConfig<BerthCallDto, BerthCal
public Step berthCallSyncStep() { public Step berthCallSyncStep() {
log.info("Step 생성: berthCallSyncStep"); log.info("Step 생성: berthCallSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<BerthCallDto, BerthCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<BerthCallDto, BerthCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<BerthCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(berthCallWriteListener()) .listener(berthCallWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto; import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity;
@ -103,12 +100,10 @@ public class CurrentlyAtSyncJobConfig extends BaseJobConfig<CurrentlyAtDto, Curr
public Step currentlyAtSyncStep() { public Step currentlyAtSyncStep() {
log.info("Step 생성: currentlyAtSyncStep"); log.info("Step 생성: currentlyAtSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<CurrentlyAtDto, CurrentlyAtEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<CurrentlyAtDto, CurrentlyAtEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<CurrentlyAtDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(currentlyAtWriteListener()) .listener(currentlyAtWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto; import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity;
@ -103,12 +100,10 @@ public class DestinationSyncJobConfig extends BaseJobConfig<DestinationDto, Dest
public Step destinationSyncStep() { public Step destinationSyncStep() {
log.info("Step 생성: destinationSyncStep"); log.info("Step 생성: destinationSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<DestinationDto, DestinationEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<DestinationDto, DestinationEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<DestinationDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(destinationWriteListener()) .listener(destinationWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity;
@ -103,12 +100,10 @@ public class PortCallSyncJobConfig extends BaseJobConfig<PortCallDto, PortCallEn
public Step portCallSyncStep() { public Step portCallSyncStep() {
log.info("Step 생성: portCallSyncStep"); log.info("Step 생성: portCallSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<PortCallDto, PortCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<PortCallDto, PortCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<PortCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(portCallWriteListener()) .listener(portCallWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto; import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity;
@ -103,12 +100,10 @@ public class StsOperationSyncJobConfig extends BaseJobConfig<StsOperationDto, St
public Step stsOperationSyncStep() { public Step stsOperationSyncStep() {
log.info("Step 생성: stsOperationSyncStep"); log.info("Step 생성: stsOperationSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<StsOperationDto, StsOperationEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<StsOperationDto, StsOperationEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<StsOperationDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(stsOperationWriteListener()) .listener(stsOperationWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity;
@ -103,12 +100,10 @@ public class TerminalCallSyncJobConfig extends BaseJobConfig<TerminalCallDto, Te
public Step terminalCallSyncStep() { public Step terminalCallSyncStep() {
log.info("Step 생성: terminalCallSyncStep"); log.info("Step 생성: terminalCallSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<TerminalCallDto, TerminalCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<TerminalCallDto, TerminalCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<TerminalCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(terminalCallWriteListener()) .listener(terminalCallWriteListener())
.build(); .build();
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto; import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity; import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity;
@ -103,12 +100,10 @@ public class TransitSyncJobConfig extends BaseJobConfig<TransitDto, TransitEntit
public Step transitSyncStep() { public Step transitSyncStep() {
log.info("Step 생성: transitSyncStep"); log.info("Step 생성: transitSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<TransitDto, TransitEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<TransitDto, TransitEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<TransitDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(transitWriteListener()) .listener(transitWriteListener())
.build(); .build();
} }

파일 보기

@ -1,85 +1,52 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.math.BigDecimal; import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class AnchorageCallReader implements ItemReader<AnchorageCallDto> { public class AnchorageCallReader extends BaseSyncReader<AnchorageCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<AnchorageCallDto> allDataBuffer = new ArrayList<>();
public AnchorageCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public AnchorageCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public AnchorageCallDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTAnchorageCall;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected AnchorageCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTAnchorageCall), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return AnchorageCallDto.builder()
log.info("[AnchorageCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTAnchorageCall); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null)
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null)
return AnchorageCallDto.builder() .facilityNm(rs.getString("facility_nm"))
.jobExecutionId(targetId) .facilityType(rs.getString("facility_type"))
.imoNo(rs.getString("imo_no")) .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null)
.mvmnType(rs.getString("mvmn_type")) .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) .lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
.prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null) .countryCd(rs.getString("country_cd"))
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) .countryNm(rs.getString("country_nm"))
.facilityNm(rs.getString("facility_nm")) .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.facilityType(rs.getString("facility_type")) .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null) .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc")) .positionInfo(rs.getString("position_info"))
.lwrnkFacilityType(rs.getString("lwrnk_facility_type")) .dest(rs.getString("dest"))
.countryCd(rs.getString("country_cd")) .isoTwoCountryCd(rs.getString("iso_two_country_cd"))
.countryNm(rs.getString("country_nm")) .build();
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.positionInfo(rs.getString("position_info"))
.dest(rs.getString("dest"))
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTAnchorageCall);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,85 +1,53 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class BerthCallReader implements ItemReader<BerthCallDto> { public class BerthCallReader extends BaseSyncReader<BerthCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<BerthCallDto> allDataBuffer = new ArrayList<>();
public BerthCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public BerthCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public BerthCallDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTBerthCall;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected BerthCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject( Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTBerthCall), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return BerthCallDto.builder()
log.info("[BerthCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTBerthCall); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null)
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt"); .facilityNm(rs.getString("facility_nm"))
.facilityType(rs.getString("facility_type"))
return BerthCallDto.builder() .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null)
.jobExecutionId(targetId) .upFacilityNm(rs.getString("up_facility_nm"))
.imoNo(rs.getString("imo_no")) .upFacilityType(rs.getString("up_facility_type"))
.mvmnType(rs.getString("mvmn_type")) .countryCd(rs.getString("country_cd"))
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) .countryNm(rs.getString("country_nm"))
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.facilityNm(rs.getString("facility_nm")) .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.facilityType(rs.getString("facility_type")) .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) .positionInfo(rs.getString("position_info"))
.upFacilityNm(rs.getString("up_facility_nm")) .upClotId(rs.getObject("up_clot_id") != null ? rs.getLong("up_clot_id") : null)
.upFacilityType(rs.getString("up_facility_type")) .isoTwoCountryCd(rs.getString("iso_two_country_cd"))
.countryCd(rs.getString("country_cd")) .eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.countryNm(rs.getString("country_nm")) .build();
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.positionInfo(rs.getString("position_info"))
.upClotId(rs.getObject("up_clot_id") != null ? rs.getLong("up_clot_id") : null)
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTBerthCall);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,87 +1,55 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto; import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class CurrentlyAtReader implements ItemReader<CurrentlyAtDto> { public class CurrentlyAtReader extends BaseSyncReader<CurrentlyAtDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<CurrentlyAtDto> allDataBuffer = new ArrayList<>();
public CurrentlyAtReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public CurrentlyAtReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public CurrentlyAtDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTCurrentlyAt;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected CurrentlyAtDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTCurrentlyAt), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return CurrentlyAtDto.builder()
log.info("[CurrentlyAtReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTCurrentlyAt); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null)
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null)
return CurrentlyAtDto.builder() .facilityNm(rs.getString("facility_nm"))
.jobExecutionId(targetId) .facilityType(rs.getString("facility_type"))
.imoNo(rs.getString("imo_no")) .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null)
.mvmnType(rs.getString("mvmn_type")) .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) .lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
.prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null) .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null)
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) .upFacilityNm(rs.getString("up_facility_nm"))
.facilityNm(rs.getString("facility_nm")) .upFacilityType(rs.getString("up_facility_type"))
.facilityType(rs.getString("facility_type")) .countryCd(rs.getString("country_cd"))
.lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null) .countryNm(rs.getString("country_nm"))
.lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc")) .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.lwrnkFacilityType(rs.getString("lwrnk_facility_type")) .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.upFacilityNm(rs.getString("up_facility_nm")) .dest(rs.getString("dest"))
.upFacilityType(rs.getString("up_facility_type")) .countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.countryCd(rs.getString("country_cd")) .positionInfo(rs.getString("position_info"))
.countryNm(rs.getString("country_nm")) .build();
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.dest(rs.getString("dest"))
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.positionInfo(rs.getString("position_info"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTCurrentlyAt);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,78 +1,46 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto; import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class DestinationReader implements ItemReader<DestinationDto> { public class DestinationReader extends BaseSyncReader<DestinationDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<DestinationDto> allDataBuffer = new ArrayList<>();
public DestinationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public DestinationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public DestinationDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTDestination;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected DestinationDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTDestination), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return DestinationDto.builder()
log.info("[DestinationReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTDestination); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null)
.facilityNm(rs.getString("facility_nm"))
return DestinationDto.builder() .facilityType(rs.getString("facility_type"))
.jobExecutionId(targetId) .countryCd(rs.getString("country_cd"))
.imoNo(rs.getString("imo_no")) .countryNm(rs.getString("country_nm"))
.mvmnType(rs.getString("mvmn_type")) .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) .positionInfo(rs.getString("position_info"))
.facilityNm(rs.getString("facility_nm")) .countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.facilityType(rs.getString("facility_type")) .build();
.countryCd(rs.getString("country_cd"))
.countryNm(rs.getString("country_nm"))
.lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.positionInfo(rs.getString("position_info"))
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTDestination);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,87 +1,55 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class PortCallReader implements ItemReader<PortCallDto> { public class PortCallReader extends BaseSyncReader<PortCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PortCallDto> allDataBuffer = new ArrayList<>();
public PortCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public PortCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public PortCallDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTShipStpovInfo;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected PortCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTShipStpovInfo), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return PortCallDto.builder()
log.info("[PortCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTShipStpovInfo); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null)
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null)
return PortCallDto.builder() .facilityNm(rs.getString("facility_nm"))
.jobExecutionId(targetId) .facilityType(rs.getString("facility_type"))
.imoNo(rs.getString("imo_no")) .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null)
.mvmnType(rs.getString("mvmn_type")) .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) .lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
.prtcllId(rs.getObject("prtcll_id") != null ? rs.getInt("prtcll_id") : null) .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null)
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) .upFacilityNm(rs.getString("up_facility_nm"))
.facilityNm(rs.getString("facility_nm")) .upFacilityType(rs.getString("up_facility_type"))
.facilityType(rs.getString("facility_type")) .countryCd(rs.getString("country_cd"))
.lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null) .countryNm(rs.getString("country_nm"))
.lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc")) .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.lwrnkFacilityType(rs.getString("lwrnk_facility_type")) .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.upFacilityNm(rs.getString("up_facility_nm")) .dest(rs.getString("dest"))
.upFacilityType(rs.getString("up_facility_type")) .countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.countryCd(rs.getString("country_cd")) .positionInfo(rs.getString("position_info"))
.countryNm(rs.getString("country_nm")) .build();
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.dest(rs.getString("dest"))
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.positionInfo(rs.getString("position_info"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTShipStpovInfo);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,86 +1,54 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto; import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class StsOperationReader implements ItemReader<StsOperationDto> { public class StsOperationReader extends BaseSyncReader<StsOperationDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<StsOperationDto> allDataBuffer = new ArrayList<>();
public StsOperationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public StsOperationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public StsOperationDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTStsOperation;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected StsOperationDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject( Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTStsOperation), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return StsOperationDto.builder()
log.info("[StsOperationReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTStsOperation); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null)
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt"); .facilityNm(rs.getString("facility_nm"))
.facilityType(rs.getString("facility_type"))
return StsOperationDto.builder() .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null)
.jobExecutionId(targetId) .upFacilityNm(rs.getString("up_facility_nm"))
.imoNo(rs.getString("imo_no")) .upFacilityType(rs.getString("up_facility_type"))
.mvmnType(rs.getString("mvmn_type")) .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.facilityNm(rs.getString("facility_nm")) .positionInfo(rs.getString("position_info"))
.facilityType(rs.getString("facility_type")) .upPrtcllId(rs.getObject("up_prtcll_id") != null ? rs.getLong("up_prtcll_id") : null)
.upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) .countryCd(rs.getString("country_cd"))
.upFacilityNm(rs.getString("up_facility_nm")) .countryNm(rs.getString("country_nm"))
.upFacilityType(rs.getString("up_facility_type")) .stsPosition(rs.getString("sts_position"))
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) .stsType(rs.getString("sts_type"))
.lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) .eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null) .build();
.positionInfo(rs.getString("position_info"))
.upPrtcllId(rs.getObject("up_prtcll_id") != null ? rs.getLong("up_prtcll_id") : null)
.countryCd(rs.getString("country_cd"))
.countryNm(rs.getString("country_nm"))
.stsPosition(rs.getString("sts_position"))
.stsType(rs.getString("sts_type"))
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTStsOperation);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,88 +1,56 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto; import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class TerminalCallReader implements ItemReader<TerminalCallDto> { public class TerminalCallReader extends BaseSyncReader<TerminalCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<TerminalCallDto> allDataBuffer = new ArrayList<>();
public TerminalCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public TerminalCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public TerminalCallDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTTerminalCall;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected TerminalCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject( Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTerminalCall), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return TerminalCallDto.builder()
log.info("[TerminalCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTerminalCall); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null)
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt"); .facilityNm(rs.getString("facility_nm"))
.facilityType(rs.getString("facility_type"))
return TerminalCallDto.builder() .upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null)
.jobExecutionId(targetId) .upFacilityNm(rs.getString("up_facility_nm"))
.imoNo(rs.getString("imo_no")) .upFacilityType(rs.getString("up_facility_type"))
.mvmnType(rs.getString("mvmn_type")) .countryCd(rs.getString("country_cd"))
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null) .countryNm(rs.getString("country_nm"))
.facilityId(rs.getObject("facility_id") != null ? rs.getInt("facility_id") : null) .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.facilityNm(rs.getString("facility_nm")) .lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null)
.facilityType(rs.getString("facility_type")) .lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null)
.upFacilityId(rs.getObject("up_facility_id") != null ? rs.getInt("up_facility_id") : null) .positionInfo(rs.getString("position_info"))
.upFacilityNm(rs.getString("up_facility_nm")) .upPrtcllId(rs.getObject("up_prtcll_id") != null ? rs.getLong("up_prtcll_id") : null)
.upFacilityType(rs.getString("up_facility_type")) .countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.countryCd(rs.getString("country_cd")) .eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.countryNm(rs.getString("country_nm")) .lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null)
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null) .lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
.lat(rs.getObject("lat") != null ? rs.getBigDecimal("lat") : null) .lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
.lon(rs.getObject("lon") != null ? rs.getBigDecimal("lon") : null) .build();
.positionInfo(rs.getString("position_info"))
.upPrtcllId(rs.getObject("up_prtcll_id") != null ? rs.getLong("up_prtcll_id") : null)
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.lwrnkFacilityId(rs.getObject("lwrnk_facility_id") != null ? rs.getInt("lwrnk_facility_id") : null)
.lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
.lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTerminalCall);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,72 +1,40 @@
package com.snp.batch.jobs.datasync.batch.movement.reader; package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto; import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class TransitReader implements ItemReader<TransitDto> { public class TransitReader extends BaseSyncReader<TransitDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<TransitDto> allDataBuffer = new ArrayList<>();
public TransitReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public TransitReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public TransitDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTTransit;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected TransitDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTransit), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return TransitDto.builder()
log.info("[TransitReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTransit); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .mvmnType(rs.getString("mvmn_type"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt"); .facilityNm(rs.getString("facility_nm"))
.facilityType(rs.getString("facility_type"))
return TransitDto.builder() .draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.jobExecutionId(targetId) .build();
.imoNo(rs.getString("imo_no"))
.mvmnType(rs.getString("mvmn_type"))
.mvmnDt(mvmnDtTs != null ? mvmnDtTs.toLocalDateTime() : null)
.facilityNm(rs.getString("facility_nm"))
.facilityType(rs.getString("facility_type"))
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTransit);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.psc.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto; import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto; import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
@ -154,12 +151,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscDetailSyncStep() { public Step pscDetailSyncStep() {
log.info("Step 생성: pscDetailSyncStep"); log.info("Step 생성: pscDetailSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<PscDetailDto, PscDetailEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<PscDetailDto, PscDetailEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<PscDetailDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(pscDetailWriteListener()) .listener(pscDetailWriteListener())
.build(); .build();
} }
@ -168,12 +163,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscDefectSyncStep() { public Step pscDefectSyncStep() {
log.info("Step 생성: pscDefectSyncStep"); log.info("Step 생성: pscDefectSyncStep");
return new StepBuilder("pscDefectSyncStep", jobRepository) return new StepBuilder("pscDefectSyncStep", jobRepository)
.<PscDefectDto, PscDefectEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<PscDefectDto, PscDefectEntity>chunk(getChunkSize(), transactionManager)
.reader(pscDefectReader(businessDataSource, tableMetaInfo)) .reader(pscDefectReader(businessDataSource, tableMetaInfo))
.processor(new PscDefectProcessor()) .processor(new PscDefectProcessor())
.writer(new PscDefectWriter(pscRepository, transactionManager, subChunkSize)) .writer(new PscDefectWriter(pscRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<PscDefectDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(pscDefectWriteListener()) .listener(pscDefectWriteListener())
.build(); .build();
} }
@ -182,12 +175,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscAllCertificateSyncStep() { public Step pscAllCertificateSyncStep() {
log.info("Step 생성: pscAllCertificateSyncStep"); log.info("Step 생성: pscAllCertificateSyncStep");
return new StepBuilder("pscAllCertificateSyncStep", jobRepository) return new StepBuilder("pscAllCertificateSyncStep", jobRepository)
.<PscAllCertificateDto, PscAllCertificateEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<PscAllCertificateDto, PscAllCertificateEntity>chunk(getChunkSize(), transactionManager)
.reader(pscAllCertificateReader(businessDataSource, tableMetaInfo)) .reader(pscAllCertificateReader(businessDataSource, tableMetaInfo))
.processor(new PscAllCertificateProcessor()) .processor(new PscAllCertificateProcessor())
.writer(new PscAllCertificateWriter(pscRepository, transactionManager, subChunkSize)) .writer(new PscAllCertificateWriter(pscRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<PscAllCertificateDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(pscAllCertificateWriteListener()) .listener(pscAllCertificateWriteListener())
.build(); .build();
} }

파일 보기

@ -1,87 +1,55 @@
package com.snp.batch.jobs.datasync.batch.psc.reader; package com.snp.batch.jobs.datasync.batch.psc.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto; import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class PscAllCertificateReader implements ItemReader<PscAllCertificateDto> { public class PscAllCertificateReader extends BaseSyncReader<PscAllCertificateDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PscAllCertificateDto> allDataBuffer = new ArrayList<>();
public PscAllCertificateReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public PscAllCertificateReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public PscAllCertificateDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourcePscAllCertificate;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected PscAllCertificateDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp expryYmdTs = rs.getTimestamp("expry_ymd");
nextTargetId = businessJdbcTemplate.queryForObject( Timestamp lastInspectionYmdTs = rs.getTimestamp("last_inspection_ymd");
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscAllCertificate), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return PscAllCertificateDto.builder()
log.info("[PscAllCertificateReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscAllCertificate); .datasetVer(rs.getString("dataset_ver"))
final Long targetId = nextTargetId; .certId(rs.getString("cert_id"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .inspectionId(rs.getString("inspection_id"))
Timestamp expryYmdTs = rs.getTimestamp("expry_ymd"); .imoNo(rs.getString("imo_no"))
Timestamp lastInspectionYmdTs = rs.getTimestamp("last_inspection_ymd"); .certfNmCd(rs.getString("certf_nm_cd"))
.certfNm(rs.getString("certf_nm"))
return PscAllCertificateDto.builder() .issueEnginesCd(rs.getString("issue_engines_cd"))
.jobExecutionId(targetId) .issueEngines(rs.getString("issue_engines"))
.datasetVer(rs.getString("dataset_ver")) .etcIssueEngines(rs.getString("etc_issue_engines"))
.certId(rs.getString("cert_id")) .issueYmd(rs.getString("issue_ymd"))
.inspectionId(rs.getString("inspection_id")) .expryYmd(expryYmdTs != null ? expryYmdTs.toLocalDateTime() : null)
.imoNo(rs.getString("imo_no")) .lastInspectionYmd(lastInspectionYmdTs != null ? lastInspectionYmdTs.toLocalDateTime() : null)
.certfNmCd(rs.getString("certf_nm_cd")) .inspectionEnginesCd(rs.getString("inspection_engines_cd"))
.certfNm(rs.getString("certf_nm")) .inspectionEngines(rs.getString("inspection_engines"))
.issueEnginesCd(rs.getString("issue_engines_cd")) .etcInspectionEngines(rs.getString("etc_inspection_engines"))
.issueEngines(rs.getString("issue_engines")) .recentInspectionPlc(rs.getString("recent_inspection_plc"))
.etcIssueEngines(rs.getString("etc_issue_engines")) .recentInspectionPlcCd(rs.getString("recent_inspection_plc_cd"))
.issueYmd(rs.getString("issue_ymd")) .inspectionEnginesType(rs.getString("inspection_engines_type"))
.expryYmd(expryYmdTs != null ? expryYmdTs.toLocalDateTime() : null) .checkYmd(rs.getString("check_ymd"))
.lastInspectionYmd(lastInspectionYmdTs != null ? lastInspectionYmdTs.toLocalDateTime() : null) .insptr(rs.getString("insptr"))
.inspectionEnginesCd(rs.getString("inspection_engines_cd")) .build();
.inspectionEngines(rs.getString("inspection_engines"))
.etcInspectionEngines(rs.getString("etc_inspection_engines"))
.recentInspectionPlc(rs.getString("recent_inspection_plc"))
.recentInspectionPlcCd(rs.getString("recent_inspection_plc_cd"))
.inspectionEnginesType(rs.getString("inspection_engines_type"))
.checkYmd(rs.getString("check_ymd"))
.insptr(rs.getString("insptr"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscAllCertificate);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,87 +1,55 @@
package com.snp.batch.jobs.datasync.batch.psc.reader; package com.snp.batch.jobs.datasync.batch.psc.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto; import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class PscDefectReader implements ItemReader<PscDefectDto> { public class PscDefectReader extends BaseSyncReader<PscDefectDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PscDefectDto> allDataBuffer = new ArrayList<>();
public PscDefectReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public PscDefectReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public PscDefectDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourcePscDefect;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected PscDefectDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return PscDefectDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDefect), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .defectId(rs.getString("defect_id"))
return; .inspectionId(rs.getString("inspection_id"))
} .actnOne(rs.getString("actn_one"))
.actnTwo(rs.getString("actn_two"))
if (nextTargetId != null) { .actnThr(rs.getString("actn_thr"))
log.info("[PscDefectReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .actnCdOne(rs.getString("actn_cd_one"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDefect); .actnCdTwo(rs.getString("actn_cd_two"))
final Long targetId = nextTargetId; .actnCdThr(rs.getString("actn_cd_thr"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .clficRespsbYn(rs.getString("clfic_respsb_yn"))
return PscDefectDto.builder() .defectCd(rs.getString("defect_cd"))
.jobExecutionId(targetId) .defectCn(rs.getString("defect_cn"))
.datasetVer(rs.getString("dataset_ver")) .defectIemCd(rs.getString("defect_iem_cd"))
.defectId(rs.getString("defect_id")) .detainedReasonDefect(rs.getString("detained_reason_defect"))
.inspectionId(rs.getString("inspection_id")) .mainDefectCd(rs.getString("main_defect_cd"))
.actnOne(rs.getString("actn_one")) .mainDefectCn(rs.getString("main_defect_cn"))
.actnTwo(rs.getString("actn_two")) .defectTypeCd(rs.getString("defect_type_cd"))
.actnThr(rs.getString("actn_thr")) .defectTypeNm(rs.getString("defect_type_nm"))
.actnCdOne(rs.getString("actn_cd_one")) .etcActn(rs.getString("etc_actn"))
.actnCdTwo(rs.getString("actn_cd_two")) .etcPubcEnginesRespsb(rs.getString("etc_pubc_engines_respsb"))
.actnCdThr(rs.getString("actn_cd_thr")) .pubcEnginesRespsb(rs.getString("pubc_engines_respsb"))
.clficRespsbYn(rs.getString("clfic_respsb_yn")) .pubcEnginesRespsbCd(rs.getString("pubc_engines_respsb_cd"))
.defectCd(rs.getString("defect_cd")) .pubcEnginesRespsbYn(rs.getString("pubc_engines_respsb_yn"))
.defectCn(rs.getString("defect_cn")) .acdntDamgYn(rs.getString("acdnt_damg_yn"))
.defectIemCd(rs.getString("defect_iem_cd")) .build();
.detainedReasonDefect(rs.getString("detained_reason_defect"))
.mainDefectCd(rs.getString("main_defect_cd"))
.mainDefectCn(rs.getString("main_defect_cn"))
.defectTypeCd(rs.getString("defect_type_cd"))
.defectTypeNm(rs.getString("defect_type_nm"))
.etcActn(rs.getString("etc_actn"))
.etcPubcEnginesRespsb(rs.getString("etc_pubc_engines_respsb"))
.pubcEnginesRespsb(rs.getString("pubc_engines_respsb"))
.pubcEnginesRespsbCd(rs.getString("pubc_engines_respsb_cd"))
.pubcEnginesRespsbYn(rs.getString("pubc_engines_respsb_yn"))
.acdntDamgYn(rs.getString("acdnt_damg_yn"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDefect);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,98 +1,66 @@
package com.snp.batch.jobs.datasync.batch.psc.reader; package com.snp.batch.jobs.datasync.batch.psc.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto; import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class PscDetailReader implements ItemReader<PscDetailDto> { public class PscDetailReader extends BaseSyncReader<PscDetailDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PscDetailDto> allDataBuffer = new ArrayList<>();
public PscDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public PscDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public PscDetailDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourcePscDetail;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected PscDetailDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp inspectionYmdTs = rs.getTimestamp("inspection_ymd");
nextTargetId = businessJdbcTemplate.queryForObject( Timestamp tkoffPrmtYmdTs = rs.getTimestamp("tkoff_prmt_ymd");
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDetail), Long.class); Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return PscDetailDto.builder()
log.info("[PscDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDetail); .datasetVer(rs.getString("dataset_ver"))
final Long targetId = nextTargetId; .imoNo(rs.getString("imo_no"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .inspectionId(rs.getString("inspection_id"))
Timestamp inspectionYmdTs = rs.getTimestamp("inspection_ymd"); .typeId(rs.getString("type_id"))
Timestamp tkoffPrmtYmdTs = rs.getTimestamp("tkoff_prmt_ymd"); .clsgnNo(rs.getString("clsgn_no"))
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt"); .chrter(rs.getString("chrter"))
.clfic(rs.getString("clfic"))
return PscDetailDto.builder() .country(rs.getString("country"))
.jobExecutionId(targetId) .inspectionYmd(inspectionYmdTs != null ? inspectionYmdTs.toLocalDateTime() : null)
.datasetVer(rs.getString("dataset_ver")) .tkoffPrmtYmd(tkoffPrmtYmdTs != null ? tkoffPrmtYmdTs.toLocalDateTime() : null)
.imoNo(rs.getString("imo_no")) .shipDetainedYn(rs.getString("ship_detained_yn"))
.inspectionId(rs.getString("inspection_id")) .dwt(rs.getString("dwt"))
.typeId(rs.getString("type_id")) .expndInspectionYn(rs.getString("expnd_inspection_yn"))
.clsgnNo(rs.getString("clsgn_no")) .flg(rs.getString("flg"))
.chrter(rs.getString("chrter")) .folwInspectionYn(rs.getString("folw_inspection_yn"))
.clfic(rs.getString("clfic")) .gt(rs.getString("gt"))
.country(rs.getString("country")) .inspectionPortNm(rs.getString("inspection_port_nm"))
.inspectionYmd(inspectionYmdTs != null ? inspectionYmdTs.toLocalDateTime() : null) .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null)
.tkoffPrmtYmd(tkoffPrmtYmdTs != null ? tkoffPrmtYmdTs.toLocalDateTime() : null) .shipMngr(rs.getString("ship_mngr"))
.shipDetainedYn(rs.getString("ship_detained_yn")) .detainedDays(rs.getObject("detained_days") != null ? rs.getInt("detained_days") : null)
.dwt(rs.getString("dwt")) .defectCnt(rs.getString("defect_cnt"))
.expndInspectionYn(rs.getString("expnd_inspection_yn")) .defectCntDays(rs.getBigDecimal("defect_cnt_days"))
.flg(rs.getString("flg")) .etcInspectionType(rs.getString("etc_inspection_type"))
.folwInspectionYn(rs.getString("folw_inspection_yn")) .shponr(rs.getString("shponr"))
.gt(rs.getString("gt")) .shipNm(rs.getString("ship_nm"))
.inspectionPortNm(rs.getString("inspection_port_nm")) .shipTypeCd(rs.getString("ship_type_cd"))
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null) .shipTypeNm(rs.getString("ship_type_nm"))
.shipMngr(rs.getString("ship_mngr")) .dataSrc(rs.getString("data_src"))
.detainedDays(rs.getObject("detained_days") != null ? rs.getInt("detained_days") : null) .unPortCd(rs.getString("un_port_cd"))
.defectCnt(rs.getString("defect_cnt")) .buildYy(rs.getString("build_yy"))
.defectCntDays(rs.getBigDecimal("defect_cnt_days")) .build();
.etcInspectionType(rs.getString("etc_inspection_type"))
.shponr(rs.getString("shponr"))
.shipNm(rs.getString("ship_nm"))
.shipTypeCd(rs.getString("ship_type_cd"))
.shipTypeNm(rs.getString("ship_type_nm"))
.dataSrc(rs.getString("data_src"))
.unPortCd(rs.getString("un_port_cd"))
.buildYy(rs.getString("build_yy"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDetail);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.risk.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto; import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity; import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity;
@ -112,12 +109,10 @@ public class RiskSyncJobConfig extends BaseJobConfig<RiskDto, RiskEntity> {
public Step riskSyncStep() { public Step riskSyncStep() {
log.info("Step 생성: riskSyncStep"); log.info("Step 생성: riskSyncStep");
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<RiskDto, RiskEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<RiskDto, RiskEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<RiskDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(riskWriteListener()) .listener(riskWriteListener())
.build(); .build();
} }

파일 보기

@ -1,108 +1,76 @@
package com.snp.batch.jobs.datasync.batch.risk.reader; package com.snp.batch.jobs.datasync.batch.risk.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto; import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j @Slf4j
public class RiskReader implements ItemReader<RiskDto> { public class RiskReader extends BaseSyncReader<RiskDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<RiskDto> allDataBuffer = new ArrayList<>();
public RiskReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public RiskReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public RiskDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceRisk;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected RiskDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceRisk), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) { return RiskDto.builder()
log.info("[RiskReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .jobExecutionId(targetId)
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceRisk); .imoNo(rs.getString("imo_no"))
final Long targetId = nextTargetId; .lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null)
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .riskDataMaint(rs.getString("risk_data_maint"))
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt"); .aisNotrcvElpsDays(rs.getString("ais_notrcv_elps_days"))
.aisLwrnkDays(rs.getString("ais_lwrnk_days"))
return RiskDto.builder() .aisUpImoDesc(rs.getString("ais_up_imo_desc"))
.jobExecutionId(targetId) .othrShipNmVoyYn(rs.getString("othr_ship_nm_voy_yn"))
.imoNo(rs.getString("imo_no")) .mmsiAnomMessage(rs.getString("mmsi_anom_message"))
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toLocalDateTime() : null) .recentDarkActv(rs.getString("recent_dark_actv"))
.riskDataMaint(rs.getString("risk_data_maint")) .portPrtcll(rs.getString("port_prtcll"))
.aisNotrcvElpsDays(rs.getString("ais_notrcv_elps_days")) .portRisk(rs.getString("port_risk"))
.aisLwrnkDays(rs.getString("ais_lwrnk_days")) .stsJob(rs.getString("sts_job"))
.aisUpImoDesc(rs.getString("ais_up_imo_desc")) .driftChg(rs.getString("drift_chg"))
.othrShipNmVoyYn(rs.getString("othr_ship_nm_voy_yn")) .riskEvent(rs.getString("risk_event"))
.mmsiAnomMessage(rs.getString("mmsi_anom_message")) .ntnltyChg(rs.getString("ntnlty_chg"))
.recentDarkActv(rs.getString("recent_dark_actv")) .ntnltyPrsMouPerf(rs.getString("ntnlty_prs_mou_perf"))
.portPrtcll(rs.getString("port_prtcll")) .ntnltyTkyMouPerf(rs.getString("ntnlty_tky_mou_perf"))
.portRisk(rs.getString("port_risk")) .ntnltyUscgMouPerf(rs.getString("ntnlty_uscg_mou_perf"))
.stsJob(rs.getString("sts_job")) .uscgExclShipCert(rs.getString("uscg_excl_ship_cert"))
.driftChg(rs.getString("drift_chg")) .pscInspectionElpsHr(rs.getString("psc_inspection_elps_hr"))
.riskEvent(rs.getString("risk_event")) .pscInspection(rs.getString("psc_inspection"))
.ntnltyChg(rs.getString("ntnlty_chg")) .pscDefect(rs.getString("psc_defect"))
.ntnltyPrsMouPerf(rs.getString("ntnlty_prs_mou_perf")) .pscDetained(rs.getString("psc_detained"))
.ntnltyTkyMouPerf(rs.getString("ntnlty_tky_mou_perf")) .nowSmgrcEvdc(rs.getString("now_smgrc_evdc"))
.ntnltyUscgMouPerf(rs.getString("ntnlty_uscg_mou_perf")) .doccChg(rs.getString("docc_chg"))
.uscgExclShipCert(rs.getString("uscg_excl_ship_cert")) .nowClfic(rs.getString("now_clfic"))
.pscInspectionElpsHr(rs.getString("psc_inspection_elps_hr")) .clficStatusChg(rs.getString("clfic_status_chg"))
.pscInspection(rs.getString("psc_inspection")) .pniInsrnc(rs.getString("pni_insrnc"))
.pscDefect(rs.getString("psc_defect")) .shipNmChg(rs.getString("ship_nm_chg"))
.pscDetained(rs.getString("psc_detained")) .gboChg(rs.getString("gbo_chg"))
.nowSmgrcEvdc(rs.getString("now_smgrc_evdc")) .vslage(rs.getString("vslage"))
.doccChg(rs.getString("docc_chg")) .ilglFshrViol(rs.getString("ilgl_fshr_viol"))
.nowClfic(rs.getString("now_clfic")) .draftChg(rs.getString("draft_chg"))
.clficStatusChg(rs.getString("clfic_status_chg")) .recentSanctionPrtcll(rs.getString("recent_sanction_prtcll"))
.pniInsrnc(rs.getString("pni_insrnc")) .snglShipVoy(rs.getString("sngl_ship_voy"))
.shipNmChg(rs.getString("ship_nm_chg")) .fltsfty(rs.getString("fltsfty"))
.gboChg(rs.getString("gbo_chg")) .fltPsc(rs.getString("flt_psc"))
.vslage(rs.getString("vslage")) .spcInspectionOvdue(rs.getString("spc_inspection_ovdue"))
.ilglFshrViol(rs.getString("ilgl_fshr_viol")) .ownrUnk(rs.getString("ownr_unk"))
.draftChg(rs.getString("draft_chg")) .rssPortCall(rs.getString("rss_port_call"))
.recentSanctionPrtcll(rs.getString("recent_sanction_prtcll")) .rssOwnrReg(rs.getString("rss_ownr_reg"))
.snglShipVoy(rs.getString("sngl_ship_voy")) .rssSts(rs.getString("rss_sts"))
.fltsfty(rs.getString("fltsfty")) .build();
.fltPsc(rs.getString("flt_psc"))
.spcInspectionOvdue(rs.getString("spc_inspection_ovdue"))
.ownrUnk(rs.getString("ownr_unk"))
.rssPortCall(rs.getString("rss_port_call"))
.rssOwnrReg(rs.getString("rss_ownr_reg"))
.rssSts(rs.getString("rss_sts"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceRisk);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.ship.config;
import com.snp.batch.common.batch.config.BaseJobConfig; import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener; import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
@ -634,12 +631,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
@Bean(name = "snpShipDetailSyncStep") @Bean(name = "snpShipDetailSyncStep")
public Step snpShipDetailSyncStep() { public Step snpShipDetailSyncStep() {
return new StepBuilder(getStepName(), jobRepository) return new StepBuilder(getStepName(), jobRepository)
.<ShipInfoMstDto, ShipInfoMstEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<ShipInfoMstDto, ShipInfoMstEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader()) .reader(createReader())
.processor(createProcessor()) .processor(createProcessor())
.writer(createWriter()) .writer(createWriter())
.listener(new GroupByExecutionIdReadListener<ShipInfoMstDto>()) // Reader 리스너 (ThreadLocal 설정)
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 (ThreadLocal 정리)
.listener(shipWriteListener()) // Write 완료 batch_flag 업데이트 .listener(shipWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -648,12 +643,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step ownerHistorySyncStep() { public Step ownerHistorySyncStep() {
log.info("Step 생성: ownerHistorySyncStep"); log.info("Step 생성: ownerHistorySyncStep");
return new StepBuilder("ownerHistorySyncStep", jobRepository) return new StepBuilder("ownerHistorySyncStep", jobRepository)
.<OwnerHistoryDto, OwnerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<OwnerHistoryDto, OwnerHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(ownerHistoryReader(businessDataSource, tableMetaInfo)) .reader(ownerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new OwnerHistoryProcessor()) .processor(new OwnerHistoryProcessor())
.writer(new OwnerHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new OwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<OwnerHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(ownerHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(ownerHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -662,12 +655,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step shipAddInfoSyncStep() { public Step shipAddInfoSyncStep() {
log.info("Step 생성: shipAddInfoSyncStep"); log.info("Step 생성: shipAddInfoSyncStep");
return new StepBuilder("shipAddInfoSyncStep", jobRepository) return new StepBuilder("shipAddInfoSyncStep", jobRepository)
.<ShipAddInfoDto, ShipAddInfoEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<ShipAddInfoDto, ShipAddInfoEntity>chunk(getChunkSize(), transactionManager)
.reader(shipAddInfoReader(businessDataSource, tableMetaInfo)) .reader(shipAddInfoReader(businessDataSource, tableMetaInfo))
.processor(new ShipAddInfoProcessor()) .processor(new ShipAddInfoProcessor())
.writer(new ShipAddInfoWriter(shipRepository, transactionManager, subChunkSize)) .writer(new ShipAddInfoWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<ShipAddInfoDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(shipAddInfoWriteListener()) // Write 완료 batch_flag 업데이트 .listener(shipAddInfoWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -676,12 +667,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step bareboatCharterHistorySyncStep() { public Step bareboatCharterHistorySyncStep() {
log.info("Step 생성: bareboatCharterHistorySyncStep"); log.info("Step 생성: bareboatCharterHistorySyncStep");
return new StepBuilder("bareboatCharterHistorySyncStep", jobRepository) return new StepBuilder("bareboatCharterHistorySyncStep", jobRepository)
.<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(bareboatCharterHistoryReader(businessDataSource, tableMetaInfo)) .reader(bareboatCharterHistoryReader(businessDataSource, tableMetaInfo))
.processor(new BareboatCharterHistoryProcessor()) .processor(new BareboatCharterHistoryProcessor())
.writer(new BareboatCharterHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new BareboatCharterHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<BareboatCharterHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(bareboatCharterHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(bareboatCharterHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -690,12 +679,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step callsignAndMmsiHistorySyncStep() { public Step callsignAndMmsiHistorySyncStep() {
log.info("Step 생성: callsignAndMmsiHistorySyncStep"); log.info("Step 생성: callsignAndMmsiHistorySyncStep");
return new StepBuilder("callsignAndMmsiHistorySyncStep", jobRepository) return new StepBuilder("callsignAndMmsiHistorySyncStep", jobRepository)
.<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(callsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo)) .reader(callsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo))
.processor(new CallsignAndMmsiHistoryProcessor()) .processor(new CallsignAndMmsiHistoryProcessor())
.writer(new CallsignAndMmsiHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new CallsignAndMmsiHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<CallsignAndMmsiHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(callsignAndMmsiHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(callsignAndMmsiHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -704,12 +691,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step classHistorySyncStep() { public Step classHistorySyncStep() {
log.info("Step 생성: classHistorySyncStep"); log.info("Step 생성: classHistorySyncStep");
return new StepBuilder("classHistorySyncStep", jobRepository) return new StepBuilder("classHistorySyncStep", jobRepository)
.<ClassHistoryDto, ClassHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<ClassHistoryDto, ClassHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(classHistoryReader(businessDataSource, tableMetaInfo)) .reader(classHistoryReader(businessDataSource, tableMetaInfo))
.processor(new ClassHistoryProcessor()) .processor(new ClassHistoryProcessor())
.writer(new ClassHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new ClassHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<ClassHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(classHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(classHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -718,12 +703,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step companyVesselRelationshipsSyncStep() { public Step companyVesselRelationshipsSyncStep() {
log.info("Step 생성: companyVesselRelationshipsSyncStep"); log.info("Step 생성: companyVesselRelationshipsSyncStep");
return new StepBuilder("companyVesselRelationshipsSyncStep", jobRepository) return new StepBuilder("companyVesselRelationshipsSyncStep", jobRepository)
.<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(getChunkSize(), transactionManager)
.reader(companyVesselRelationshipsReader(businessDataSource, tableMetaInfo)) .reader(companyVesselRelationshipsReader(businessDataSource, tableMetaInfo))
.processor(new CompanyVesselRelationshipsProcessor()) .processor(new CompanyVesselRelationshipsProcessor())
.writer(new CompanyVesselRelationshipsWriter(shipRepository, transactionManager, subChunkSize)) .writer(new CompanyVesselRelationshipsWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<CompanyVesselRelationshipsDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(companyVesselRelationshipsWriteListener()) // Write 완료 batch_flag 업데이트 .listener(companyVesselRelationshipsWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -732,12 +715,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step crewListSyncStep() { public Step crewListSyncStep() {
log.info("Step 생성: crewListSyncStep"); log.info("Step 생성: crewListSyncStep");
return new StepBuilder("crewListSyncStep", jobRepository) return new StepBuilder("crewListSyncStep", jobRepository)
.<CrewListDto, CrewListEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<CrewListDto, CrewListEntity>chunk(getChunkSize(), transactionManager)
.reader(crewListReader(businessDataSource, tableMetaInfo)) .reader(crewListReader(businessDataSource, tableMetaInfo))
.processor(new CrewListProcessor()) .processor(new CrewListProcessor())
.writer(new CrewListWriter(shipRepository, transactionManager, subChunkSize)) .writer(new CrewListWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<CrewListDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(crewListWriteListener()) // Write 완료 batch_flag 업데이트 .listener(crewListWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -746,12 +727,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step darkActivityConfirmedSyncStep() { public Step darkActivityConfirmedSyncStep() {
log.info("Step 생성: darkActivityConfirmedSyncStep"); log.info("Step 생성: darkActivityConfirmedSyncStep");
return new StepBuilder("darkActivityConfirmedSyncStep", jobRepository) return new StepBuilder("darkActivityConfirmedSyncStep", jobRepository)
.<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(getChunkSize(), transactionManager)
.reader(darkActivityConfirmedReader(businessDataSource, tableMetaInfo)) .reader(darkActivityConfirmedReader(businessDataSource, tableMetaInfo))
.processor(new DarkActivityConfirmedProcessor()) .processor(new DarkActivityConfirmedProcessor())
.writer(new DarkActivityConfirmedWriter(shipRepository, transactionManager, subChunkSize)) .writer(new DarkActivityConfirmedWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<DarkActivityConfirmedDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(darkActivityConfirmedWriteListener()) // Write 완료 batch_flag 업데이트 .listener(darkActivityConfirmedWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -760,12 +739,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step flagHistorySyncStep() { public Step flagHistorySyncStep() {
log.info("Step 생성: flagHistorySyncStep"); log.info("Step 생성: flagHistorySyncStep");
return new StepBuilder("flagHistorySyncStep", jobRepository) return new StepBuilder("flagHistorySyncStep", jobRepository)
.<FlagHistoryDto, FlagHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<FlagHistoryDto, FlagHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(flagHistoryReader(businessDataSource, tableMetaInfo)) .reader(flagHistoryReader(businessDataSource, tableMetaInfo))
.processor(new FlagHistoryProcessor()) .processor(new FlagHistoryProcessor())
.writer(new FlagHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new FlagHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<FlagHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(flagHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(flagHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -774,12 +751,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step groupBeneficialOwnerHistorySyncStep() { public Step groupBeneficialOwnerHistorySyncStep() {
log.info("Step 생성: groupBeneficialOwnerHistorySyncStep"); log.info("Step 생성: groupBeneficialOwnerHistorySyncStep");
return new StepBuilder("groupBeneficialOwnerHistorySyncStep", jobRepository) return new StepBuilder("groupBeneficialOwnerHistorySyncStep", jobRepository)
.<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(groupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo)) .reader(groupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new GroupBeneficialOwnerHistoryProcessor()) .processor(new GroupBeneficialOwnerHistoryProcessor())
.writer(new GroupBeneficialOwnerHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new GroupBeneficialOwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<GroupBeneficialOwnerHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(groupBeneficialOwnerHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(groupBeneficialOwnerHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -788,12 +763,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step iceClassSyncStep() { public Step iceClassSyncStep() {
log.info("Step 생성: iceClassSyncStep"); log.info("Step 생성: iceClassSyncStep");
return new StepBuilder("iceClassSyncStep", jobRepository) return new StepBuilder("iceClassSyncStep", jobRepository)
.<IceClassDto, IceClassEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<IceClassDto, IceClassEntity>chunk(getChunkSize(), transactionManager)
.reader(iceClassReader(businessDataSource, tableMetaInfo)) .reader(iceClassReader(businessDataSource, tableMetaInfo))
.processor(new IceClassProcessor()) .processor(new IceClassProcessor())
.writer(new IceClassWriter(shipRepository, transactionManager, subChunkSize)) .writer(new IceClassWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<IceClassDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(iceClassWriteListener()) // Write 완료 batch_flag 업데이트 .listener(iceClassWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -802,12 +775,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step nameHistorySyncStep() { public Step nameHistorySyncStep() {
log.info("Step 생성: nameHistorySyncStep"); log.info("Step 생성: nameHistorySyncStep");
return new StepBuilder("nameHistorySyncStep", jobRepository) return new StepBuilder("nameHistorySyncStep", jobRepository)
.<NameHistoryDto, NameHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<NameHistoryDto, NameHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(nameHistoryReader(businessDataSource, tableMetaInfo)) .reader(nameHistoryReader(businessDataSource, tableMetaInfo))
.processor(new NameHistoryProcessor()) .processor(new NameHistoryProcessor())
.writer(new NameHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new NameHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<NameHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(nameHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(nameHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -816,12 +787,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step operatorHistorySyncStep() { public Step operatorHistorySyncStep() {
log.info("Step 생성: operatorHistorySyncStep"); log.info("Step 생성: operatorHistorySyncStep");
return new StepBuilder("operatorHistorySyncStep", jobRepository) return new StepBuilder("operatorHistorySyncStep", jobRepository)
.<OperatorHistoryDto, OperatorHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<OperatorHistoryDto, OperatorHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(operatorHistoryReader(businessDataSource, tableMetaInfo)) .reader(operatorHistoryReader(businessDataSource, tableMetaInfo))
.processor(new OperatorHistoryProcessor()) .processor(new OperatorHistoryProcessor())
.writer(new OperatorHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new OperatorHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<OperatorHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(operatorHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(operatorHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -830,12 +799,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step pandIHistorySyncStep() { public Step pandIHistorySyncStep() {
log.info("Step 생성: pandIHistorySyncStep"); log.info("Step 생성: pandIHistorySyncStep");
return new StepBuilder("pandIHistorySyncStep", jobRepository) return new StepBuilder("pandIHistorySyncStep", jobRepository)
.<PandIHistoryDto, PandIHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<PandIHistoryDto, PandIHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(pandIHistoryReader(businessDataSource, tableMetaInfo)) .reader(pandIHistoryReader(businessDataSource, tableMetaInfo))
.processor(new PandIHistoryProcessor()) .processor(new PandIHistoryProcessor())
.writer(new PandIHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new PandIHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<PandIHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(pandIHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(pandIHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -844,12 +811,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step safetyManagementCertificateHistSyncStep() { public Step safetyManagementCertificateHistSyncStep() {
log.info("Step 생성: safetyManagementCertificateHistSyncStep"); log.info("Step 생성: safetyManagementCertificateHistSyncStep");
return new StepBuilder("safetyManagementCertificateHistSyncStep", jobRepository) return new StepBuilder("safetyManagementCertificateHistSyncStep", jobRepository)
.<SafetyManagementCertificateHistDto, SafetyManagementCertificateHistEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<SafetyManagementCertificateHistDto, SafetyManagementCertificateHistEntity>chunk(getChunkSize(), transactionManager)
.reader(safetyManagementCertificateHistReader(businessDataSource, tableMetaInfo)) .reader(safetyManagementCertificateHistReader(businessDataSource, tableMetaInfo))
.processor(new SafetyManagementCertificateHistProcessor()) .processor(new SafetyManagementCertificateHistProcessor())
.writer(new SafetyManagementCertificateHistWriter(shipRepository, transactionManager, subChunkSize)) .writer(new SafetyManagementCertificateHistWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<SafetyManagementCertificateHistDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(safetyManagementCertificateHistWriteListener()) // Write 완료 batch_flag 업데이트 .listener(safetyManagementCertificateHistWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -858,12 +823,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step shipManagerHistorySyncStep() { public Step shipManagerHistorySyncStep() {
log.info("Step 생성: shipManagerHistorySyncStep"); log.info("Step 생성: shipManagerHistorySyncStep");
return new StepBuilder("shipManagerHistorySyncStep", jobRepository) return new StepBuilder("shipManagerHistorySyncStep", jobRepository)
.<ShipManagerHistoryDto, ShipManagerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<ShipManagerHistoryDto, ShipManagerHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(shipManagerHistoryReader(businessDataSource, tableMetaInfo)) .reader(shipManagerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new ShipManagerHistoryProcessor()) .processor(new ShipManagerHistoryProcessor())
.writer(new ShipManagerHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new ShipManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<ShipManagerHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(shipManagerHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(shipManagerHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -872,12 +835,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step sisterShipLinksSyncStep() { public Step sisterShipLinksSyncStep() {
log.info("Step 생성: sisterShipLinksSyncStep"); log.info("Step 생성: sisterShipLinksSyncStep");
return new StepBuilder("sisterShipLinksSyncStep", jobRepository) return new StepBuilder("sisterShipLinksSyncStep", jobRepository)
.<SisterShipLinksDto, SisterShipLinksEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<SisterShipLinksDto, SisterShipLinksEntity>chunk(getChunkSize(), transactionManager)
.reader(sisterShipLinksReader(businessDataSource, tableMetaInfo)) .reader(sisterShipLinksReader(businessDataSource, tableMetaInfo))
.processor(new SisterShipLinksProcessor()) .processor(new SisterShipLinksProcessor())
.writer(new SisterShipLinksWriter(shipRepository, transactionManager, subChunkSize)) .writer(new SisterShipLinksWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<SisterShipLinksDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(sisterShipLinksWriteListener()) // Write 완료 batch_flag 업데이트 .listener(sisterShipLinksWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -886,12 +847,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step specialFeatureSyncStep() { public Step specialFeatureSyncStep() {
log.info("Step 생성: specialFeatureSyncStep"); log.info("Step 생성: specialFeatureSyncStep");
return new StepBuilder("specialFeatureSyncStep", jobRepository) return new StepBuilder("specialFeatureSyncStep", jobRepository)
.<SpecialFeatureDto, SpecialFeatureEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<SpecialFeatureDto, SpecialFeatureEntity>chunk(getChunkSize(), transactionManager)
.reader(specialFeatureReader(businessDataSource, tableMetaInfo)) .reader(specialFeatureReader(businessDataSource, tableMetaInfo))
.processor(new SpecialFeatureProcessor()) .processor(new SpecialFeatureProcessor())
.writer(new SpecialFeatureWriter(shipRepository, transactionManager, subChunkSize)) .writer(new SpecialFeatureWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<SpecialFeatureDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(specialFeatureWriteListener()) // Write 완료 batch_flag 업데이트 .listener(specialFeatureWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -900,12 +859,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step statusHistorySyncStep() { public Step statusHistorySyncStep() {
log.info("Step 생성: statusHistorySyncStep"); log.info("Step 생성: statusHistorySyncStep");
return new StepBuilder("statusHistorySyncStep", jobRepository) return new StepBuilder("statusHistorySyncStep", jobRepository)
.<StatusHistoryDto, StatusHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<StatusHistoryDto, StatusHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(statusHistoryReader(businessDataSource, tableMetaInfo)) .reader(statusHistoryReader(businessDataSource, tableMetaInfo))
.processor(new StatusHistoryProcessor()) .processor(new StatusHistoryProcessor())
.writer(new StatusHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new StatusHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<StatusHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(statusHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(statusHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -914,12 +871,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step stowageCommoditySyncStep() { public Step stowageCommoditySyncStep() {
log.info("Step 생성: stowageCommoditySyncStep"); log.info("Step 생성: stowageCommoditySyncStep");
return new StepBuilder("stowageCommoditySyncStep", jobRepository) return new StepBuilder("stowageCommoditySyncStep", jobRepository)
.<StowageCommodityDto, StowageCommodityEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<StowageCommodityDto, StowageCommodityEntity>chunk(getChunkSize(), transactionManager)
.reader(stowageCommodityReader(businessDataSource, tableMetaInfo)) .reader(stowageCommodityReader(businessDataSource, tableMetaInfo))
.processor(new StowageCommodityProcessor()) .processor(new StowageCommodityProcessor())
.writer(new StowageCommodityWriter(shipRepository, transactionManager, subChunkSize)) .writer(new StowageCommodityWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<StowageCommodityDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(stowageCommodityWriteListener()) // Write 완료 batch_flag 업데이트 .listener(stowageCommodityWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -928,12 +883,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step surveyDatesSyncStep() { public Step surveyDatesSyncStep() {
log.info("Step 생성: surveyDatesSyncStep"); log.info("Step 생성: surveyDatesSyncStep");
return new StepBuilder("surveyDatesSyncStep", jobRepository) return new StepBuilder("surveyDatesSyncStep", jobRepository)
.<SurveyDatesDto, SurveyDatesEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<SurveyDatesDto, SurveyDatesEntity>chunk(getChunkSize(), transactionManager)
.reader(surveyDatesReader(businessDataSource, tableMetaInfo)) .reader(surveyDatesReader(businessDataSource, tableMetaInfo))
.processor(new SurveyDatesProcessor()) .processor(new SurveyDatesProcessor())
.writer(new SurveyDatesWriter(shipRepository, transactionManager, subChunkSize)) .writer(new SurveyDatesWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<SurveyDatesDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(surveyDatesWriteListener()) // Write 완료 batch_flag 업데이트 .listener(surveyDatesWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -942,12 +895,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step surveyDatesHistoryUniqueSyncStep() { public Step surveyDatesHistoryUniqueSyncStep() {
log.info("Step 생성: surveyDatesHistoryUniqueSyncStep"); log.info("Step 생성: surveyDatesHistoryUniqueSyncStep");
return new StepBuilder("surveyDatesHistoryUniqueSyncStep", jobRepository) return new StepBuilder("surveyDatesHistoryUniqueSyncStep", jobRepository)
.<SurveyDatesHistoryUniqueDto, SurveyDatesHistoryUniqueEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<SurveyDatesHistoryUniqueDto, SurveyDatesHistoryUniqueEntity>chunk(getChunkSize(), transactionManager)
.reader(surveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo)) .reader(surveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo))
.processor(new SurveyDatesHistoryUniqueProcessor()) .processor(new SurveyDatesHistoryUniqueProcessor())
.writer(new SurveyDatesHistoryUniqueWriter(shipRepository, transactionManager, subChunkSize)) .writer(new SurveyDatesHistoryUniqueWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<SurveyDatesHistoryUniqueDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(surveyDatesHistoryUniqueWriteListener()) // Write 완료 batch_flag 업데이트 .listener(surveyDatesHistoryUniqueWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -956,12 +907,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step technicalManagerHistorySyncStep() { public Step technicalManagerHistorySyncStep() {
log.info("Step 생성: technicalManagerHistorySyncStep"); log.info("Step 생성: technicalManagerHistorySyncStep");
return new StepBuilder("technicalManagerHistorySyncStep", jobRepository) return new StepBuilder("technicalManagerHistorySyncStep", jobRepository)
.<TechnicalManagerHistoryDto, TechnicalManagerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<TechnicalManagerHistoryDto, TechnicalManagerHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(technicalManagerHistoryReader(businessDataSource, tableMetaInfo)) .reader(technicalManagerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new TechnicalManagerHistoryProcessor()) .processor(new TechnicalManagerHistoryProcessor())
.writer(new TechnicalManagerHistoryWriter(shipRepository, transactionManager, subChunkSize)) .writer(new TechnicalManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<TechnicalManagerHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(technicalManagerHistoryWriteListener()) // Write 완료 batch_flag 업데이트 .listener(technicalManagerHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -970,12 +919,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step thrustersSyncStep() { public Step thrustersSyncStep() {
log.info("Step 생성: thrustersSyncStep"); log.info("Step 생성: thrustersSyncStep");
return new StepBuilder("thrustersSyncStep", jobRepository) return new StepBuilder("thrustersSyncStep", jobRepository)
.<ThrustersDto, ThrustersEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<ThrustersDto, ThrustersEntity>chunk(getChunkSize(), transactionManager)
.reader(thrustersReader(businessDataSource, tableMetaInfo)) .reader(thrustersReader(businessDataSource, tableMetaInfo))
.processor(new ThrustersProcessor()) .processor(new ThrustersProcessor())
.writer(new ThrustersWriter(shipRepository, transactionManager, subChunkSize)) .writer(new ThrustersWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<ThrustersDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(thrustersWriteListener()) // Write 완료 batch_flag 업데이트 .listener(thrustersWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }
@ -984,12 +931,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step tbCompanyDetailSyncStep() { public Step tbCompanyDetailSyncStep() {
log.info("Step 생성: tbCompanyDetailSyncStep"); log.info("Step 생성: tbCompanyDetailSyncStep");
return new StepBuilder("tbCompanyDetailSyncStep", jobRepository) return new StepBuilder("tbCompanyDetailSyncStep", jobRepository)
.<TbCompanyDetailDto, TbCompanyDetailEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager) .<TbCompanyDetailDto, TbCompanyDetailEntity>chunk(getChunkSize(), transactionManager)
.reader(tbCompanyDetailReader(businessDataSource, tableMetaInfo)) .reader(tbCompanyDetailReader(businessDataSource, tableMetaInfo))
.processor(new TbCompanyDetailProcessor()) .processor(new TbCompanyDetailProcessor())
.writer(new TbCompanyDetailWriter(shipRepository, transactionManager, subChunkSize)) .writer(new TbCompanyDetailWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<TbCompanyDetailDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(tbCompanyDetailWriteListener()) // Write 완료 batch_flag 업데이트 .listener(tbCompanyDetailWriteListener()) // Write 완료 batch_flag 업데이트
.build(); .build();
} }

파일 보기

@ -1,73 +1,37 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class BareboatCharterHistoryReader implements ItemReader<BareboatCharterHistoryDto> { public class BareboatCharterHistoryReader extends BaseSyncReader<BareboatCharterHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<BareboatCharterHistoryDto> allDataBuffer = new ArrayList<>();
public BareboatCharterHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public BareboatCharterHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public BareboatCharterHistoryDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceBareboatCharterHistory;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected BareboatCharterHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return BareboatCharterHistoryDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceBareboatCharterHistory), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .bbctrSeq(rs.getString("bbctr_seq"))
} .efectStaDay(rs.getString("efect_sta_day"))
.bbctrCompanyCd(rs.getString("bbctr_company_cd"))
if (nextTargetId != null) { .bbctrCompany(rs.getString("bbctr_company"))
log.info("[BareboatCharterHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .build();
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceBareboatCharterHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return BareboatCharterHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.bbctrSeq(rs.getString("bbctr_seq"))
.efectStaDay(rs.getString("efect_sta_day"))
.bbctrCompanyCd(rs.getString("bbctr_company_cd"))
.bbctrCompany(rs.getString("bbctr_company"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceBareboatCharterHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,73 +1,37 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Callsign/MMSI history sync reader.
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class CallsignAndMmsiHistoryReader extends BaseSyncReader<CallsignAndMmsiHistoryDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public CallsignAndMmsiHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (callsign/MMSI history). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceCallsignAndMmsiHistory;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     */
    @Override
    protected CallsignAndMmsiHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return CallsignAndMmsiHistoryDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .shipIdntfSeq(rs.getString("ship_idntf_seq"))
                .efectStaDay(rs.getString("efect_sta_day"))
                .clsgnNo(rs.getString("clsgn_no"))
                .mmsiNo(rs.getString("mmsi_no"))
                .build();
    }
}

파일 보기

@ -1,76 +1,40 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ClassHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Classification-society (class) history sync reader.
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class ClassHistoryReader extends BaseSyncReader<ClassHistoryDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public ClassHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (class history). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceClassHistory;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     */
    @Override
    protected ClassHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return ClassHistoryDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .clficHstrySeq(rs.getString("clfic_hstry_seq"))
                .efectStaDay(rs.getString("efect_sta_day"))
                .clficCd(rs.getString("clfic_cd"))
                .clficId(rs.getString("clfic_id"))
                .clficAstnNm(rs.getString("clfic_asctn_nm"))
                .clficHasYn(rs.getString("clfic_has_yn"))
                .nowYn(rs.getString("now_yn"))
                .build();
    }
}

파일 보기

@ -1,89 +1,53 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.CompanyVesselRelationshipsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Company-vessel relationship sync reader (owner / operator / manager roles).
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class CompanyVesselRelationshipsReader extends BaseSyncReader<CompanyVesselRelationshipsDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public CompanyVesselRelationshipsReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (company-vessel relationships). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceCompanyVesselRelationships;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     * Columns come in code/name pairs for each company role.
     */
    @Override
    protected CompanyVesselRelationshipsDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return CompanyVesselRelationshipsDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .doccHasCompanyCd(rs.getString("docc_has_company_cd"))
                .doccHasCompany(rs.getString("docc_has_company"))
                .groupActlOwnr(rs.getString("group_actl_ownr"))
                .groupActlOwnrCd(rs.getString("group_actl_ownr_cd"))
                .shipOperator(rs.getString("ship_operator"))
                .shipOperatorCd(rs.getString("ship_operator_cd"))
                .rgOwnr(rs.getString("rg_ownr"))
                .rgOwnrCd(rs.getString("rg_ownr_cd"))
                .shipMngCompany(rs.getString("ship_mng_company"))
                .shipMngCompanyCd(rs.getString("ship_mng_company_cd"))
                .techMngCompany(rs.getString("tech_mng_company"))
                .techMngCompanyCd(rs.getString("tech_mng_company_cd"))
                .doccGroup(rs.getString("docc_group"))
                .doccGroupCd(rs.getString("docc_group_cd"))
                .shipOperatorGroup(rs.getString("ship_operator_group"))
                .shipOperatorGroupCd(rs.getString("ship_operator_group_cd"))
                .shipMngCompanyGroup(rs.getString("ship_mng_company_group"))
                .shipMngCompanyGroupCd(rs.getString("ship_mng_company_group_cd"))
                .techMngCompanyGroup(rs.getString("tech_mng_company_group"))
                .techMngCompanyGroupCd(rs.getString("tech_mng_company_group_cd"))
                .build();
    }
}

파일 보기

@ -1,80 +1,44 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.CrewListDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Crew list sync reader.
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class CrewListReader extends BaseSyncReader<CrewListDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public CrewListReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (crew list). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceCrewList;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     * Crew-count columns are read as BigDecimal to keep NULLs distinguishable.
     */
    @Override
    protected CrewListDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return CrewListDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .crewId(rs.getString("crew_id"))
                .shipNm(rs.getString("ship_nm"))
                .ntnlty(rs.getString("ntnlty"))
                .crewRstrYmd(rs.getString("crew_rstr_ymd"))
                .oaCrewCnt(rs.getBigDecimal("oa_crew_cnt"))
                .genCrewCnt(rs.getBigDecimal("gen_crew_cnt"))
                .offcrCnt(rs.getBigDecimal("offcr_cnt"))
                .apprOffcrCnt(rs.getBigDecimal("appr_offcr_cnt"))
                .trneCnt(rs.getBigDecimal("trne_cnt"))
                .embrkMntncCrewCnt(rs.getBigDecimal("embrk_mntnc_crew_cnt"))
                .unrprtCnt(rs.getBigDecimal("unrprt_cnt"))
                .build();
    }
}

파일 보기

@ -1,94 +1,58 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.DarkActivityConfirmedDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Confirmed dark-activity (AIS gap) sync reader.
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class DarkActivityConfirmedReader extends BaseSyncReader<DarkActivityConfirmedDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public DarkActivityConfirmedReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (confirmed dark activity). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceDarkActivityConfirmed;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     *
     * <p>Numeric columns use {@code rs.getObject(col, Type.class)} instead of
     * the primitive getters so SQL NULL maps to Java {@code null} rather than 0;
     * timestamp columns are null-checked before conversion to LocalDateTime.
     */
    @Override
    protected DarkActivityConfirmedDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return DarkActivityConfirmedDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .mmsiNo(rs.getString("mmsi_no"))
                .darkHr(rs.getObject("dark_hr", Long.class))
                .darkActvStatus(rs.getObject("dark_actv_status", Long.class))
                .shipNm(rs.getString("ship_nm"))
                .darkActv(rs.getString("dark_actv"))
                .zoneId(rs.getObject("zone_id", Long.class))
                .zoneNm(rs.getString("zone_nm"))
                .zoneCountry(rs.getString("zone_country"))
                .darkTmUtc(rs.getTimestamp("dark_tm_utc") != null ? rs.getTimestamp("dark_tm_utc").toLocalDateTime() : null)
                .darkLat(rs.getObject("dark_lat", Double.class))
                .darkLon(rs.getObject("dark_lon", Double.class))
                .darkSpd(rs.getObject("dark_spd", Double.class))
                .darkHeading(rs.getObject("dark_heading", Double.class))
                .darkDraft(rs.getObject("dark_draft", Double.class))
                .nxtCptrTmUtc(rs.getTimestamp("nxt_cptr_tm_utc") != null ? rs.getTimestamp("nxt_cptr_tm_utc").toLocalDateTime() : null)
                .nxtCptrSpd(rs.getObject("nxt_cptr_spd", Double.class))
                .nxtCptrDraft(rs.getObject("nxt_cptr_draft", Double.class))
                .nxtCptrHeading(rs.getObject("nxt_cptr_heading", Double.class))
                .darkRptDestAis(rs.getString("dark_rpt_dest_ais"))
                .lastPrtcllPort(rs.getString("last_prtcll_port"))
                .lastPoccntryCd(rs.getString("last_poccntry_cd"))
                .lastPoccntry(rs.getString("last_poccntry"))
                .nxtCptrLat(rs.getObject("nxt_cptr_lat", Double.class))
                .nxtCptrLon(rs.getObject("nxt_cptr_lon", Double.class))
                .nxtCptrRptDestAis(rs.getString("nxt_cptr_rpt_dest_ais"))
                .build();
    }
}

파일 보기

@ -1,73 +1,37 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.FlagHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Flag (registry country) history sync reader.
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class FlagHistoryReader extends BaseSyncReader<FlagHistoryDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public FlagHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (flag history). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceFlagHistory;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     */
    @Override
    protected FlagHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return FlagHistoryDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .shipCountryHstrySeq(rs.getString("ship_country_hstry_seq"))
                .efectStaDay(rs.getString("efect_sta_day"))
                .countryCd(rs.getString("country_cd"))
                .country(rs.getString("country"))
                .build();
    }
}

파일 보기

@ -1,74 +1,38 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.GroupBeneficialOwnerHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Group beneficial-owner history sync reader.
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class GroupBeneficialOwnerHistoryReader extends BaseSyncReader<GroupBeneficialOwnerHistoryDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public GroupBeneficialOwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (group beneficial-owner history). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceGroupBeneficialOwnerHistory;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     */
    @Override
    protected GroupBeneficialOwnerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return GroupBeneficialOwnerHistoryDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .shipGroupRevnOwnrHstrySeq(rs.getString("ship_group_revn_ownr_hstry_seq"))
                .efectStaDay(rs.getString("efect_sta_day"))
                .groupActlOwnrCd(rs.getString("group_actl_ownr_cd"))
                .groupActlOwnr(rs.getString("group_actl_ownr"))
                .companyStatus(rs.getString("company_status"))
                .build();
    }
}

파일 보기

@ -1,71 +1,35 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.IceClassDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Ice-class sync reader.
 *
 * <p>Delegates the two-phase, job_execution_id-grouped read loop to
 * {@link BaseSyncReader}; this subclass only supplies the source table
 * name and the row-to-DTO mapping.
 */
@Slf4j
public class IceClassReader extends BaseSyncReader<IceClassDto> {

    /**
     * @param businessDataSource business-side DataSource used for source queries
     * @param tableMetaInfo      table-name metadata holder
     */
    public IceClassReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table name for this reader (ice class). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceIceClass;
    }

    /**
     * Maps the current ResultSet row to a DTO, stamping the group's
     * job_execution_id so downstream steps can track the originating run.
     */
    @Override
    protected IceClassDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return IceClassDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .iceGrdCd(rs.getString("ice_grd_cd"))
                .iceGrd(rs.getString("ice_grd"))
                .build();
    }
}

파일 보기

@ -1,72 +1,36 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class NameHistoryReader implements ItemReader<NameHistoryDto> { public class NameHistoryReader extends BaseSyncReader<NameHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<NameHistoryDto> allDataBuffer = new ArrayList<>();
public NameHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public NameHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public NameHistoryDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceNameHistory;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected NameHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return NameHistoryDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceNameHistory), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipNmChgHstrySeq(rs.getString("ship_nm_chg_hstry_seq"))
} .efectStaDay(rs.getString("efect_sta_day"))
.shipNm(rs.getString("ship_nm"))
if (nextTargetId != null) { .build();
log.info("[NameHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceNameHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return NameHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipNmChgHstrySeq(rs.getString("ship_nm_chg_hstry_seq"))
.efectStaDay(rs.getString("efect_sta_day"))
.shipNm(rs.getString("ship_nm"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceNameHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,74 +1,38 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class OperatorHistoryReader implements ItemReader<OperatorHistoryDto> { public class OperatorHistoryReader extends BaseSyncReader<OperatorHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<OperatorHistoryDto> allDataBuffer = new ArrayList<>();
public OperatorHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public OperatorHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public OperatorHistoryDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceOperatorHistory;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected OperatorHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return OperatorHistoryDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceOperatorHistory), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipOperatorHstrySeq(rs.getString("ship_operator_hstry_seq"))
} .efectStaDay(rs.getString("efect_sta_day"))
.shipOperatorCd(rs.getString("ship_operator_cd"))
if (nextTargetId != null) { .shipOperator(rs.getString("ship_operator"))
log.info("[OperatorHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .companyStatus(rs.getString("company_status"))
.build();
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOperatorHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return OperatorHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipOperatorHstrySeq(rs.getString("ship_operator_hstry_seq"))
.efectStaDay(rs.getString("efect_sta_day"))
.shipOperatorCd(rs.getString("ship_operator_cd"))
.shipOperator(rs.getString("ship_operator"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOperatorHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,74 +1,38 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class OwnerHistoryReader implements ItemReader<OwnerHistoryDto> { public class OwnerHistoryReader extends BaseSyncReader<OwnerHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<OwnerHistoryDto> allDataBuffer = new ArrayList<>();
public OwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public OwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public OwnerHistoryDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceOwnerHistory;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected OwnerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return OwnerHistoryDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceOwnerHistory), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipOwnrHstrySeq(rs.getString("ship_ownr_hstry_seq"))
} .efectStaDay(rs.getString("efect_sta_day"))
.ownrCd(rs.getString("ownr_cd"))
if (nextTargetId != null) { .ownr(rs.getString("ownr"))
log.info("[OwnerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .companyStatus(rs.getString("company_status"))
.build();
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOwnerHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return OwnerHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipOwnrHstrySeq(rs.getString("ship_ownr_hstry_seq"))
.efectStaDay(rs.getString("efect_sta_day"))
.ownrCd(rs.getString("ownr_cd"))
.ownr(rs.getString("ownr"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOwnerHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,74 +1,38 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class PandIHistoryReader implements ItemReader<PandIHistoryDto> { public class PandIHistoryReader extends BaseSyncReader<PandIHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PandIHistoryDto> allDataBuffer = new ArrayList<>();
public PandIHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public PandIHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public PandIHistoryDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourcePandiHistory;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected PandIHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return PandIHistoryDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePandiHistory), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipPrtcRpnHstrySeq(rs.getString("ship_prtc_rpn_hstry_seq"))
} .efectStaDay(rs.getString("efect_sta_day"))
.pniClubCd(rs.getString("pni_club_cd"))
if (nextTargetId != null) { .pniClubNm(rs.getString("pni_club_nm"))
log.info("[PandIHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .src(rs.getString("src"))
.build();
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePandiHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return PandIHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipPrtcRpnHstrySeq(rs.getString("ship_prtc_rpn_hstry_seq"))
.efectStaDay(rs.getString("efect_sta_day"))
.pniClubCd(rs.getString("pni_club_cd"))
.pniClubNm(rs.getString("pni_club_nm"))
.src(rs.getString("src"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePandiHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,82 +1,46 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto; import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class SafetyManagementCertificateHistReader implements ItemReader<SafetyManagementCertificateHistDto> { public class SafetyManagementCertificateHistReader extends BaseSyncReader<SafetyManagementCertificateHistDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SafetyManagementCertificateHistDto> allDataBuffer = new ArrayList<>();
public SafetyManagementCertificateHistReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public SafetyManagementCertificateHistReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public SafetyManagementCertificateHistDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceSafetyManagementCertificateHist;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected SafetyManagementCertificateHistDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return SafetyManagementCertificateHistDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSafetyManagementCertificateHist), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipSftyMngEvdcSeq(rs.getString("ship_sfty_mng_evdc_seq"))
} .smgrcSrngEngines(rs.getString("smgrc_srng_engines"))
.smgrcSysCatConvArbt(rs.getString("smgrc_sys_cat_conv_arbt"))
if (nextTargetId != null) { .smgrcExpryDay(rs.getString("smgrc_expry_day"))
log.info("[SafetyManagementCertificateHistReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .smgrcIssueDay(rs.getString("smgrc_issue_day"))
.smgrcDoccCompany(rs.getString("smgrc_docc_company"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSafetyManagementCertificateHist); .smgrcNtnlty(rs.getString("smgrc_ntnlty"))
final Long targetId = nextTargetId; .smgrcIssueEngines(rs.getString("smgrc_issue_engines"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .smgrcEtcDesc(rs.getString("smgrc_etc_desc"))
return SafetyManagementCertificateHistDto.builder() .smgrcShipNm(rs.getString("smgrc_ship_nm"))
.jobExecutionId(targetId) .smgrcShipType(rs.getString("smgrc_ship_type"))
.datasetVer(rs.getString("dataset_ver")) .smgrcSrc(rs.getString("smgrc_src"))
.imoNo(rs.getString("imo_no")) .smgrcCompanyCd(rs.getString("smgrc_company_cd"))
.shipSftyMngEvdcSeq(rs.getString("ship_sfty_mng_evdc_seq")) .build();
.smgrcSrngEngines(rs.getString("smgrc_srng_engines"))
.smgrcSysCatConvArbt(rs.getString("smgrc_sys_cat_conv_arbt"))
.smgrcExpryDay(rs.getString("smgrc_expry_day"))
.smgrcIssueDay(rs.getString("smgrc_issue_day"))
.smgrcDoccCompany(rs.getString("smgrc_docc_company"))
.smgrcNtnlty(rs.getString("smgrc_ntnlty"))
.smgrcIssueEngines(rs.getString("smgrc_issue_engines"))
.smgrcEtcDesc(rs.getString("smgrc_etc_desc"))
.smgrcShipNm(rs.getString("smgrc_ship_nm"))
.smgrcShipType(rs.getString("smgrc_ship_type"))
.smgrcSrc(rs.getString("smgrc_src"))
.smgrcCompanyCd(rs.getString("smgrc_company_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSafetyManagementCertificateHist);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,84 +1,44 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto; import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class ShipAddInfoReader implements ItemReader<ShipAddInfoDto> { public class ShipAddInfoReader extends BaseSyncReader<ShipAddInfoDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipAddInfoDto> allDataBuffer = new ArrayList<>();
public ShipAddInfoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public ShipAddInfoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public ShipAddInfoDto read() throws Exception { protected String getSourceTable() {
// 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나" 데이터를 긁어옵니다. return tableMetaInfo.sourceAdditionalShipsData;
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null; // 진짜 데이터가 없으면 종료
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
// 1. 아직 'N' 최소 ID 하나를 찾음 protected ShipAddInfoDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Long nextTargetId = null; return ShipAddInfoDto.builder()
try { .jobExecutionId(targetId)
nextTargetId = businessJdbcTemplate.queryForObject( .datasetVer(rs.getString("dataset_ver"))
CommonSql.getNextTargetQuery(tableMetaInfo.sourceAdditionalShipsData), Long.class); .imoNo(rs.getString("imo_no"))
} catch (Exception e) { .shipEml(rs.getString("ship_eml"))
return; // 대상 없음 .maxDpwt(rs.getString("max_dpwt"))
} .maxDrillDepth(rs.getString("max_drill_depth"))
.drillBrg(rs.getString("drill_brg"))
if (nextTargetId != null) { .oceanProdFacility(rs.getString("ocean_prod_facility"))
log.info("[ShipAddInfoReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .deckHeatExch(rs.getString("deck_heat_exch"))
.dehtexMatral(rs.getString("dehtex_matral"))
// 2. 해당 ID의 데이터만 버퍼에 로드 .portblTwinDeck(rs.getString("portbl_twin_deck"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceAdditionalShipsData); .fixedTwinDeck(rs.getString("fixed_twin_deck"))
final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로 .shipSatlitCommId(rs.getString("ship_satlit_comm_id"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .shipSatlitCmrspCd(rs.getString("ship_satlit_cmrsp_cd"))
return ShipAddInfoDto.builder() .build();
.jobExecutionId(targetId) // job_execution_id 설정
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipEml(rs.getString("ship_eml"))
.maxDpwt(rs.getString("max_dpwt"))
.maxDrillDepth(rs.getString("max_drill_depth"))
.drillBrg(rs.getString("drill_brg"))
.oceanProdFacility(rs.getString("ocean_prod_facility"))
.deckHeatExch(rs.getString("deck_heat_exch"))
.dehtexMatral(rs.getString("dehtex_matral"))
.portblTwinDeck(rs.getString("portbl_twin_deck"))
.fixedTwinDeck(rs.getString("fixed_twin_deck"))
.shipSatlitCommId(rs.getString("ship_satlit_comm_id"))
.shipSatlitCmrspCd(rs.getString("ship_satlit_cmrsp_cd"))
.build();
}, nextTargetId);
// 3. 해당 ID 'P' 변경
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceAdditionalShipsData);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,156 +1,115 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto; import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class ShipDataReader implements ItemReader<ShipInfoMstDto> { public class ShipDataReader extends BaseSyncReader<ShipInfoMstDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipInfoMstDto> allDataBuffer = new ArrayList<>();
public ShipDataReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public ShipDataReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public ShipInfoMstDto read() throws Exception { protected String getSourceTable() {
// 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나" 데이터를 긁어옵니다. return tableMetaInfo.sourceShipDetailData;
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null; // 진짜 데이터가 없으면 종료
}
return allDataBuffer.remove(0);
} }
@Override
private void fetchNextGroup() { protected ShipInfoMstDto mapRow(ResultSet rs, Long targetId) throws SQLException {
// 1. 아직 'N' 최소 ID 하나를 찾음 return ShipInfoMstDto.builder()
Long nextTargetId = null; .jobExecutionId(targetId)
try { .datasetVer(rs.getString("dataset_ver"))
nextTargetId = businessJdbcTemplate.queryForObject(CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipDetailData), Long.class); .imoNo(rs.getString("imo_no"))
} catch (Exception e) { .mmsiNo(rs.getString("mmsi_no"))
return; // 대상 없음 .shipNm(rs.getString("ship_nm"))
} .clsgnNo(rs.getString("clsgn_no"))
.frmlaRegNo(rs.getString("frmla_reg_no"))
if (nextTargetId != null) { .fshrPrmtNo(rs.getString("fshr_prmt_no"))
log.info("[ShipDataReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .shipNtnlty(rs.getString("ship_ntnlty"))
.ntnltyCd(rs.getString("ntnlty_cd"))
// 2. 해당 ID의 데이터만 버퍼에 로드 .loadPort(rs.getString("load_port"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipDetailData); .clfic(rs.getString("clfic"))
final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로 .clficDesc(rs.getString("clfic_desc"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .shipStatus(rs.getString("ship_status"))
return ShipInfoMstDto.builder() .shipTypeGroup(rs.getString("ship_type_group"))
.jobExecutionId(targetId) // job_execution_id 설정 .shipTypeLvTwo(rs.getString("ship_type_lv_two"))
.datasetVer(rs.getString("dataset_ver")) .shipTypeLvThr(rs.getString("ship_type_lv_thr"))
.imoNo(rs.getString("imo_no")) .shipTypeLvFour(rs.getString("ship_type_lv_four"))
.mmsiNo(rs.getString("mmsi_no")) .shipTypeLvFive(rs.getString("ship_type_lv_five"))
.shipNm(rs.getString("ship_nm")) .shipTypeLvFiveDtldType(rs.getString("ship_type_lv_five_dtld_type"))
.clsgnNo(rs.getString("clsgn_no")) .shipTypeLvFiveHullType(rs.getString("ship_type_lv_five_hull_type"))
.frmlaRegNo(rs.getString("frmla_reg_no")) .shipTypeLvFiveLwrnkGroup(rs.getString("ship_type_lv_five_lwrnk_group"))
.fshrPrmtNo(rs.getString("fshr_prmt_no")) .buildYy(rs.getString("build_yy"))
.shipNtnlty(rs.getString("ship_ntnlty")) .buildYmd(rs.getString("build_ymd"))
.ntnltyCd(rs.getString("ntnlty_cd")) .shpyrd(rs.getString("shpyrd"))
.loadPort(rs.getString("load_port")) .shpyrdOffclNm(rs.getString("shpyrd_offcl_nm"))
.clfic(rs.getString("clfic")) .shpyrdBuildNo(rs.getString("shpyrd_build_no"))
.clficDesc(rs.getString("clfic_desc")) .buildDesc(rs.getString("build_desc"))
.shipStatus(rs.getString("ship_status")) .modfHstryDesc(rs.getString("modf_hstry_desc"))
.shipTypeGroup(rs.getString("ship_type_group")) .whlnthLoa(rs.getString("whlnth_loa"))
.shipTypeLvTwo(rs.getString("ship_type_lv_two")) .regLength(rs.getString("reg_length"))
.shipTypeLvThr(rs.getString("ship_type_lv_thr")) .lbp(rs.getString("lbp"))
.shipTypeLvFour(rs.getString("ship_type_lv_four")) .formnBreadth(rs.getString("formn_breadth"))
.shipTypeLvFive(rs.getString("ship_type_lv_five")) .maxBreadth(rs.getString("max_breadth"))
.shipTypeLvFiveDtldType(rs.getString("ship_type_lv_five_dtld_type")) .depth(rs.getString("depth"))
.shipTypeLvFiveHullType(rs.getString("ship_type_lv_five_hull_type")) .draft(rs.getString("draft"))
.shipTypeLvFiveLwrnkGroup(rs.getString("ship_type_lv_five_lwrnk_group")) .keelMastHg(rs.getString("keel_mast_hg"))
.buildYy(rs.getString("build_yy")) .bulbBow(rs.getString("bulb_bow"))
.buildYmd(rs.getString("build_ymd")) .gt(rs.getString("gt"))
.shpyrd(rs.getString("shpyrd")) .ntTon(rs.getString("nt_ton"))
.shpyrdOffclNm(rs.getString("shpyrd_offcl_nm")) .dwt(rs.getString("dwt"))
.shpyrdBuildNo(rs.getString("shpyrd_build_no")) .displacement(rs.getString("displacement"))
.buildDesc(rs.getString("build_desc")) .lightDisplacementTon(rs.getString("light_displacement_ton"))
.modfHstryDesc(rs.getString("modf_hstry_desc")) .cgt(rs.getString("cgt"))
.whlnthLoa(rs.getString("whlnth_loa")) .fldngOneCmPerTonTpci(rs.getString("fldng_one_cm_per_ton_tpci"))
.regLength(rs.getString("reg_length")) .tonEfectDay(rs.getString("ton_efect_day"))
.lbp(rs.getString("lbp")) .calcfrmDwt(rs.getString("calcfrm_dwt"))
.formnBreadth(rs.getString("formn_breadth")) .teuCnt(rs.getString("teu_cnt"))
.maxBreadth(rs.getString("max_breadth")) .teuCapacity(rs.getString("teu_capacity"))
.depth(rs.getString("depth")) .grainCapacityM3(rs.getString("grain_capacity_m3"))
.draft(rs.getString("draft")) .baleCapacity(rs.getString("bale_capacity"))
.keelMastHg(rs.getString("keel_mast_hg")) .liquidCapacity(rs.getString("liquid_capacity"))
.bulbBow(rs.getString("bulb_bow")) .gasM3(rs.getString("gas_m3"))
.gt(rs.getString("gt")) .insulatedM3(rs.getString("insulated_m3"))
.ntTon(rs.getString("nt_ton")) .passengerCapacity(rs.getString("passenger_capacity"))
.dwt(rs.getString("dwt")) .bollardPull(rs.getString("bollard_pull"))
.displacement(rs.getString("displacement")) .svcSpd(rs.getString("svc_spd"))
.lightDisplacementTon(rs.getString("light_displacement_ton")) .mainEngineType(rs.getString("main_engine_type"))
.cgt(rs.getString("cgt")) .fuelCnsmpSpdOne(rs.getString("fuel_cnsmp_spd_one"))
.fldngOneCmPerTonTpci(rs.getString("fldng_one_cm_per_ton_tpci")) .fuelCnsmpamtValOne(rs.getString("fuel_cnsmpamt_val_one"))
.tonEfectDay(rs.getString("ton_efect_day")) .fuelCnsmpSpdTwo(rs.getString("fuel_cnsmp_spd_two"))
.calcfrmDwt(rs.getString("calcfrm_dwt")) .fuelCnsmpamtValTwo(rs.getString("fuel_cnsmpamt_val_two"))
.teuCnt(rs.getString("teu_cnt")) .totalFuelCapacityM3(rs.getString("total_fuel_capacity_m3"))
.teuCapacity(rs.getString("teu_capacity")) .blrMftr(rs.getString("blr_mftr"))
.grainCapacityM3(rs.getString("grain_capacity_m3")) .proplrMftr(rs.getString("proplr_mftr"))
.baleCapacity(rs.getString("bale_capacity")) .cargoCapacityM3Desc(rs.getString("cargo_capacity_m3_desc"))
.liquidCapacity(rs.getString("liquid_capacity")) .eqpmntDesc(rs.getString("eqpmnt_desc"))
.gasM3(rs.getString("gas_m3")) .hdn(rs.getString("hdn"))
.insulatedM3(rs.getString("insulated_m3")) .hatcheDesc(rs.getString("hatche_desc"))
.passengerCapacity(rs.getString("passenger_capacity")) .laneDoorRampDesc(rs.getString("lane_door_ramp_desc"))
.bollardPull(rs.getString("bollard_pull")) .spcTankDesc(rs.getString("spc_tank_desc"))
.svcSpd(rs.getString("svc_spd")) .tankDesc(rs.getString("tank_desc"))
.mainEngineType(rs.getString("main_engine_type")) .prmovrDesc(rs.getString("prmovr_desc"))
.fuelCnsmpSpdOne(rs.getString("fuel_cnsmp_spd_one")) .prmovrOvrvwDesc(rs.getString("prmovr_ovrvw_desc"))
.fuelCnsmpamtValOne(rs.getString("fuel_cnsmpamt_val_one")) .auxDesc(rs.getString("aux_desc"))
.fuelCnsmpSpdTwo(rs.getString("fuel_cnsmp_spd_two")) .asstGnrtrDesc(rs.getString("asst_gnrtr_desc"))
.fuelCnsmpamtValTwo(rs.getString("fuel_cnsmpamt_val_two")) .fuelDesc(rs.getString("fuel_desc"))
.totalFuelCapacityM3(rs.getString("total_fuel_capacity_m3")) .docCompanyCd(rs.getString("doc_company_cd"))
.blrMftr(rs.getString("blr_mftr")) .groupActlOwnrCompanyCd(rs.getString("group_actl_ownr_company_cd"))
.proplrMftr(rs.getString("proplr_mftr")) .operator(rs.getString("operator"))
.cargoCapacityM3Desc(rs.getString("cargo_capacity_m3_desc")) .operatorCompanyCd(rs.getString("operator_company_cd"))
.eqpmntDesc(rs.getString("eqpmnt_desc")) .shipMngrCompanyCd(rs.getString("ship_mngr_company_cd"))
.hdn(rs.getString("hdn")) .techMngrCd(rs.getString("tech_mngr_cd"))
.hatcheDesc(rs.getString("hatche_desc")) .regShponrCd(rs.getString("reg_shponr_cd"))
.laneDoorRampDesc(rs.getString("lane_door_ramp_desc")) .lastMdfcnDt(rs.getString("last_mdfcn_dt"))
.spcTankDesc(rs.getString("spc_tank_desc")) .build();
.tankDesc(rs.getString("tank_desc"))
.prmovrDesc(rs.getString("prmovr_desc"))
.prmovrOvrvwDesc(rs.getString("prmovr_ovrvw_desc"))
.auxDesc(rs.getString("aux_desc"))
.asstGnrtrDesc(rs.getString("asst_gnrtr_desc"))
.fuelDesc(rs.getString("fuel_desc"))
.docCompanyCd(rs.getString("doc_company_cd"))
.groupActlOwnrCompanyCd(rs.getString("group_actl_ownr_company_cd"))
.operator(rs.getString("operator"))
.operatorCompanyCd(rs.getString("operator_company_cd"))
.shipMngrCompanyCd(rs.getString("ship_mngr_company_cd"))
.techMngrCd(rs.getString("tech_mngr_cd"))
.regShponrCd(rs.getString("reg_shponr_cd"))
.lastMdfcnDt(rs.getString("last_mdfcn_dt"))
.build();
}, nextTargetId);
// 3. 해당 ID 'P' 변경
updateBatchProcessing(nextTargetId);
}
} }
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipDetailData);
businessJdbcTemplate.update(sql, targetExecutionId);
}
} }

파일 보기

@ -1,74 +1,38 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class ShipManagerHistoryReader implements ItemReader<ShipManagerHistoryDto> { public class ShipManagerHistoryReader extends BaseSyncReader<ShipManagerHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipManagerHistoryDto> allDataBuffer = new ArrayList<>();
public ShipManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public ShipManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public ShipManagerHistoryDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceShipManagerHistory;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected ShipManagerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return ShipManagerHistoryDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipManagerHistory), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipMngCompanySeq(rs.getString("ship_mng_company_seq"))
} .efectStaDay(rs.getString("efect_sta_day"))
.shipMngrCd(rs.getString("ship_mngr_cd"))
if (nextTargetId != null) { .shipMngr(rs.getString("ship_mngr"))
log.info("[ShipManagerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .companyStatus(rs.getString("company_status"))
.build();
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipManagerHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return ShipManagerHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipMngCompanySeq(rs.getString("ship_mng_company_seq"))
.efectStaDay(rs.getString("efect_sta_day"))
.shipMngrCd(rs.getString("ship_mngr_cd"))
.shipMngr(rs.getString("ship_mngr"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipManagerHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,66 +1,34 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto; import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class SisterShipLinksReader implements ItemReader<SisterShipLinksDto> { public class SisterShipLinksReader extends BaseSyncReader<SisterShipLinksDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SisterShipLinksDto> allDataBuffer = new ArrayList<>();
public SisterShipLinksReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public SisterShipLinksReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public SisterShipLinksDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceSisterShipLinks;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected SisterShipLinksDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return SisterShipLinksDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSisterShipLinks), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .linkImoNo(rs.getString("link_imo_no"))
} .build();
if (nextTargetId != null) {
log.info("[SisterShipLinksReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSisterShipLinks);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return SisterShipLinksDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.linkImoNo(rs.getString("link_imo_no"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSisterShipLinks);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,68 +1,36 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto; import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class SpecialFeatureReader implements ItemReader<SpecialFeatureDto> { public class SpecialFeatureReader extends BaseSyncReader<SpecialFeatureDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SpecialFeatureDto> allDataBuffer = new ArrayList<>();
public SpecialFeatureReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public SpecialFeatureReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public SpecialFeatureDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceSpecialFeature;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected SpecialFeatureDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return SpecialFeatureDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSpecialFeature), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipSpcFetrSeq(rs.getString("ship_spc_fetr_seq"))
} .spcMttrCd(rs.getString("spc_mttr_cd"))
.spcMttr(rs.getString("spc_mttr"))
if (nextTargetId != null) { .build();
log.info("[SpecialFeatureReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSpecialFeature);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return SpecialFeatureDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipSpcFetrSeq(rs.getString("ship_spc_fetr_seq"))
.spcMttrCd(rs.getString("spc_mttr_cd"))
.spcMttr(rs.getString("spc_mttr"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSpecialFeature);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,69 +1,37 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto; import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class StatusHistoryReader implements ItemReader<StatusHistoryDto> { public class StatusHistoryReader extends BaseSyncReader<StatusHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<StatusHistoryDto> allDataBuffer = new ArrayList<>();
public StatusHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public StatusHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public StatusHistoryDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceStatusHistory;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected StatusHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return StatusHistoryDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStatusHistory), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipStatusHstrySeq(rs.getString("ship_status_hstry_seq"))
} .statusCd(rs.getString("status_cd"))
.statusChgYmd(rs.getString("status_chg_ymd"))
if (nextTargetId != null) { .status(rs.getString("status"))
log.info("[StatusHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .build();
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStatusHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return StatusHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipStatusHstrySeq(rs.getString("ship_status_hstry_seq"))
.statusCd(rs.getString("status_cd"))
.statusChgYmd(rs.getString("status_chg_ymd"))
.status(rs.getString("status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStatusHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,70 +1,38 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto; import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class StowageCommodityReader implements ItemReader<StowageCommodityDto> { public class StowageCommodityReader extends BaseSyncReader<StowageCommodityDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<StowageCommodityDto> allDataBuffer = new ArrayList<>();
public StowageCommodityReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public StowageCommodityReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public StowageCommodityDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceStowageCommodity;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected StowageCommodityDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return StowageCommodityDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStowageCommodity), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .shipCargoCapacitySeq(rs.getString("ship_cargo_capacity_seq"))
} .capacityCd(rs.getString("capacity_cd"))
.capacityCdDesc(rs.getString("capacity_cd_desc"))
if (nextTargetId != null) { .cargoCd(rs.getString("cargo_cd"))
log.info("[StowageCommodityReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .cargoNm(rs.getString("cargo_nm"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStowageCommodity); .build();
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return StowageCommodityDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipCargoCapacitySeq(rs.getString("ship_cargo_capacity_seq"))
.capacityCd(rs.getString("capacity_cd"))
.capacityCdDesc(rs.getString("capacity_cd_desc"))
.cargoCd(rs.getString("cargo_cd"))
.cargoNm(rs.getString("cargo_nm"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStowageCommodity);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,69 +1,37 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto; import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class SurveyDatesHistoryUniqueReader implements ItemReader<SurveyDatesHistoryUniqueDto> { public class SurveyDatesHistoryUniqueReader extends BaseSyncReader<SurveyDatesHistoryUniqueDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SurveyDatesHistoryUniqueDto> allDataBuffer = new ArrayList<>();
public SurveyDatesHistoryUniqueReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public SurveyDatesHistoryUniqueReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public SurveyDatesHistoryUniqueDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceSurveyDatesHistoryUnique;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected SurveyDatesHistoryUniqueDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return SurveyDatesHistoryUniqueDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .clficCd(rs.getString("clfic_cd"))
} .inspectionType(rs.getString("inspection_type"))
.inspectionYmd(rs.getString("inspection_ymd"))
if (nextTargetId != null) { .clfic(rs.getString("clfic"))
log.info("[SurveyDatesHistoryUniqueReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .build();
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
return SurveyDatesHistoryUniqueDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.clficCd(rs.getString("clfic_cd"))
.inspectionType(rs.getString("inspection_type"))
.inspectionYmd(rs.getString("inspection_ymd"))
.clfic(rs.getString("clfic"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,72 +1,40 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto; import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class SurveyDatesReader implements ItemReader<SurveyDatesDto> { public class SurveyDatesReader extends BaseSyncReader<SurveyDatesDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SurveyDatesDto> allDataBuffer = new ArrayList<>();
public SurveyDatesReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public SurveyDatesReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public SurveyDatesDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceSurveyDates;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected SurveyDatesDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return SurveyDatesDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDates), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .imoNo(rs.getString("imo_no"))
return; .clficCd(rs.getString("clfic_cd"))
} .clfic(rs.getString("clfic"))
.dckngInspection(rs.getString("dckng_inspection"))
if (nextTargetId != null) { .fxtmInspection(rs.getString("fxtm_inspection"))
log.info("[SurveyDatesReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .annualInspection(rs.getString("annual_inspection"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDates); .mchnFxtmInspectionYmd(rs.getString("mchn_fxtm_inspection_ymd"))
final Long targetId = nextTargetId; .tlsftInspectionYmd(rs.getString("tlsft_inspection_ymd"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .build();
return SurveyDatesDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.clficCd(rs.getString("clfic_cd"))
.clfic(rs.getString("clfic"))
.dckngInspection(rs.getString("dckng_inspection"))
.fxtmInspection(rs.getString("fxtm_inspection"))
.annualInspection(rs.getString("annual_inspection"))
.mchnFxtmInspectionYmd(rs.getString("mchn_fxtm_inspection_ymd"))
.tlsftInspectionYmd(rs.getString("tlsft_inspection_ymd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDates);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,93 +1,61 @@
package com.snp.batch.jobs.datasync.batch.ship.reader; package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql; import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo; import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto; import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.util.ArrayList; import java.sql.ResultSet;
import java.util.List; import java.sql.SQLException;
@Slf4j @Slf4j
public class TbCompanyDetailReader implements ItemReader<TbCompanyDetailDto> { public class TbCompanyDetailReader extends BaseSyncReader<TbCompanyDetailDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<TbCompanyDetailDto> allDataBuffer = new ArrayList<>();
public TbCompanyDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) { public TbCompanyDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource); super(businessDataSource, tableMetaInfo);
this.tableMetaInfo = tableMetaInfo;
} }
@Override @Override
public TbCompanyDetailDto read() throws Exception { protected String getSourceTable() {
if (allDataBuffer.isEmpty()) { return tableMetaInfo.sourceTbCompanyDetail;
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
} }
private void fetchNextGroup() { @Override
Long nextTargetId = null; protected TbCompanyDetailDto mapRow(ResultSet rs, Long targetId) throws SQLException {
try { return TbCompanyDetailDto.builder()
nextTargetId = businessJdbcTemplate.queryForObject( .jobExecutionId(targetId)
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyDetail), Long.class); .datasetVer(rs.getString("dataset_ver"))
} catch (Exception e) { .companyCd(rs.getString("company_cd"))
return; .lastUpdYmd(rs.getString("last_upd_ymd"))
} .careCd(rs.getString("care_cd"))
.companyStatus(rs.getString("company_status"))
if (nextTargetId != null) { .fullNm(rs.getString("full_nm"))
log.info("[TbCompanyDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId); .companyNameAbbr(rs.getString("company_name_abbr"))
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyDetail); .companyFndnYmd(rs.getString("company_fndn_ymd"))
final Long targetId = nextTargetId; .prntCompanyCd(rs.getString("prnt_company_cd"))
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> { .countryNm(rs.getString("country_nm"))
return TbCompanyDetailDto.builder() .ctyNm(rs.getString("cty_nm"))
.jobExecutionId(targetId) .oaAddr(rs.getString("oa_addr"))
.datasetVer(rs.getString("dataset_ver")) .emlAddr(rs.getString("eml_addr"))
.companyCd(rs.getString("company_cd")) .tel(rs.getString("tel"))
.lastUpdYmd(rs.getString("last_upd_ymd")) .faxNo(rs.getString("fax_no"))
.careCd(rs.getString("care_cd")) .wbstUrl(rs.getString("wbst_url"))
.companyStatus(rs.getString("company_status")) .countryCtrl(rs.getString("country_ctrl"))
.fullNm(rs.getString("full_nm")) .countryCtrlCd(rs.getString("country_ctrl_cd"))
.companyNameAbbr(rs.getString("company_name_abbr")) .countryReg(rs.getString("country_reg"))
.companyFndnYmd(rs.getString("company_fndn_ymd")) .countryRegCd(rs.getString("country_reg_cd"))
.prntCompanyCd(rs.getString("prnt_company_cd")) .regionCd(rs.getString("region_cd"))
.countryNm(rs.getString("country_nm")) .distNm(rs.getString("dist_nm"))
.ctyNm(rs.getString("cty_nm")) .distNo(rs.getString("dist_no"))
.oaAddr(rs.getString("oa_addr")) .mailAddrRear(rs.getString("mail_addr_rear"))
.emlAddr(rs.getString("eml_addr")) .mailAddrFrnt(rs.getString("mail_addr_frnt"))
.tel(rs.getString("tel")) .poBox(rs.getString("po_box"))
.faxNo(rs.getString("fax_no")) .dtlAddrOne(rs.getString("dtl_addr_one"))
.wbstUrl(rs.getString("wbst_url")) .dtlAddrTwo(rs.getString("dtl_addr_two"))
.countryCtrl(rs.getString("country_ctrl")) .dtlAddrThr(rs.getString("dtl_addr_thr"))
.countryCtrlCd(rs.getString("country_ctrl_cd")) .tlx(rs.getString("tlx"))
.countryReg(rs.getString("country_reg")) .build();
.countryRegCd(rs.getString("country_reg_cd"))
.regionCd(rs.getString("region_cd"))
.distNm(rs.getString("dist_nm"))
.distNo(rs.getString("dist_no"))
.mailAddrRear(rs.getString("mail_addr_rear"))
.mailAddrFrnt(rs.getString("mail_addr_frnt"))
.poBox(rs.getString("po_box"))
.dtlAddrOne(rs.getString("dtl_addr_one"))
.dtlAddrTwo(rs.getString("dtl_addr_two"))
.dtlAddrThr(rs.getString("dtl_addr_thr"))
.tlx(rs.getString("tlx"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyDetail);
businessJdbcTemplate.update(sql, targetExecutionId);
} }
} }

파일 보기

@ -1,70 +1,38 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.TechnicalManagerHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Reader for technical-manager-history sync data.
 *
 * <p>All chunking / fetch / batch-flag logic lives in {@link BaseSyncReader};
 * this subclass only supplies the source table name and the row-to-DTO mapping.
 */
@Slf4j
public class TechnicalManagerHistoryReader extends BaseSyncReader<TechnicalManagerHistoryDto> {

    public TechnicalManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource,
                                         TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table for this reader (technical manager history). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceTechnicalManagerHistory;
    }

    /**
     * Maps the current {@link ResultSet} row to a {@link TechnicalManagerHistoryDto}.
     *
     * @param rs       result set positioned on the current row
     * @param targetId job_execution_id of the group being read; stored on the DTO
     * @return populated DTO
     * @throws SQLException if a column cannot be read
     */
    @Override
    protected TechnicalManagerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return TechnicalManagerHistoryDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .shipTechMngCompanySeq(rs.getString("ship_tech_mng_company_seq"))
                .efectStaDay(rs.getString("efect_sta_day"))
                .techMngrCd(rs.getString("tech_mngr_cd"))
                .techMngr(rs.getString("tech_mngr"))
                .companyStatus(rs.getString("company_status"))
                .build();
    }
}

파일 보기

@ -1,73 +1,41 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;

import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ThrustersDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;

import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Reader for thruster sync data.
 *
 * <p>All chunking / fetch / batch-flag logic lives in {@link BaseSyncReader};
 * this subclass only supplies the source table name and the row-to-DTO mapping.
 */
@Slf4j
public class ThrustersReader extends BaseSyncReader<ThrustersDto> {

    public ThrustersReader(@Qualifier("businessDataSource") DataSource businessDataSource,
                           TableMetaInfo tableMetaInfo) {
        super(businessDataSource, tableMetaInfo);
    }

    /** Source table for this reader (thrusters). */
    @Override
    protected String getSourceTable() {
        return tableMetaInfo.sourceThrusters;
    }

    /**
     * Maps the current {@link ResultSet} row to a {@link ThrustersDto}.
     *
     * <p>Count and power columns are read as {@code BigDecimal} to preserve
     * whatever precision the source columns carry.
     *
     * @param rs       result set positioned on the current row
     * @param targetId job_execution_id of the group being read; stored on the DTO
     * @return populated DTO
     * @throws SQLException if a column cannot be read
     */
    @Override
    protected ThrustersDto mapRow(ResultSet rs, Long targetId) throws SQLException {
        return ThrustersDto.builder()
                .jobExecutionId(targetId)
                .datasetVer(rs.getString("dataset_ver"))
                .imoNo(rs.getString("imo_no"))
                .thrstrSeq(rs.getString("thrstr_seq"))
                .thrstrTypeCd(rs.getString("thrstr_type_cd"))
                .thrstrType(rs.getString("thrstr_type"))
                .thrstrCnt(rs.getBigDecimal("thrstr_cnt"))
                .thrstrPosition(rs.getString("thrstr_position"))
                .thrstrPowerBhp(rs.getBigDecimal("thrstr_power_bhp"))
                .thrstrPowerKw(rs.getBigDecimal("thrstr_power_kw"))
                .instlMth(rs.getString("instl_mth"))
                .build();
    }
}