feat(동기화현황): 동기화 현황 메뉴 추가 및 배치 Reader 리팩토링 (#1) #5

병합
HYOJIN feature/ISSUE-1-sync-status-menu 에서 develop 로 5 commits 를 머지했습니다 2026-03-24 17:29:07 +09:00
64개의 변경된 파일, 1526줄 추가 그리고 3121줄 삭제
Showing only changes of commit edef10e4bc - Show all commits

파일 보기

@ -0,0 +1,127 @@
package com.snp.batch.common.batch.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.JobExecutionGroupable;
import com.snp.batch.common.util.TableMetaInfo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
/**
 * Abstract base reader for sync jobs.
 *
 * Splits chunks on job_execution_id boundaries using a two-phase read:
 * phase 1 (peek): query only the next group id; if it differs from the
 * current group, return null to end the current chunk.
 * phase 2 (fetch): load that group's rows and transition batch_flag to NP.
 *
 * Replaces GroupByExecutionIdPolicy — the reader itself controls chunk
 * boundaries, so steps can use a plain item-count chunk size.
 *
 * NOTE(review): this reader is stateful and therefore not thread-safe;
 * it assumes a single-threaded step (the standard Spring Batch default).
 *
 * @param <T> DTO type (must implement JobExecutionGroupable)
 */
@Slf4j
public abstract class BaseSyncReader<T extends JobExecutionGroupable> implements ItemReader<T> {
    protected final TableMetaInfo tableMetaInfo;
    protected final JdbcTemplate businessJdbcTemplate;
    // FIFO buffer of the current group's rows. ArrayDeque gives O(1)
    // pollFirst(); the previous ArrayList.remove(0) was O(n) per item,
    // i.e. accidental O(n^2) over a chunk.
    private final Deque<T> buffer = new ArrayDeque<>();
    // Group currently being emitted; null when between groups.
    private Long currentGroupId = null;
    // Next group detected at a chunk boundary, loaded on the next read() call.
    private Long pendingGroupId = null;
    protected BaseSyncReader(DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
        this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
        this.tableMetaInfo = tableMetaInfo;
    }
    /**
     * Source table name (e.g. tableMetaInfo.sourceIceClass).
     */
    protected abstract String getSourceTable();
    /**
     * Maps the current ResultSet row to a DTO.
     *
     * @param rs       ResultSet positioned on the current row
     * @param targetId job_execution_id (set on the DTO's jobExecutionId field)
     * @return mapped DTO
     * @throws SQLException on column access failure
     */
    protected abstract T mapRow(ResultSet rs, Long targetId) throws SQLException;
    /**
     * Log prefix (e.g. "IceClassReader"); defaults to the concrete class name.
     */
    protected String getLogPrefix() {
        return getClass().getSimpleName();
    }
    /**
     * Returns the next item, or null at a group boundary / when no data remains.
     * Returning null ends the current chunk; Spring Batch calls read() again
     * for the next chunk, which then loads the pending group.
     */
    @Override
    public T read() throws Exception {
        // 1. Buffer exhausted: decide which group to load next.
        if (buffer.isEmpty()) {
            if (pendingGroupId != null) {
                // A following group was detected at the previous chunk
                // boundary — load it now without re-peeking.
                fetchAndTransition(pendingGroupId);
                currentGroupId = pendingGroupId;
                pendingGroupId = null;
            } else {
                // Peek the next group id only (no data load, no flag update).
                Long nextId = peekNextGroupId();
                if (nextId == null) {
                    // No more data to process.
                    currentGroupId = null;
                    return null;
                }
                if (currentGroupId != null && !currentGroupId.equals(nextId)) {
                    // Different group found: end the current chunk here and
                    // handle the new group in the next chunk.
                    pendingGroupId = nextId;
                    currentGroupId = null;
                    return null;
                }
                // Same group, or first call: load it.
                fetchAndTransition(nextId);
                currentGroupId = nextId;
            }
        }
        // pollFirst() returns null when the fetch produced no rows,
        // which correctly signals end of input to the step.
        return buffer.pollFirst();
    }
    /**
     * Peeks the next pending job_execution_id (no data load, no transition).
     * Returns null when nothing is pending.
     */
    private Long peekNextGroupId() {
        try {
            return businessJdbcTemplate.queryForObject(
                    CommonSql.getNextTargetQuery(getSourceTable()), Long.class);
        } catch (Exception e) {
            // queryForObject throws (EmptyResultDataAccessException) when no
            // row matches — that is the normal "no pending group" case. Keep
            // the best-effort null return, but leave a trace instead of
            // swallowing the exception silently.
            log.debug("[{}] no next group id available: {}", getLogPrefix(), e.getMessage());
            return null;
        }
    }
    /**
     * Loads all rows of the given group into the buffer and transitions
     * their batch_flag to NP.
     */
    private void fetchAndTransition(Long targetId) {
        log.info("[{}] 다음 처리 대상 ID 발견: {}", getLogPrefix(), targetId);
        String sql = CommonSql.getTargetDataQuery(getSourceTable());
        List<T> rows = businessJdbcTemplate.query(sql, (rs, rowNum) ->
                mapRow(rs, targetId), targetId);
        buffer.clear();
        buffer.addAll(rows);
        // Transition batch_flag to NP so the group is not picked up again.
        String updateSql = CommonSql.getProcessBatchQuery(getSourceTable());
        businessJdbcTemplate.update(updateSql, targetId);
    }
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.code.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
@ -129,12 +126,10 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
public Step flagCodeSyncStep() {
log.info("Step 생성: flagCodeSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<FlagCodeDto, FlagCodeEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<FlagCodeDto, FlagCodeEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<FlagCodeDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(flagCodeWriteListener())
.build();
}
@ -143,12 +138,10 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
public Step stat5CodeSyncStep() {
log.info("Step 생성: stat5CodeSyncStep");
return new StepBuilder("stat5CodeSyncStep", jobRepository)
.<Stat5CodeDto, Stat5CodeEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<Stat5CodeDto, Stat5CodeEntity>chunk(getChunkSize(), transactionManager)
.reader(stat5CodeReader(businessDataSource, tableMetaInfo))
.processor(new Stat5CodeProcessor())
.writer(new Stat5CodeWriter(codeRepository))
.listener(new GroupByExecutionIdReadListener<Stat5CodeDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(stat5CodeWriteListener())
.build();
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.code.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class FlagCodeReader implements ItemReader<FlagCodeDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<FlagCodeDto> allDataBuffer = new ArrayList<>();
public class FlagCodeReader extends BaseSyncReader<FlagCodeDto> {
public FlagCodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public FlagCodeDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceFlagCode;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFlagCode), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[FlagCodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFlagCode);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected FlagCodeDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return FlagCodeDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -56,13 +32,5 @@ public class FlagCodeReader implements ItemReader<FlagCodeDto> {
.isoTwoCd(rs.getString("iso_two_cd"))
.isoThrCd(rs.getString("iso_thr_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFlagCode);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.code.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class Stat5CodeReader implements ItemReader<Stat5CodeDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<Stat5CodeDto> allDataBuffer = new ArrayList<>();
public class Stat5CodeReader extends BaseSyncReader<Stat5CodeDto> {
public Stat5CodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public Stat5CodeDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceStat5Code;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStat5Code), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[Stat5CodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStat5Code);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected Stat5CodeDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return Stat5CodeDto.builder()
.jobExecutionId(targetId)
.lvOne(rs.getString("lv_one"))
@ -63,13 +39,5 @@ public class Stat5CodeReader implements ItemReader<Stat5CodeDto> {
.dtlDesc(rs.getString("dtl_desc"))
.rlsIem(rs.getString("rls_iem"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStat5Code);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity;
@ -109,12 +106,10 @@ public class CompanyComplianceSyncJobConfig extends BaseJobConfig<CompanyComplia
public Step companyComplianceSyncStep() {
log.info("Step 생성: companyComplianceSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<CompanyComplianceDto, CompanyComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<CompanyComplianceDto, CompanyComplianceEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<CompanyComplianceDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(companyComplianceWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity;
@ -109,12 +106,10 @@ public class ShipComplianceSyncJobConfig extends BaseJobConfig<ShipComplianceDto
public Step shipComplianceSyncStep() {
log.info("Step 생성: shipComplianceSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<ShipComplianceDto, ShipComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<ShipComplianceDto, ShipComplianceEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<ShipComplianceDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(shipComplianceWriteListener())
.build();
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.compliance.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class CompanyComplianceReader implements ItemReader<CompanyComplianceDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<CompanyComplianceDto> allDataBuffer = new ArrayList<>();
public class CompanyComplianceReader extends BaseSyncReader<CompanyComplianceDto> {
public CompanyComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public CompanyComplianceDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTbCompanyComplianceInfo;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyComplianceInfo), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[CompanyComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected CompanyComplianceDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp lstMdfcnDtTs = rs.getTimestamp("lst_mdfcn_dt");
return CompanyComplianceDto.builder()
@ -70,13 +46,5 @@ public class CompanyComplianceReader implements ItemReader<CompanyComplianceDto>
.companyUnSanctionList(rs.getObject("company_un_sanction_list") != null ? rs.getLong("company_un_sanction_list") : null)
.prntCompanyComplianceRisk(rs.getObject("prnt_company_compliance_risk") != null ? rs.getLong("prnt_company_compliance_risk") : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.compliance.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class ShipComplianceReader implements ItemReader<ShipComplianceDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipComplianceDto> allDataBuffer = new ArrayList<>();
public class ShipComplianceReader extends BaseSyncReader<ShipComplianceDto> {
public ShipComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public ShipComplianceDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceCompliance;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCompliance), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[ShipComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCompliance);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected ShipComplianceDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
return ShipComplianceDto.builder()
@ -89,13 +65,5 @@ public class ShipComplianceReader implements ItemReader<ShipComplianceDto> {
.shipSwiSanctionList(rs.getString("ship_swi_sanction_list"))
.shipUnSanctionList(rs.getString("ship_un_sanction_list"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCompliance);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.event.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
@ -175,12 +172,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventSyncStep() {
log.info("Step 생성: eventSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<EventDto, EventEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<EventDto, EventEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<EventDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventWriteListener())
.build();
}
@ -189,12 +184,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventCargoSyncStep() {
log.info("Step 생성: eventCargoSyncStep");
return new StepBuilder("eventCargoSyncStep", jobRepository)
.<EventCargoDto, EventCargoEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<EventCargoDto, EventCargoEntity>chunk(getChunkSize(), transactionManager)
.reader(eventCargoReader(businessDataSource, tableMetaInfo))
.processor(new EventCargoProcessor())
.writer(new EventCargoWriter(eventRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<EventCargoDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventCargoWriteListener())
.build();
}
@ -203,12 +196,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventHumanCasualtySyncStep() {
log.info("Step 생성: eventHumanCasualtySyncStep");
return new StepBuilder("eventHumanCasualtySyncStep", jobRepository)
.<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(getChunkSize(), transactionManager)
.reader(eventHumanCasualtyReader(businessDataSource, tableMetaInfo))
.processor(new EventHumanCasualtyProcessor())
.writer(new EventHumanCasualtyWriter(eventRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<EventHumanCasualtyDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventHumanCasualtyWriteListener())
.build();
}
@ -217,12 +208,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventRelationshipSyncStep() {
log.info("Step 생성: eventRelationshipSyncStep");
return new StepBuilder("eventRelationshipSyncStep", jobRepository)
.<EventRelationshipDto, EventRelationshipEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<EventRelationshipDto, EventRelationshipEntity>chunk(getChunkSize(), transactionManager)
.reader(eventRelationshipReader(businessDataSource, tableMetaInfo))
.processor(new EventRelationshipProcessor())
.writer(new EventRelationshipWriter(eventRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<EventRelationshipDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(eventRelationshipWriteListener())
.build();
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.event.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class EventCargoReader implements ItemReader<EventCargoDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<EventCargoDto> allDataBuffer = new ArrayList<>();
public class EventCargoReader extends BaseSyncReader<EventCargoDto> {
public EventCargoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public EventCargoDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceEventCargo;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventCargo), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[EventCargoReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventCargo);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected EventCargoDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return EventCargoDto.builder()
.jobExecutionId(targetId)
.eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
@ -61,13 +37,5 @@ public class EventCargoReader implements ItemReader<EventCargoDto> {
.riskYn(rs.getString("risk_yn"))
.text(rs.getString("text"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventCargo);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.event.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventHumanCasualtyDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class EventHumanCasualtyReader implements ItemReader<EventHumanCasualtyDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<EventHumanCasualtyDto> allDataBuffer = new ArrayList<>();
public class EventHumanCasualtyReader extends BaseSyncReader<EventHumanCasualtyDto> {
public EventHumanCasualtyReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public EventHumanCasualtyDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceEventHumanCasualty;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventHumanCasualty), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[EventHumanCasualtyReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventHumanCasualty);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected EventHumanCasualtyDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return EventHumanCasualtyDto.builder()
.jobExecutionId(targetId)
.eventId(rs.getObject("event_id") != null ? rs.getLong("event_id") : null)
@ -56,13 +32,5 @@ public class EventHumanCasualtyReader implements ItemReader<EventHumanCasualtyDt
.qualfr(rs.getString("qualfr"))
.cnt(rs.getObject("cnt") != null ? rs.getLong("cnt") : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventHumanCasualty);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,31 @@
package com.snp.batch.jobs.datasync.batch.event.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.time.ZoneId;
@Slf4j
public class EventReader implements ItemReader<EventDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<EventDto> allDataBuffer = new ArrayList<>();
public class EventReader extends BaseSyncReader<EventDto> {
public EventReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public EventDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceEvent;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEvent), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[EventReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEvent);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected EventDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp pstgYmdTs = rs.getTimestamp("pstg_ymd");
Timestamp eventStartDayTs = rs.getTimestamp("event_start_day");
Timestamp eventEndDayTs = rs.getTimestamp("event_end_day");
@ -58,9 +35,9 @@ public class EventReader implements ItemReader<EventDto> {
.eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
.acdntId(rs.getString("acdnt_id"))
.imoNo(rs.getString("imo_no"))
.pstgYmd(pstgYmdTs != null ? pstgYmdTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
.eventStartDay(eventStartDayTs != null ? eventStartDayTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
.eventEndDay(eventEndDayTs != null ? eventEndDayTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
.pstgYmd(pstgYmdTs != null ? pstgYmdTs.toInstant().atZone(ZoneId.systemDefault()) : null)
.eventStartDay(eventStartDayTs != null ? eventStartDayTs.toInstant().atZone(ZoneId.systemDefault()) : null)
.eventEndDay(eventEndDayTs != null ? eventEndDayTs.toInstant().atZone(ZoneId.systemDefault()) : null)
.embrkTryYn(rs.getString("embrk_try_yn"))
.cargoCapacityStatusCd(rs.getString("cargo_capacity_status_cd"))
.acdntActn(rs.getString("acdnt_actn"))
@ -98,13 +75,5 @@ public class EventReader implements ItemReader<EventDto> {
.shipType(rs.getString("ship_type"))
.shipTypeNm(rs.getString("ship_type_nm"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEvent);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.event.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventRelationshipDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class EventRelationshipReader implements ItemReader<EventRelationshipDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<EventRelationshipDto> allDataBuffer = new ArrayList<>();
public class EventRelationshipReader extends BaseSyncReader<EventRelationshipDto> {
public EventRelationshipReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public EventRelationshipDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceEventRelationship;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventRelationship), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[EventRelationshipReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventRelationship);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected EventRelationshipDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return EventRelationshipDto.builder()
.jobExecutionId(targetId)
.acdntId(rs.getString("acdnt_id"))
@ -58,13 +34,5 @@ public class EventRelationshipReader implements ItemReader<EventRelationshipDto>
.relTypeCd(rs.getString("rel_type_cd"))
.relType(rs.getString("rel_type"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventRelationship);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.facility.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
@ -112,12 +109,10 @@ public class FacilitySyncJobConfig extends BaseJobConfig<FacilityPortDto, Facili
public Step facilityPortSyncStep() {
log.info("Step 생성: facilityPortSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<FacilityPortDto, FacilityPortEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<FacilityPortDto, FacilityPortEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<FacilityPortDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(facilityPortWriteListener())
.build();
}

파일 보기

@ -1,54 +1,31 @@
package com.snp.batch.jobs.datasync.batch.facility.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.time.ZoneId;
@Slf4j
public class FacilityPortReader implements ItemReader<FacilityPortDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<FacilityPortDto> allDataBuffer = new ArrayList<>();
public class FacilityPortReader extends BaseSyncReader<FacilityPortDto> {
public FacilityPortReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public FacilityPortDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceFacilityPort;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFacilityPort), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[FacilityPortReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFacilityPort);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected FacilityPortDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
Timestamp regYmdTs = rs.getTimestamp("reg_ymd");
@ -102,16 +79,8 @@ public class FacilityPortReader implements ItemReader<FacilityPortDto> {
.ecfrdPort(rs.getObject("ecfrd_port") != null ? rs.getBoolean("ecfrd_port") : null)
.emsnCtrlArea(rs.getObject("emsn_ctrl_area") != null ? rs.getBoolean("emsn_ctrl_area") : null)
.wsPort(rs.getObject("ws_port") != null ? rs.getLong("ws_port") : null)
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
.regYmd(regYmdTs != null ? regYmdTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toInstant().atZone(ZoneId.systemDefault()) : null)
.regYmd(regYmdTs != null ? regYmdTs.toInstant().atZone(ZoneId.systemDefault()) : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFacilityPort);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity;
@ -103,12 +100,10 @@ public class AnchorageCallSyncJobConfig extends BaseJobConfig<AnchorageCallDto,
public Step anchorageCallSyncStep() {
log.info("Step 생성: anchorageCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<AnchorageCallDto, AnchorageCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<AnchorageCallDto, AnchorageCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<AnchorageCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(anchorageCallWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity;
@ -103,12 +100,10 @@ public class BerthCallSyncJobConfig extends BaseJobConfig<BerthCallDto, BerthCal
public Step berthCallSyncStep() {
log.info("Step 생성: berthCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<BerthCallDto, BerthCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<BerthCallDto, BerthCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<BerthCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(berthCallWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity;
@ -103,12 +100,10 @@ public class CurrentlyAtSyncJobConfig extends BaseJobConfig<CurrentlyAtDto, Curr
public Step currentlyAtSyncStep() {
log.info("Step 생성: currentlyAtSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<CurrentlyAtDto, CurrentlyAtEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<CurrentlyAtDto, CurrentlyAtEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<CurrentlyAtDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(currentlyAtWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity;
@ -103,12 +100,10 @@ public class DestinationSyncJobConfig extends BaseJobConfig<DestinationDto, Dest
public Step destinationSyncStep() {
log.info("Step 생성: destinationSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<DestinationDto, DestinationEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<DestinationDto, DestinationEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<DestinationDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(destinationWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity;
@ -103,12 +100,10 @@ public class PortCallSyncJobConfig extends BaseJobConfig<PortCallDto, PortCallEn
public Step portCallSyncStep() {
log.info("Step 생성: portCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<PortCallDto, PortCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<PortCallDto, PortCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<PortCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(portCallWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity;
@ -103,12 +100,10 @@ public class StsOperationSyncJobConfig extends BaseJobConfig<StsOperationDto, St
public Step stsOperationSyncStep() {
log.info("Step 생성: stsOperationSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<StsOperationDto, StsOperationEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<StsOperationDto, StsOperationEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<StsOperationDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(stsOperationWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity;
@ -103,12 +100,10 @@ public class TerminalCallSyncJobConfig extends BaseJobConfig<TerminalCallDto, Te
public Step terminalCallSyncStep() {
log.info("Step 생성: terminalCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<TerminalCallDto, TerminalCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<TerminalCallDto, TerminalCallEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<TerminalCallDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(terminalCallWriteListener())
.build();
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity;
@ -103,12 +100,10 @@ public class TransitSyncJobConfig extends BaseJobConfig<TransitDto, TransitEntit
public Step transitSyncStep() {
log.info("Step 생성: transitSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<TransitDto, TransitEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<TransitDto, TransitEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<TransitDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(transitWriteListener())
.build();
}

파일 보기

@ -1,55 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class AnchorageCallReader implements ItemReader<AnchorageCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<AnchorageCallDto> allDataBuffer = new ArrayList<>();
public class AnchorageCallReader extends BaseSyncReader<AnchorageCallDto> {
public AnchorageCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public AnchorageCallDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTAnchorageCall;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTAnchorageCall), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[AnchorageCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTAnchorageCall);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected AnchorageCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
return AnchorageCallDto.builder()
@ -73,13 +48,5 @@ public class AnchorageCallReader implements ItemReader<AnchorageCallDto> {
.dest(rs.getString("dest"))
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTAnchorageCall);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class BerthCallReader implements ItemReader<BerthCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<BerthCallDto> allDataBuffer = new ArrayList<>();
public class BerthCallReader extends BaseSyncReader<BerthCallDto> {
public BerthCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public BerthCallDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTBerthCall;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTBerthCall), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[BerthCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTBerthCall);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected BerthCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
@ -73,13 +49,5 @@ public class BerthCallReader implements ItemReader<BerthCallDto> {
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTBerthCall);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class CurrentlyAtReader implements ItemReader<CurrentlyAtDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<CurrentlyAtDto> allDataBuffer = new ArrayList<>();
public class CurrentlyAtReader extends BaseSyncReader<CurrentlyAtDto> {
public CurrentlyAtReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public CurrentlyAtDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTCurrentlyAt;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTCurrentlyAt), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[CurrentlyAtReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTCurrentlyAt);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected CurrentlyAtDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
return CurrentlyAtDto.builder()
@ -75,13 +51,5 @@ public class CurrentlyAtReader implements ItemReader<CurrentlyAtDto> {
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.positionInfo(rs.getString("position_info"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTCurrentlyAt);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class DestinationReader implements ItemReader<DestinationDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<DestinationDto> allDataBuffer = new ArrayList<>();
public class DestinationReader extends BaseSyncReader<DestinationDto> {
public DestinationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public DestinationDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTDestination;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTDestination), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[DestinationReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTDestination);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected DestinationDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
return DestinationDto.builder()
@ -66,13 +42,5 @@ public class DestinationReader implements ItemReader<DestinationDto> {
.positionInfo(rs.getString("position_info"))
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTDestination);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class PortCallReader implements ItemReader<PortCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PortCallDto> allDataBuffer = new ArrayList<>();
public class PortCallReader extends BaseSyncReader<PortCallDto> {
public PortCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public PortCallDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTShipStpovInfo;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTShipStpovInfo), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[PortCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTShipStpovInfo);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected PortCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
return PortCallDto.builder()
@ -75,13 +51,5 @@ public class PortCallReader implements ItemReader<PortCallDto> {
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
.positionInfo(rs.getString("position_info"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTShipStpovInfo);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class StsOperationReader implements ItemReader<StsOperationDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<StsOperationDto> allDataBuffer = new ArrayList<>();
public class StsOperationReader extends BaseSyncReader<StsOperationDto> {
public StsOperationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public StsOperationDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTStsOperation;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTStsOperation), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[StsOperationReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTStsOperation);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected StsOperationDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
@ -74,13 +50,5 @@ public class StsOperationReader implements ItemReader<StsOperationDto> {
.stsType(rs.getString("sts_type"))
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTStsOperation);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class TerminalCallReader implements ItemReader<TerminalCallDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<TerminalCallDto> allDataBuffer = new ArrayList<>();
public class TerminalCallReader extends BaseSyncReader<TerminalCallDto> {
public TerminalCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public TerminalCallDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTTerminalCall;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTerminalCall), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[TerminalCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTerminalCall);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected TerminalCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
@ -76,13 +52,5 @@ public class TerminalCallReader implements ItemReader<TerminalCallDto> {
.lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
.lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTerminalCall);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.movement.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class TransitReader implements ItemReader<TransitDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<TransitDto> allDataBuffer = new ArrayList<>();
public class TransitReader extends BaseSyncReader<TransitDto> {
public TransitReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public TransitDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTTransit;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTransit), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[TransitReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTransit);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected TransitDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
return TransitDto.builder()
@ -60,13 +36,5 @@ public class TransitReader implements ItemReader<TransitDto> {
.facilityType(rs.getString("facility_type"))
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTransit);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.psc.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
@ -154,12 +151,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscDetailSyncStep() {
log.info("Step 생성: pscDetailSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<PscDetailDto, PscDetailEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<PscDetailDto, PscDetailEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<PscDetailDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(pscDetailWriteListener())
.build();
}
@ -168,12 +163,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscDefectSyncStep() {
log.info("Step 생성: pscDefectSyncStep");
return new StepBuilder("pscDefectSyncStep", jobRepository)
.<PscDefectDto, PscDefectEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<PscDefectDto, PscDefectEntity>chunk(getChunkSize(), transactionManager)
.reader(pscDefectReader(businessDataSource, tableMetaInfo))
.processor(new PscDefectProcessor())
.writer(new PscDefectWriter(pscRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<PscDefectDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(pscDefectWriteListener())
.build();
}
@ -182,12 +175,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscAllCertificateSyncStep() {
log.info("Step 생성: pscAllCertificateSyncStep");
return new StepBuilder("pscAllCertificateSyncStep", jobRepository)
.<PscAllCertificateDto, PscAllCertificateEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<PscAllCertificateDto, PscAllCertificateEntity>chunk(getChunkSize(), transactionManager)
.reader(pscAllCertificateReader(businessDataSource, tableMetaInfo))
.processor(new PscAllCertificateProcessor())
.writer(new PscAllCertificateWriter(pscRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<PscAllCertificateDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(pscAllCertificateWriteListener())
.build();
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.psc.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class PscAllCertificateReader implements ItemReader<PscAllCertificateDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PscAllCertificateDto> allDataBuffer = new ArrayList<>();
public class PscAllCertificateReader extends BaseSyncReader<PscAllCertificateDto> {
public PscAllCertificateReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public PscAllCertificateDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourcePscAllCertificate;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscAllCertificate), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[PscAllCertificateReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscAllCertificate);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected PscAllCertificateDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp expryYmdTs = rs.getTimestamp("expry_ymd");
Timestamp lastInspectionYmdTs = rs.getTimestamp("last_inspection_ymd");
@ -75,13 +51,5 @@ public class PscAllCertificateReader implements ItemReader<PscAllCertificateDto>
.checkYmd(rs.getString("check_ymd"))
.insptr(rs.getString("insptr"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscAllCertificate);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.psc.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class PscDefectReader implements ItemReader<PscDefectDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PscDefectDto> allDataBuffer = new ArrayList<>();
public class PscDefectReader extends BaseSyncReader<PscDefectDto> {
public PscDefectReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public PscDefectDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourcePscDefect;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDefect), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[PscDefectReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDefect);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected PscDefectDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return PscDefectDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -75,13 +51,5 @@ public class PscDefectReader implements ItemReader<PscDefectDto> {
.pubcEnginesRespsbYn(rs.getString("pubc_engines_respsb_yn"))
.acdntDamgYn(rs.getString("acdnt_damg_yn"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDefect);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.psc.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class PscDetailReader implements ItemReader<PscDetailDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PscDetailDto> allDataBuffer = new ArrayList<>();
public class PscDetailReader extends BaseSyncReader<PscDetailDto> {
public PscDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public PscDetailDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourcePscDetail;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDetail), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[PscDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDetail);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected PscDetailDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp inspectionYmdTs = rs.getTimestamp("inspection_ymd");
Timestamp tkoffPrmtYmdTs = rs.getTimestamp("tkoff_prmt_ymd");
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
@ -86,13 +62,5 @@ public class PscDetailReader implements ItemReader<PscDetailDto> {
.unPortCd(rs.getString("un_port_cd"))
.buildYy(rs.getString("build_yy"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDetail);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.risk.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity;
@ -112,12 +109,10 @@ public class RiskSyncJobConfig extends BaseJobConfig<RiskDto, RiskEntity> {
public Step riskSyncStep() {
log.info("Step 생성: riskSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<RiskDto, RiskEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<RiskDto, RiskEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<RiskDto>())
.listener(new GroupByExecutionIdChunkListener())
.listener(riskWriteListener())
.build();
}

파일 보기

@ -1,54 +1,30 @@
package com.snp.batch.jobs.datasync.batch.risk.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class RiskReader implements ItemReader<RiskDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<RiskDto> allDataBuffer = new ArrayList<>();
public class RiskReader extends BaseSyncReader<RiskDto> {
public RiskReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public RiskDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceRisk;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceRisk), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[RiskReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceRisk);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected RiskDto mapRow(ResultSet rs, Long targetId) throws SQLException {
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
return RiskDto.builder()
@ -96,13 +72,5 @@ public class RiskReader implements ItemReader<RiskDto> {
.rssOwnrReg(rs.getString("rss_ownr_reg"))
.rssSts(rs.getString("rss_sts"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceRisk);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -3,9 +3,6 @@ package com.snp.batch.jobs.datasync.batch.ship.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
@ -634,12 +631,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
@Bean(name = "snpShipDetailSyncStep")
public Step snpShipDetailSyncStep() {
return new StepBuilder(getStepName(), jobRepository)
.<ShipInfoMstDto, ShipInfoMstEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<ShipInfoMstDto, ShipInfoMstEntity>chunk(getChunkSize(), transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
.listener(new GroupByExecutionIdReadListener<ShipInfoMstDto>()) // Reader 리스너 (ThreadLocal 설정)
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 (ThreadLocal 정리)
.listener(shipWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -648,12 +643,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step ownerHistorySyncStep() {
log.info("Step 생성: ownerHistorySyncStep");
return new StepBuilder("ownerHistorySyncStep", jobRepository)
.<OwnerHistoryDto, OwnerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<OwnerHistoryDto, OwnerHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(ownerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new OwnerHistoryProcessor())
.writer(new OwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<OwnerHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(ownerHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -662,12 +655,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step shipAddInfoSyncStep() {
log.info("Step 생성: shipAddInfoSyncStep");
return new StepBuilder("shipAddInfoSyncStep", jobRepository)
.<ShipAddInfoDto, ShipAddInfoEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<ShipAddInfoDto, ShipAddInfoEntity>chunk(getChunkSize(), transactionManager)
.reader(shipAddInfoReader(businessDataSource, tableMetaInfo))
.processor(new ShipAddInfoProcessor())
.writer(new ShipAddInfoWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<ShipAddInfoDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(shipAddInfoWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -676,12 +667,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step bareboatCharterHistorySyncStep() {
log.info("Step 생성: bareboatCharterHistorySyncStep");
return new StepBuilder("bareboatCharterHistorySyncStep", jobRepository)
.<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(bareboatCharterHistoryReader(businessDataSource, tableMetaInfo))
.processor(new BareboatCharterHistoryProcessor())
.writer(new BareboatCharterHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<BareboatCharterHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(bareboatCharterHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -690,12 +679,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step callsignAndMmsiHistorySyncStep() {
log.info("Step 생성: callsignAndMmsiHistorySyncStep");
return new StepBuilder("callsignAndMmsiHistorySyncStep", jobRepository)
.<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(callsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo))
.processor(new CallsignAndMmsiHistoryProcessor())
.writer(new CallsignAndMmsiHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<CallsignAndMmsiHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(callsignAndMmsiHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -704,12 +691,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step classHistorySyncStep() {
log.info("Step 생성: classHistorySyncStep");
return new StepBuilder("classHistorySyncStep", jobRepository)
.<ClassHistoryDto, ClassHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<ClassHistoryDto, ClassHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(classHistoryReader(businessDataSource, tableMetaInfo))
.processor(new ClassHistoryProcessor())
.writer(new ClassHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<ClassHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(classHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -718,12 +703,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step companyVesselRelationshipsSyncStep() {
log.info("Step 생성: companyVesselRelationshipsSyncStep");
return new StepBuilder("companyVesselRelationshipsSyncStep", jobRepository)
.<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(getChunkSize(), transactionManager)
.reader(companyVesselRelationshipsReader(businessDataSource, tableMetaInfo))
.processor(new CompanyVesselRelationshipsProcessor())
.writer(new CompanyVesselRelationshipsWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<CompanyVesselRelationshipsDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(companyVesselRelationshipsWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -732,12 +715,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step crewListSyncStep() {
log.info("Step 생성: crewListSyncStep");
return new StepBuilder("crewListSyncStep", jobRepository)
.<CrewListDto, CrewListEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<CrewListDto, CrewListEntity>chunk(getChunkSize(), transactionManager)
.reader(crewListReader(businessDataSource, tableMetaInfo))
.processor(new CrewListProcessor())
.writer(new CrewListWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<CrewListDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(crewListWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -746,12 +727,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step darkActivityConfirmedSyncStep() {
log.info("Step 생성: darkActivityConfirmedSyncStep");
return new StepBuilder("darkActivityConfirmedSyncStep", jobRepository)
.<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(getChunkSize(), transactionManager)
.reader(darkActivityConfirmedReader(businessDataSource, tableMetaInfo))
.processor(new DarkActivityConfirmedProcessor())
.writer(new DarkActivityConfirmedWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<DarkActivityConfirmedDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(darkActivityConfirmedWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -760,12 +739,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step flagHistorySyncStep() {
log.info("Step 생성: flagHistorySyncStep");
return new StepBuilder("flagHistorySyncStep", jobRepository)
.<FlagHistoryDto, FlagHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<FlagHistoryDto, FlagHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(flagHistoryReader(businessDataSource, tableMetaInfo))
.processor(new FlagHistoryProcessor())
.writer(new FlagHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<FlagHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(flagHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -774,12 +751,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step groupBeneficialOwnerHistorySyncStep() {
log.info("Step 생성: groupBeneficialOwnerHistorySyncStep");
return new StepBuilder("groupBeneficialOwnerHistorySyncStep", jobRepository)
.<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(groupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new GroupBeneficialOwnerHistoryProcessor())
.writer(new GroupBeneficialOwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<GroupBeneficialOwnerHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(groupBeneficialOwnerHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -788,12 +763,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step iceClassSyncStep() {
log.info("Step 생성: iceClassSyncStep");
return new StepBuilder("iceClassSyncStep", jobRepository)
.<IceClassDto, IceClassEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<IceClassDto, IceClassEntity>chunk(getChunkSize(), transactionManager)
.reader(iceClassReader(businessDataSource, tableMetaInfo))
.processor(new IceClassProcessor())
.writer(new IceClassWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<IceClassDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(iceClassWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -802,12 +775,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step nameHistorySyncStep() {
log.info("Step 생성: nameHistorySyncStep");
return new StepBuilder("nameHistorySyncStep", jobRepository)
.<NameHistoryDto, NameHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
.<NameHistoryDto, NameHistoryEntity>chunk(getChunkSize(), transactionManager)
.reader(nameHistoryReader(businessDataSource, tableMetaInfo))
.processor(new NameHistoryProcessor())
.writer(new NameHistoryWriter(shipRepository, transactionManager, subChunkSize))
.listener(new GroupByExecutionIdReadListener<NameHistoryDto>()) // Reader 리스너
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
.listener(nameHistoryWriteListener()) // Write 완료 batch_flag 업데이트
.build();
}
@ -816,12 +787,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the operator-history synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step operatorHistorySyncStep() {
    log.info("Step 생성: operatorHistorySyncStep");
    return new StepBuilder("operatorHistorySyncStep", jobRepository)
            .<OperatorHistoryDto, OperatorHistoryEntity>chunk(getChunkSize(), transactionManager)
            .reader(operatorHistoryReader(businessDataSource, tableMetaInfo))
            .processor(new OperatorHistoryProcessor())
            .writer(new OperatorHistoryWriter(shipRepository, transactionManager, subChunkSize))
            .listener(operatorHistoryWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -830,12 +799,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the P&amp;I-history synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step pandIHistorySyncStep() {
    log.info("Step 생성: pandIHistorySyncStep");
    return new StepBuilder("pandIHistorySyncStep", jobRepository)
            .<PandIHistoryDto, PandIHistoryEntity>chunk(getChunkSize(), transactionManager)
            .reader(pandIHistoryReader(businessDataSource, tableMetaInfo))
            .processor(new PandIHistoryProcessor())
            .writer(new PandIHistoryWriter(shipRepository, transactionManager, subChunkSize))
            .listener(pandIHistoryWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -844,12 +811,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the safety-management-certificate-history synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step safetyManagementCertificateHistSyncStep() {
    log.info("Step 생성: safetyManagementCertificateHistSyncStep");
    return new StepBuilder("safetyManagementCertificateHistSyncStep", jobRepository)
            .<SafetyManagementCertificateHistDto, SafetyManagementCertificateHistEntity>chunk(getChunkSize(), transactionManager)
            .reader(safetyManagementCertificateHistReader(businessDataSource, tableMetaInfo))
            .processor(new SafetyManagementCertificateHistProcessor())
            .writer(new SafetyManagementCertificateHistWriter(shipRepository, transactionManager, subChunkSize))
            .listener(safetyManagementCertificateHistWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -858,12 +823,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the ship-manager-history synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step shipManagerHistorySyncStep() {
    log.info("Step 생성: shipManagerHistorySyncStep");
    return new StepBuilder("shipManagerHistorySyncStep", jobRepository)
            .<ShipManagerHistoryDto, ShipManagerHistoryEntity>chunk(getChunkSize(), transactionManager)
            .reader(shipManagerHistoryReader(businessDataSource, tableMetaInfo))
            .processor(new ShipManagerHistoryProcessor())
            .writer(new ShipManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
            .listener(shipManagerHistoryWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -872,12 +835,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the sister-ship-links synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step sisterShipLinksSyncStep() {
    log.info("Step 생성: sisterShipLinksSyncStep");
    return new StepBuilder("sisterShipLinksSyncStep", jobRepository)
            .<SisterShipLinksDto, SisterShipLinksEntity>chunk(getChunkSize(), transactionManager)
            .reader(sisterShipLinksReader(businessDataSource, tableMetaInfo))
            .processor(new SisterShipLinksProcessor())
            .writer(new SisterShipLinksWriter(shipRepository, transactionManager, subChunkSize))
            .listener(sisterShipLinksWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -886,12 +847,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the special-feature synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step specialFeatureSyncStep() {
    log.info("Step 생성: specialFeatureSyncStep");
    return new StepBuilder("specialFeatureSyncStep", jobRepository)
            .<SpecialFeatureDto, SpecialFeatureEntity>chunk(getChunkSize(), transactionManager)
            .reader(specialFeatureReader(businessDataSource, tableMetaInfo))
            .processor(new SpecialFeatureProcessor())
            .writer(new SpecialFeatureWriter(shipRepository, transactionManager, subChunkSize))
            .listener(specialFeatureWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -900,12 +859,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the status-history synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step statusHistorySyncStep() {
    log.info("Step 생성: statusHistorySyncStep");
    return new StepBuilder("statusHistorySyncStep", jobRepository)
            .<StatusHistoryDto, StatusHistoryEntity>chunk(getChunkSize(), transactionManager)
            .reader(statusHistoryReader(businessDataSource, tableMetaInfo))
            .processor(new StatusHistoryProcessor())
            .writer(new StatusHistoryWriter(shipRepository, transactionManager, subChunkSize))
            .listener(statusHistoryWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -914,12 +871,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the stowage-commodity synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step stowageCommoditySyncStep() {
    log.info("Step 생성: stowageCommoditySyncStep");
    return new StepBuilder("stowageCommoditySyncStep", jobRepository)
            .<StowageCommodityDto, StowageCommodityEntity>chunk(getChunkSize(), transactionManager)
            .reader(stowageCommodityReader(businessDataSource, tableMetaInfo))
            .processor(new StowageCommodityProcessor())
            .writer(new StowageCommodityWriter(shipRepository, transactionManager, subChunkSize))
            .listener(stowageCommodityWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -928,12 +883,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the survey-dates synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step surveyDatesSyncStep() {
    log.info("Step 생성: surveyDatesSyncStep");
    return new StepBuilder("surveyDatesSyncStep", jobRepository)
            .<SurveyDatesDto, SurveyDatesEntity>chunk(getChunkSize(), transactionManager)
            .reader(surveyDatesReader(businessDataSource, tableMetaInfo))
            .processor(new SurveyDatesProcessor())
            .writer(new SurveyDatesWriter(shipRepository, transactionManager, subChunkSize))
            .listener(surveyDatesWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -942,12 +895,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the unique survey-dates-history synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step surveyDatesHistoryUniqueSyncStep() {
    log.info("Step 생성: surveyDatesHistoryUniqueSyncStep");
    return new StepBuilder("surveyDatesHistoryUniqueSyncStep", jobRepository)
            .<SurveyDatesHistoryUniqueDto, SurveyDatesHistoryUniqueEntity>chunk(getChunkSize(), transactionManager)
            .reader(surveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo))
            .processor(new SurveyDatesHistoryUniqueProcessor())
            .writer(new SurveyDatesHistoryUniqueWriter(shipRepository, transactionManager, subChunkSize))
            .listener(surveyDatesHistoryUniqueWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -956,12 +907,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the technical-manager-history synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step technicalManagerHistorySyncStep() {
    log.info("Step 생성: technicalManagerHistorySyncStep");
    return new StepBuilder("technicalManagerHistorySyncStep", jobRepository)
            .<TechnicalManagerHistoryDto, TechnicalManagerHistoryEntity>chunk(getChunkSize(), transactionManager)
            .reader(technicalManagerHistoryReader(businessDataSource, tableMetaInfo))
            .processor(new TechnicalManagerHistoryProcessor())
            .writer(new TechnicalManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
            .listener(technicalManagerHistoryWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -970,12 +919,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the thrusters synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step thrustersSyncStep() {
    log.info("Step 생성: thrustersSyncStep");
    return new StepBuilder("thrustersSyncStep", jobRepository)
            .<ThrustersDto, ThrustersEntity>chunk(getChunkSize(), transactionManager)
            .reader(thrustersReader(businessDataSource, tableMetaInfo))
            .processor(new ThrustersProcessor())
            .writer(new ThrustersWriter(shipRepository, transactionManager, subChunkSize))
            .listener(thrustersWriteListener()) // updates batch_flag once the write completes
            .build();
}
@ -984,12 +931,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
/**
 * Builds the company-detail synchronization step.
 *
 * <p>Diff residue resolved to the post-refactor state: fixed-size chunking via
 * {@code getChunkSize()}; the reader ends chunks at job_execution_id boundaries,
 * so the old policy and group-by listeners are intentionally gone.
 */
public Step tbCompanyDetailSyncStep() {
    log.info("Step 생성: tbCompanyDetailSyncStep");
    return new StepBuilder("tbCompanyDetailSyncStep", jobRepository)
            .<TbCompanyDetailDto, TbCompanyDetailEntity>chunk(getChunkSize(), transactionManager)
            .reader(tbCompanyDetailReader(businessDataSource, tableMetaInfo))
            .processor(new TbCompanyDetailProcessor())
            .writer(new TbCompanyDetailWriter(shipRepository, transactionManager, subChunkSize))
            .listener(tbCompanyDetailWriteListener()) // updates batch_flag once the write completes
            .build();
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class BareboatCharterHistoryReader implements ItemReader<BareboatCharterHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<BareboatCharterHistoryDto> allDataBuffer = new ArrayList<>();
public class BareboatCharterHistoryReader extends BaseSyncReader<BareboatCharterHistoryDto> {
public BareboatCharterHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public BareboatCharterHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceBareboatCharterHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceBareboatCharterHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[BareboatCharterHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceBareboatCharterHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected BareboatCharterHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return BareboatCharterHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -60,14 +33,5 @@ public class BareboatCharterHistoryReader implements ItemReader<BareboatCharterH
.bbctrCompanyCd(rs.getString("bbctr_company_cd"))
.bbctrCompany(rs.getString("bbctr_company"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceBareboatCharterHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class CallsignAndMmsiHistoryReader implements ItemReader<CallsignAndMmsiHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<CallsignAndMmsiHistoryDto> allDataBuffer = new ArrayList<>();
public class CallsignAndMmsiHistoryReader extends BaseSyncReader<CallsignAndMmsiHistoryDto> {
public CallsignAndMmsiHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public CallsignAndMmsiHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceCallsignAndMmsiHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCallsignAndMmsiHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[CallsignAndMmsiHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCallsignAndMmsiHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected CallsignAndMmsiHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return CallsignAndMmsiHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -60,14 +33,5 @@ public class CallsignAndMmsiHistoryReader implements ItemReader<CallsignAndMmsiH
.clsgnNo(rs.getString("clsgn_no"))
.mmsiNo(rs.getString("mmsi_no"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCallsignAndMmsiHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ClassHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class ClassHistoryReader implements ItemReader<ClassHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ClassHistoryDto> allDataBuffer = new ArrayList<>();
public class ClassHistoryReader extends BaseSyncReader<ClassHistoryDto> {
public ClassHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public ClassHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceClassHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceClassHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[ClassHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceClassHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected ClassHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return ClassHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -63,14 +36,5 @@ public class ClassHistoryReader implements ItemReader<ClassHistoryDto> {
.clficHasYn(rs.getString("clfic_has_yn"))
.nowYn(rs.getString("now_yn"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceClassHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.CompanyVesselRelationshipsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class CompanyVesselRelationshipsReader implements ItemReader<CompanyVesselRelationshipsDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<CompanyVesselRelationshipsDto> allDataBuffer = new ArrayList<>();
public class CompanyVesselRelationshipsReader extends BaseSyncReader<CompanyVesselRelationshipsDto> {
public CompanyVesselRelationshipsReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public CompanyVesselRelationshipsDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceCompanyVesselRelationships;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCompanyVesselRelationships), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[CompanyVesselRelationshipsReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCompanyVesselRelationships);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected CompanyVesselRelationshipsDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return CompanyVesselRelationshipsDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -76,14 +49,5 @@ public class CompanyVesselRelationshipsReader implements ItemReader<CompanyVesse
.techMngCompanyGroup(rs.getString("tech_mng_company_group"))
.techMngCompanyGroupCd(rs.getString("tech_mng_company_group_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCompanyVesselRelationships);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.CrewListDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class CrewListReader implements ItemReader<CrewListDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<CrewListDto> allDataBuffer = new ArrayList<>();
public class CrewListReader extends BaseSyncReader<CrewListDto> {
public CrewListReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public CrewListDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceCrewList;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCrewList), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[CrewListReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCrewList);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected CrewListDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return CrewListDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -67,14 +40,5 @@ public class CrewListReader implements ItemReader<CrewListDto> {
.embrkMntncCrewCnt(rs.getBigDecimal("embrk_mntnc_crew_cnt"))
.unrprtCnt(rs.getBigDecimal("unrprt_cnt"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCrewList);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.DarkActivityConfirmedDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class DarkActivityConfirmedReader implements ItemReader<DarkActivityConfirmedDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<DarkActivityConfirmedDto> allDataBuffer = new ArrayList<>();
public class DarkActivityConfirmedReader extends BaseSyncReader<DarkActivityConfirmedDto> {
public DarkActivityConfirmedReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public DarkActivityConfirmedDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceDarkActivityConfirmed;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceDarkActivityConfirmed), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[DarkActivityConfirmedReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceDarkActivityConfirmed);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected DarkActivityConfirmedDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return DarkActivityConfirmedDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -81,14 +54,5 @@ public class DarkActivityConfirmedReader implements ItemReader<DarkActivityConfi
.nxtCptrLon(rs.getObject("nxt_cptr_lon", Double.class))
.nxtCptrRptDestAis(rs.getString("nxt_cptr_rpt_dest_ais"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceDarkActivityConfirmed);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.FlagHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class FlagHistoryReader implements ItemReader<FlagHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<FlagHistoryDto> allDataBuffer = new ArrayList<>();
public class FlagHistoryReader extends BaseSyncReader<FlagHistoryDto> {
public FlagHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public FlagHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceFlagHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFlagHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[FlagHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFlagHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected FlagHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return FlagHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -60,14 +33,5 @@ public class FlagHistoryReader implements ItemReader<FlagHistoryDto> {
.countryCd(rs.getString("country_cd"))
.country(rs.getString("country"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFlagHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
// NOTE(review): This span is a web-rendered git diff, not compilable Java.
// Pre-refactor lines (standalone ItemReader with fetchNextGroup/updateBatchProcessing)
// are interleaved with post-refactor lines (BaseSyncReader subclass exposing
// getSourceTable/mapRow). Kept byte-identical; resolve against the actual commit.
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.GroupBeneficialOwnerHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class GroupBeneficialOwnerHistoryReader implements ItemReader<GroupBeneficialOwnerHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<GroupBeneficialOwnerHistoryDto> allDataBuffer = new ArrayList<>();
public class GroupBeneficialOwnerHistoryReader extends BaseSyncReader<GroupBeneficialOwnerHistoryDto> {
public GroupBeneficialOwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public GroupBeneficialOwnerHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceGroupBeneficialOwnerHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[GroupBeneficialOwnerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected GroupBeneficialOwnerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return GroupBeneficialOwnerHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
// NOTE(review): diff hunk marker below — the unchanged middle of the builder chain is elided.
@ -61,14 +34,5 @@ public class GroupBeneficialOwnerHistoryReader implements ItemReader<GroupBenefi
.groupActlOwnr(rs.getString("group_actl_ownr"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.IceClassDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class IceClassReader implements ItemReader<IceClassDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<IceClassDto> allDataBuffer = new ArrayList<>();
public class IceClassReader extends BaseSyncReader<IceClassDto> {
public IceClassReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public IceClassDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceIceClass;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceIceClass), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[IceClassReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceIceClass);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected IceClassDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return IceClassDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -58,14 +31,5 @@ public class IceClassReader implements ItemReader<IceClassDto> {
.iceGrdCd(rs.getString("ice_grd_cd"))
.iceGrd(rs.getString("ice_grd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceIceClass);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class NameHistoryReader implements ItemReader<NameHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<NameHistoryDto> allDataBuffer = new ArrayList<>();
public class NameHistoryReader extends BaseSyncReader<NameHistoryDto> {
public NameHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public NameHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceNameHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceNameHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[NameHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceNameHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected NameHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return NameHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -59,14 +32,5 @@ public class NameHistoryReader implements ItemReader<NameHistoryDto> {
.efectStaDay(rs.getString("efect_sta_day"))
.shipNm(rs.getString("ship_nm"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceNameHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class OperatorHistoryReader implements ItemReader<OperatorHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<OperatorHistoryDto> allDataBuffer = new ArrayList<>();
public class OperatorHistoryReader extends BaseSyncReader<OperatorHistoryDto> {
public OperatorHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public OperatorHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceOperatorHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceOperatorHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[OperatorHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOperatorHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected OperatorHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return OperatorHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -61,14 +34,5 @@ public class OperatorHistoryReader implements ItemReader<OperatorHistoryDto> {
.shipOperator(rs.getString("ship_operator"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOperatorHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class OwnerHistoryReader implements ItemReader<OwnerHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<OwnerHistoryDto> allDataBuffer = new ArrayList<>();
public class OwnerHistoryReader extends BaseSyncReader<OwnerHistoryDto> {
public OwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public OwnerHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceOwnerHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceOwnerHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[OwnerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOwnerHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected OwnerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return OwnerHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -61,14 +34,5 @@ public class OwnerHistoryReader implements ItemReader<OwnerHistoryDto> {
.ownr(rs.getString("ownr"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOwnerHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class PandIHistoryReader implements ItemReader<PandIHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<PandIHistoryDto> allDataBuffer = new ArrayList<>();
public class PandIHistoryReader extends BaseSyncReader<PandIHistoryDto> {
public PandIHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public PandIHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourcePandiHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePandiHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[PandIHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePandiHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected PandIHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return PandIHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -61,14 +34,5 @@ public class PandIHistoryReader implements ItemReader<PandIHistoryDto> {
.pniClubNm(rs.getString("pni_club_nm"))
.src(rs.getString("src"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePandiHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class SafetyManagementCertificateHistReader implements ItemReader<SafetyManagementCertificateHistDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SafetyManagementCertificateHistDto> allDataBuffer = new ArrayList<>();
public class SafetyManagementCertificateHistReader extends BaseSyncReader<SafetyManagementCertificateHistDto> {
public SafetyManagementCertificateHistReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public SafetyManagementCertificateHistDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceSafetyManagementCertificateHist;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSafetyManagementCertificateHist), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[SafetyManagementCertificateHistReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSafetyManagementCertificateHist);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected SafetyManagementCertificateHistDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return SafetyManagementCertificateHistDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -69,14 +42,5 @@ public class SafetyManagementCertificateHistReader implements ItemReader<SafetyM
.smgrcSrc(rs.getString("smgrc_src"))
.smgrcCompanyCd(rs.getString("smgrc_company_cd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSafetyManagementCertificateHist);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,61 +1,31 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class ShipAddInfoReader implements ItemReader<ShipAddInfoDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipAddInfoDto> allDataBuffer = new ArrayList<>();
public class ShipAddInfoReader extends BaseSyncReader<ShipAddInfoDto> {
public ShipAddInfoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public ShipAddInfoDto read() throws Exception {
// 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나" 데이터를 긁어옵니다.
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceAdditionalShipsData;
}
if (allDataBuffer.isEmpty()) {
return null; // 진짜 데이터가 없으면 종료
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
// 1. 아직 'N' 최소 ID 하나를 찾음
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceAdditionalShipsData), Long.class);
} catch (Exception e) {
return; // 대상 없음
}
if (nextTargetId != null) {
log.info("[ShipAddInfoReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
// 2. 해당 ID의 데이터만 버퍼에 로드
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceAdditionalShipsData);
final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected ShipAddInfoDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return ShipAddInfoDto.builder()
.jobExecutionId(targetId) // job_execution_id 설정
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.shipEml(rs.getString("ship_eml"))
@ -70,15 +40,5 @@ public class ShipAddInfoReader implements ItemReader<ShipAddInfoDto> {
.shipSatlitCommId(rs.getString("ship_satlit_comm_id"))
.shipSatlitCmrspCd(rs.getString("ship_satlit_cmrsp_cd"))
.build();
}, nextTargetId);
// 3. 해당 ID 'P' 변경
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceAdditionalShipsData);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,61 +1,31 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class ShipDataReader implements ItemReader<ShipInfoMstDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipInfoMstDto> allDataBuffer = new ArrayList<>();
public class ShipDataReader extends BaseSyncReader<ShipInfoMstDto> {
public ShipDataReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public ShipInfoMstDto read() throws Exception {
// 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나" 데이터를 긁어옵니다.
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceShipDetailData;
}
if (allDataBuffer.isEmpty()) {
return null; // 진짜 데이터가 없으면 종료
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
// 1. 아직 'N' 최소 ID 하나를 찾음
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipDetailData), Long.class);
} catch (Exception e) {
return; // 대상 없음
}
if (nextTargetId != null) {
log.info("[ShipDataReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
// 2. 해당 ID의 데이터만 버퍼에 로드
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipDetailData);
final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected ShipInfoMstDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return ShipInfoMstDto.builder()
.jobExecutionId(targetId) // job_execution_id 설정
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.mmsiNo(rs.getString("mmsi_no"))
@ -141,16 +111,5 @@ public class ShipDataReader implements ItemReader<ShipInfoMstDto> {
.regShponrCd(rs.getString("reg_shponr_cd"))
.lastMdfcnDt(rs.getString("last_mdfcn_dt"))
.build();
}, nextTargetId);
// 3. 해당 ID 'P' 변경
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipDetailData);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,56 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class ShipManagerHistoryReader implements ItemReader<ShipManagerHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ShipManagerHistoryDto> allDataBuffer = new ArrayList<>();
public class ShipManagerHistoryReader extends BaseSyncReader<ShipManagerHistoryDto> {
public ShipManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public ShipManagerHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
protected String getSourceTable() {
return tableMetaInfo.sourceShipManagerHistory;
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipManagerHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[ShipManagerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipManagerHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected ShipManagerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return ShipManagerHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -61,14 +34,5 @@ public class ShipManagerHistoryReader implements ItemReader<ShipManagerHistoryDt
.shipMngr(rs.getString("ship_mngr"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipManagerHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,66 +1,34 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class SisterShipLinksReader implements ItemReader<SisterShipLinksDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SisterShipLinksDto> allDataBuffer = new ArrayList<>();
public class SisterShipLinksReader extends BaseSyncReader<SisterShipLinksDto> {
public SisterShipLinksReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public SisterShipLinksDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceSisterShipLinks;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSisterShipLinks), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[SisterShipLinksReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSisterShipLinks);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected SisterShipLinksDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return SisterShipLinksDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
.imoNo(rs.getString("imo_no"))
.linkImoNo(rs.getString("link_imo_no"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSisterShipLinks);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class SpecialFeatureReader implements ItemReader<SpecialFeatureDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SpecialFeatureDto> allDataBuffer = new ArrayList<>();
public class SpecialFeatureReader extends BaseSyncReader<SpecialFeatureDto> {
public SpecialFeatureReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
this.tableMetaInfo = tableMetaInfo;
super(businessDataSource, tableMetaInfo);
}
@Override
public SpecialFeatureDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceSpecialFeature;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSpecialFeature), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[SpecialFeatureReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSpecialFeature);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected SpecialFeatureDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return SpecialFeatureDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -56,13 +32,5 @@ public class SpecialFeatureReader implements ItemReader<SpecialFeatureDto> {
.spcMttrCd(rs.getString("spc_mttr_cd"))
.spcMttr(rs.getString("spc_mttr"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSpecialFeature);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class StatusHistoryReader implements ItemReader<StatusHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<StatusHistoryDto> allDataBuffer = new ArrayList<>();
public class StatusHistoryReader extends BaseSyncReader<StatusHistoryDto> {
/**
 * Creates the status-history sync reader.
 * All peek/fetch/chunk-boundary logic lives in {@code BaseSyncReader};
 * this subclass only wires the datasource and table metadata.
 *
 * @param businessDataSource business DB datasource (qualified bean)
 * @param tableMetaInfo      table metadata supplying the source table name
 */
public StatusHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
    super(businessDataSource, tableMetaInfo);
}
@Override
public StatusHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceStatusHistory;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStatusHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[StatusHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStatusHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected StatusHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return StatusHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -57,13 +33,5 @@ public class StatusHistoryReader implements ItemReader<StatusHistoryDto> {
.statusChgYmd(rs.getString("status_chg_ymd"))
.status(rs.getString("status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStatusHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class StowageCommodityReader implements ItemReader<StowageCommodityDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<StowageCommodityDto> allDataBuffer = new ArrayList<>();
public class StowageCommodityReader extends BaseSyncReader<StowageCommodityDto> {
/**
 * Creates the stowage-commodity sync reader.
 * All peek/fetch/chunk-boundary logic lives in {@code BaseSyncReader};
 * this subclass only wires the datasource and table metadata.
 *
 * @param businessDataSource business DB datasource (qualified bean)
 * @param tableMetaInfo      table metadata supplying the source table name
 */
public StowageCommodityReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
    super(businessDataSource, tableMetaInfo);
}
@Override
public StowageCommodityDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceStowageCommodity;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStowageCommodity), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[StowageCommodityReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStowageCommodity);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected StowageCommodityDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return StowageCommodityDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -58,13 +34,5 @@ public class StowageCommodityReader implements ItemReader<StowageCommodityDto> {
.cargoCd(rs.getString("cargo_cd"))
.cargoNm(rs.getString("cargo_nm"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStowageCommodity);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class SurveyDatesHistoryUniqueReader implements ItemReader<SurveyDatesHistoryUniqueDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SurveyDatesHistoryUniqueDto> allDataBuffer = new ArrayList<>();
public class SurveyDatesHistoryUniqueReader extends BaseSyncReader<SurveyDatesHistoryUniqueDto> {
/**
 * Creates the survey-dates-history-unique sync reader.
 * All peek/fetch/chunk-boundary logic lives in {@code BaseSyncReader};
 * this subclass only wires the datasource and table metadata.
 *
 * @param businessDataSource business DB datasource (qualified bean)
 * @param tableMetaInfo      table metadata supplying the source table name
 */
public SurveyDatesHistoryUniqueReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
    super(businessDataSource, tableMetaInfo);
}
@Override
public SurveyDatesHistoryUniqueDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceSurveyDatesHistoryUnique;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[SurveyDatesHistoryUniqueReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected SurveyDatesHistoryUniqueDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return SurveyDatesHistoryUniqueDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -57,13 +33,5 @@ public class SurveyDatesHistoryUniqueReader implements ItemReader<SurveyDatesHis
.inspectionYmd(rs.getString("inspection_ymd"))
.clfic(rs.getString("clfic"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class SurveyDatesReader implements ItemReader<SurveyDatesDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<SurveyDatesDto> allDataBuffer = new ArrayList<>();
public class SurveyDatesReader extends BaseSyncReader<SurveyDatesDto> {
/**
 * Creates the survey-dates sync reader.
 * All peek/fetch/chunk-boundary logic lives in {@code BaseSyncReader};
 * this subclass only wires the datasource and table metadata.
 *
 * @param businessDataSource business DB datasource (qualified bean)
 * @param tableMetaInfo      table metadata supplying the source table name
 */
public SurveyDatesReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
    super(businessDataSource, tableMetaInfo);
}
@Override
public SurveyDatesDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceSurveyDates;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDates), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[SurveyDatesReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDates);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected SurveyDatesDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return SurveyDatesDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -60,13 +36,5 @@ public class SurveyDatesReader implements ItemReader<SurveyDatesDto> {
.mchnFxtmInspectionYmd(rs.getString("mchn_fxtm_inspection_ymd"))
.tlsftInspectionYmd(rs.getString("tlsft_inspection_ymd"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDates);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class TbCompanyDetailReader implements ItemReader<TbCompanyDetailDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<TbCompanyDetailDto> allDataBuffer = new ArrayList<>();
public class TbCompanyDetailReader extends BaseSyncReader<TbCompanyDetailDto> {
/**
 * Creates the company-detail sync reader.
 * All peek/fetch/chunk-boundary logic lives in {@code BaseSyncReader};
 * this subclass only wires the datasource and table metadata.
 *
 * @param businessDataSource business DB datasource (qualified bean)
 * @param tableMetaInfo      table metadata supplying the source table name
 */
public TbCompanyDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
    super(businessDataSource, tableMetaInfo);
}
@Override
public TbCompanyDetailDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTbCompanyDetail;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyDetail), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[TbCompanyDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyDetail);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected TbCompanyDetailDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return TbCompanyDetailDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -81,13 +57,5 @@ public class TbCompanyDetailReader implements ItemReader<TbCompanyDetailDto> {
.dtlAddrThr(rs.getString("dtl_addr_thr"))
.tlx(rs.getString("tlx"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyDetail);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.TechnicalManagerHistoryDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class TechnicalManagerHistoryReader implements ItemReader<TechnicalManagerHistoryDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<TechnicalManagerHistoryDto> allDataBuffer = new ArrayList<>();
public class TechnicalManagerHistoryReader extends BaseSyncReader<TechnicalManagerHistoryDto> {
/**
 * Creates the technical-manager-history sync reader.
 * All peek/fetch/chunk-boundary logic lives in {@code BaseSyncReader};
 * this subclass only wires the datasource and table metadata.
 *
 * @param businessDataSource business DB datasource (qualified bean)
 * @param tableMetaInfo      table metadata supplying the source table name
 */
public TechnicalManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
    super(businessDataSource, tableMetaInfo);
}
@Override
public TechnicalManagerHistoryDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceTechnicalManagerHistory;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTechnicalManagerHistory), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[TechnicalManagerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTechnicalManagerHistory);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected TechnicalManagerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return TechnicalManagerHistoryDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -58,13 +34,5 @@ public class TechnicalManagerHistoryReader implements ItemReader<TechnicalManage
.techMngr(rs.getString("tech_mngr"))
.companyStatus(rs.getString("company_status"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTechnicalManagerHistory);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}

파일 보기

@ -1,53 +1,29 @@
package com.snp.batch.jobs.datasync.batch.ship.reader;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.batch.reader.BaseSyncReader;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.ThrustersDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.sql.ResultSet;
import java.sql.SQLException;
@Slf4j
public class ThrustersReader implements ItemReader<ThrustersDto> {
private final TableMetaInfo tableMetaInfo;
private final JdbcTemplate businessJdbcTemplate;
private List<ThrustersDto> allDataBuffer = new ArrayList<>();
public class ThrustersReader extends BaseSyncReader<ThrustersDto> {
/**
 * Creates the thrusters sync reader.
 * All peek/fetch/chunk-boundary logic lives in {@code BaseSyncReader};
 * this subclass only wires the datasource and table metadata.
 *
 * @param businessDataSource business DB datasource (qualified bean)
 * @param tableMetaInfo      table metadata supplying the source table name
 */
public ThrustersReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
    super(businessDataSource, tableMetaInfo);
}
@Override
public ThrustersDto read() throws Exception {
if (allDataBuffer.isEmpty()) {
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
}
return allDataBuffer.remove(0);
protected String getSourceTable() {
return tableMetaInfo.sourceThrusters;
}
private void fetchNextGroup() {
Long nextTargetId = null;
try {
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(tableMetaInfo.sourceThrusters), Long.class);
} catch (Exception e) {
return;
}
if (nextTargetId != null) {
log.info("[ThrustersReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceThrusters);
final Long targetId = nextTargetId;
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
@Override
protected ThrustersDto mapRow(ResultSet rs, Long targetId) throws SQLException {
return ThrustersDto.builder()
.jobExecutionId(targetId)
.datasetVer(rs.getString("dataset_ver"))
@ -61,13 +37,5 @@ public class ThrustersReader implements ItemReader<ThrustersDto> {
.thrstrPowerKw(rs.getBigDecimal("thrstr_power_kw"))
.instlMth(rs.getString("instl_mth"))
.build();
}, nextTargetId);
updateBatchProcessing(nextTargetId);
}
}
private void updateBatchProcessing(Long targetExecutionId) {
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceThrusters);
businessJdbcTemplate.update(sql, targetExecutionId);
}
}