fix(batch): 1 chunk = 1 job_execution_id 보장 및 BatchWriteListener SQL null 수정

- BaseSyncReader: 한 그룹 데이터 반환 후 null로 청크 종료하여
  1 chunk = 1 job_execution_id 보장
- chunk(Integer.MAX_VALUE): Reader의 null로만 청크 경계 결정
- BatchWriteListener: SQL을 실행 시점에 생성하여 CommonSql.SOURCE_SCHEMA
  초기화 전 null 참조 문제 해결 (빈 생성 시 → 실행 시 지연 생성)
- JobConfig: BatchWriteListener에 SQL 대신 테이블명 전달
- Repository: 배치 삽입 시작/완료 로그 주석처리

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
HYOJIN 2026-03-24 09:33:44 +09:00
부모 edef10e4bc
커밋 a81e68ad30
30개의 변경된 파일, 254개의 추가 작업 그리고 326개의 삭제 작업

파일 보기

@ -94,7 +94,7 @@ public abstract class BaseJobConfig<I, O> {
if (processor != null) {
var chunkBuilder = stepBuilder
.<I, O>chunk(getChunkSize(), transactionManager)
.<I, O>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(processor)
.writer(createWriter());
@ -104,7 +104,7 @@ public abstract class BaseJobConfig<I, O> {
} else {
@SuppressWarnings("unchecked")
var chunkBuilder = stepBuilder
.<I, I>chunk(getChunkSize(), transactionManager)
.<I, I>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.writer((ItemWriter<? super I>) createWriter());

파일 보기

@ -55,7 +55,7 @@ public abstract class BaseProcessor<I, O> implements ItemProcessor<I, O> {
return null;
}
log.debug("데이터 처리 중: {}", item);
// log.debug("데이터 처리 중: {}", item);
return processItem(item);
}
}

파일 보기

@ -16,11 +16,10 @@ import java.util.List;
/**
* 동기화 Reader 추상 클래스
*
* job_execution_id 단위로 청크를 분리하는 2단계 read 방식:
* 1단계 (peek): 다음 그룹 ID만 조회, 현재 그룹과 다르면 null 반환 (청크 종료)
* 2단계 (fetch): 데이터 로드 + batch_flag NP 전환
*
* GroupByExecutionIdPolicy를 대체하여 Reader 자체에서 청크 경계를 제어한다.
* 1 chunk = 1 job_execution_id 보장:
* - 그룹의 데이터를 모두 반환한 후 null을 반환하여 청크 종료
* - chunk(Integer.MAX_VALUE)와 함께 사용하여 Reader가 청크 경계를 제어
* - 다음 그룹의 NP 전환은 이전 그룹의 청크 처리(Write + PS)가 완료된 후에만 발생
*
* @param <T> DTO 타입 (JobExecutionGroupable 구현 필요)
*/
@ -32,7 +31,6 @@ public abstract class BaseSyncReader<T extends JobExecutionGroupable> implements
private List<T> allDataBuffer = new ArrayList<>();
private Long currentGroupId = null;
private Long pendingGroupId = null;
protected BaseSyncReader(DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
@ -46,82 +44,55 @@ public abstract class BaseSyncReader<T extends JobExecutionGroupable> implements
/**
* ResultSet DTO 매핑
*
* @param rs ResultSet (현재 row)
* @param targetId job_execution_id (DTO의 jobExecutionId 필드에 설정)
* @return 매핑된 DTO 객체
*/
protected abstract T mapRow(ResultSet rs, Long targetId) throws SQLException;
/**
* 로그 접두사 (: "IceClassReader")
*/
protected String getLogPrefix() {
return getClass().getSimpleName();
}
@Override
public T read() throws Exception {
// 1. buffer가 비어있으면 다음 그룹 확인
if (allDataBuffer.isEmpty()) {
// pending이 있으면 (이전 청크에서 감지된 다음 그룹) 바로 로드
if (pendingGroupId != null) {
fetchAndTransition(pendingGroupId);
currentGroupId = pendingGroupId;
pendingGroupId = null;
} else {
// 다음 그룹 ID peek
Long nextId = peekNextGroupId();
if (nextId == null) {
// 이상 처리할 데이터 없음
currentGroupId = null;
return null;
}
if (currentGroupId != null && !currentGroupId.equals(nextId)) {
// 다른 그룹 발견 현재 청크 종료, 다음 청크에서 처리
pendingGroupId = nextId;
currentGroupId = null;
return null;
}
// 같은 그룹이거나 호출 로드
fetchAndTransition(nextId);
currentGroupId = nextId;
// 이전 그룹 처리 완료 null 반환하여 청크 종료
// (Writer + afterWrite(PS) 실행된 다음 청크에서 다음 그룹 로드)
if (currentGroupId != null) {
currentGroupId = null;
return null;
}
// 다음 그룹 로드
fetchNextGroup();
}
if (allDataBuffer.isEmpty()) {
return null;
return null; // 이상 처리할 데이터 없음 Step 종료
}
return allDataBuffer.remove(0);
}
/**
* 다음 처리 대상 job_execution_id 조회 (데이터 로드/전환 없음)
*/
private Long peekNextGroupId() {
private void fetchNextGroup() {
Long nextTargetId;
try {
return businessJdbcTemplate.queryForObject(
nextTargetId = businessJdbcTemplate.queryForObject(
CommonSql.getNextTargetQuery(getSourceTable()), Long.class);
} catch (Exception e) {
return null;
return;
}
}
/**
* 데이터 로드 + batch_flag NP 전환
*/
private void fetchAndTransition(Long targetId) {
log.info("[{}] 다음 처리 대상 ID 발견: {}", getLogPrefix(), targetId);
if (nextTargetId == null) return;
log.info("[{}] 다음 처리 대상 ID 발견: {}", getLogPrefix(), nextTargetId);
String sql = CommonSql.getTargetDataQuery(getSourceTable());
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) ->
mapRow(rs, targetId), targetId);
mapRow(rs, nextTargetId), nextTargetId);
// NP 전환
String updateSql = CommonSql.getProcessBatchQuery(getSourceTable());
businessJdbcTemplate.update(updateSql, targetId);
businessJdbcTemplate.update(updateSql, nextTargetId);
currentGroupId = nextTargetId;
}
}

파일 보기

@ -203,7 +203,7 @@ public abstract class BaseJdbcRepository<T, ID> {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", getEntityName(), entities.size());
// log.debug("{} 배치 삽입 시작: {} 건", getEntityName(), entities.size());
jdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(),
(ps, entity) -> {
@ -215,7 +215,7 @@ public abstract class BaseJdbcRepository<T, ID> {
}
});
log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
// log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
}
/**

파일 보기

@ -78,7 +78,7 @@ public abstract class MultiDataSourceJdbcRepository<T, ID> {
return;
}
log.debug("{} 배치 삽입 시작: {} 건 (Business DB)", getEntityName(), entities.size());
// log.debug("{} 배치 삽입 시작: {} 건 (Business DB)", getEntityName(), entities.size());
// businessJdbcTemplate 사용
businessJdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(),
@ -91,7 +91,7 @@ public abstract class MultiDataSourceJdbcRepository<T, ID> {
}
});
log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
// log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
}
// ... (나머지 find, save, update, delete 메서드도 businessJdbcTemplate을 사용하여 구현합니다.)

파일 보기

@ -1,51 +1,55 @@
package com.snp.batch.common.util;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.ItemWriteListener;
import org.springframework.batch.item.Chunk;
import org.springframework.jdbc.core.JdbcTemplate;

/**
 * Listener that updates batch_flag to 'S' after a chunk is written successfully.
 *
 * The UPDATE SQL is built at execution time (not at bean creation) so that
 * CommonSql.SOURCE_SCHEMA is guaranteed to be initialized before first use.
 *
 * @param <S> item type carrying the group id (must implement JobExecutionGroupable)
 */
@Slf4j
public class BatchWriteListener<S extends JobExecutionGroupable> implements ItemWriteListener<S> {

    private final JdbcTemplate businessJdbcTemplate;
    // Source table name; the completion query is derived from it lazily.
    private final String sourceTable;

    public BatchWriteListener(JdbcTemplate businessJdbcTemplate, String sourceTable) {
        this.businessJdbcTemplate = businessJdbcTemplate;
        this.sourceTable = sourceTable;
    }

    @Override
    public void afterWrite(Chunk<? extends S> items) {
        // afterWrite is only invoked when the Writer finished without throwing.
        if (items.isEmpty()) return;
        Long jobExecutionId = items.getItems().get(0).getJobExecutionId();
        try {
            // Build the SQL at execution time to avoid a null SOURCE_SCHEMA
            // captured during bean construction.
            String sql = CommonSql.getCompleteBatchQuery(sourceTable);
            int updatedRows = businessJdbcTemplate.update(sql, jobExecutionId);
            log.info("[BatchWriteListener] Success update 'S'. jobExecutionId: {}, rows: {}", jobExecutionId, updatedRows);
        } catch (Exception e) {
            log.error("[BatchWriteListener] Update 'S' failed. jobExecutionId: {}", jobExecutionId, e);
            // Rethrow so a failed status update aborts the batch instead of
            // silently leaving the group un-flagged. (Precise rethrow: only
            // unchecked exceptions can reach here.)
            throw e;
        }
    }

    @Override
    public void onWriteError(Exception exception, Chunk<? extends S> items) {
        // Invoked when the Writer itself failed; the group keeps its current flag.
        if (!items.isEmpty()) {
            Long jobExecutionId = items.getItems().get(0).getJobExecutionId();
            log.error("[BatchWriteListener] Write Error Detected! jobExecutionId: {}. Status will NOT be updated to 'S'. Error: {}",
                    jobExecutionId, exception.getMessage());
        }
        // Rethrow so the step ends FAILED; otherwise the batch could continue
        // with the next chunk despite the write failure.
        if (exception instanceof RuntimeException) {
            throw (RuntimeException) exception;
        } else {
            throw new RuntimeException("Force stop batch due to write error", exception);
        }
    }
}

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.code.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
@ -110,14 +110,12 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
@Bean
public BatchWriteListener<FlagCodeEntity> flagCodeWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFlagCode);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceFlagCode);
}
@Bean
public BatchWriteListener<Stat5CodeEntity> stat5CodeWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStat5Code);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceStat5Code);
}
// --- Steps ---
@ -126,7 +124,7 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
public Step flagCodeSyncStep() {
log.info("Step 생성: flagCodeSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<FlagCodeDto, FlagCodeEntity>chunk(getChunkSize(), transactionManager)
.<FlagCodeDto, FlagCodeEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
@ -138,7 +136,7 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
public Step stat5CodeSyncStep() {
log.info("Step 생성: stat5CodeSyncStep");
return new StepBuilder("stat5CodeSyncStep", jobRepository)
.<Stat5CodeDto, Stat5CodeEntity>chunk(getChunkSize(), transactionManager)
.<Stat5CodeDto, Stat5CodeEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(stat5CodeReader(businessDataSource, tableMetaInfo))
.processor(new Stat5CodeProcessor())
.writer(new Stat5CodeWriter(codeRepository))

파일 보기

@ -80,7 +80,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
if (flagCodeEntityList == null || flagCodeEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
batchJdbcTemplate.batchUpdate(sql, flagCodeEntityList, flagCodeEntityList.size(),
(ps, entity) -> {
@ -92,7 +92,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
}
});
log.debug("{} 배치 삽입 완료: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
}
public void bindFlagCode(PreparedStatement pstmt, FlagCodeEntity entity) throws Exception {
@ -111,7 +111,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
if (stat5CodeEntityList == null || stat5CodeEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
batchJdbcTemplate.batchUpdate(sql, stat5CodeEntityList, stat5CodeEntityList.size(),
(ps, entity) -> {
@ -123,7 +123,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
}
});
log.debug("{} 배치 삽입 완료: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
}
public void bindStat5Code(PreparedStatement pstmt, Stat5CodeEntity entity) throws Exception {

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity;
@ -96,8 +96,7 @@ public class CompanyComplianceSyncJobConfig extends BaseJobConfig<CompanyComplia
@Bean
public BatchWriteListener<CompanyComplianceEntity> companyComplianceWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTbCompanyComplianceInfo);
}
// --- Steps ---
@ -106,7 +105,7 @@ public class CompanyComplianceSyncJobConfig extends BaseJobConfig<CompanyComplia
public Step companyComplianceSyncStep() {
log.info("Step 생성: companyComplianceSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<CompanyComplianceDto, CompanyComplianceEntity>chunk(getChunkSize(), transactionManager)
.<CompanyComplianceDto, CompanyComplianceEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity;
@ -96,8 +96,7 @@ public class ShipComplianceSyncJobConfig extends BaseJobConfig<ShipComplianceDto
@Bean
public BatchWriteListener<ShipComplianceEntity> shipComplianceWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCompliance);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCompliance);
}
// --- Steps ---
@ -106,7 +105,7 @@ public class ShipComplianceSyncJobConfig extends BaseJobConfig<ShipComplianceDto
public Step shipComplianceSyncStep() {
log.info("Step 생성: shipComplianceSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<ShipComplianceDto, ShipComplianceEntity>chunk(getChunkSize(), transactionManager)
.<ShipComplianceDto, ShipComplianceEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -80,7 +80,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
if (shipComplianceEntityList == null || shipComplianceEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
batchJdbcTemplate.batchUpdate(sql, shipComplianceEntityList, shipComplianceEntityList.size(),
(ps, entity) -> {
@ -92,7 +92,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
}
});
log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
}
@Override
@ -122,7 +122,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
if (companyComplianceEntityList == null || companyComplianceEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
batchJdbcTemplate.batchUpdate(sql, companyComplianceEntityList, companyComplianceEntityList.size(),
(ps, entity) -> {
@ -134,7 +134,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
}
});
log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
}
@Override
@ -281,7 +281,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
if (companyComplianceChangeEntityList == null || companyComplianceChangeEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
batchJdbcTemplate.batchUpdate(sql, companyComplianceChangeEntityList, companyComplianceChangeEntityList.size(),
(ps, entity) -> {
@ -293,7 +293,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
}
});
log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
}
public void bindCompanyComplianceChange(PreparedStatement pstmt, CompanyComplianceChangeEntity entity) throws Exception {
@ -312,7 +312,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
if (shipComplianceChangeEntityList == null || shipComplianceChangeEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
batchJdbcTemplate.batchUpdate(sql, shipComplianceChangeEntityList, shipComplianceChangeEntityList.size(),
(ps, entity) -> {
@ -324,7 +324,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
}
});
log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
}
public void bindShipComplianceChange(PreparedStatement pstmt, ShipComplianceChangeEntity entity) throws Exception {

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.event.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
@ -144,26 +144,22 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
@Bean
public BatchWriteListener<EventEntity> eventWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEvent);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEvent);
}
@Bean
public BatchWriteListener<EventCargoEntity> eventCargoWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventCargo);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEventCargo);
}
@Bean
public BatchWriteListener<EventHumanCasualtyEntity> eventHumanCasualtyWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventHumanCasualty);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEventHumanCasualty);
}
@Bean
public BatchWriteListener<EventRelationshipEntity> eventRelationshipWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventRelationship);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEventRelationship);
}
// --- Steps ---
@ -172,7 +168,7 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventSyncStep() {
log.info("Step 생성: eventSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<EventDto, EventEntity>chunk(getChunkSize(), transactionManager)
.<EventDto, EventEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
@ -184,7 +180,7 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventCargoSyncStep() {
log.info("Step 생성: eventCargoSyncStep");
return new StepBuilder("eventCargoSyncStep", jobRepository)
.<EventCargoDto, EventCargoEntity>chunk(getChunkSize(), transactionManager)
.<EventCargoDto, EventCargoEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(eventCargoReader(businessDataSource, tableMetaInfo))
.processor(new EventCargoProcessor())
.writer(new EventCargoWriter(eventRepository, transactionManager, subChunkSize))
@ -196,7 +192,7 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventHumanCasualtySyncStep() {
log.info("Step 생성: eventHumanCasualtySyncStep");
return new StepBuilder("eventHumanCasualtySyncStep", jobRepository)
.<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(getChunkSize(), transactionManager)
.<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(eventHumanCasualtyReader(businessDataSource, tableMetaInfo))
.processor(new EventHumanCasualtyProcessor())
.writer(new EventHumanCasualtyWriter(eventRepository, transactionManager, subChunkSize))
@ -208,7 +204,7 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public Step eventRelationshipSyncStep() {
log.info("Step 생성: eventRelationshipSyncStep");
return new StepBuilder("eventRelationshipSyncStep", jobRepository)
.<EventRelationshipDto, EventRelationshipEntity>chunk(getChunkSize(), transactionManager)
.<EventRelationshipDto, EventRelationshipEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(eventRelationshipReader(businessDataSource, tableMetaInfo))
.processor(new EventRelationshipProcessor())
.writer(new EventRelationshipWriter(eventRepository, transactionManager, subChunkSize))

파일 보기

@ -84,7 +84,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
if (eventEntityList == null || eventEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "EventEntity", eventEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "EventEntity", eventEntityList.size());
batchJdbcTemplate.batchUpdate(sql, eventEntityList, eventEntityList.size(),
(ps, entity) -> {
@ -96,7 +96,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
}
});
log.debug("{} 배치 삽입 완료: {} 건", "EventEntity", eventEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "EventEntity", eventEntityList.size());
}
public void bindEvent(PreparedStatement pstmt, EventEntity entity) throws Exception {
@ -152,7 +152,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
if (eventCargoEntityList == null || eventCargoEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "EventCargoEntity", eventCargoEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "EventCargoEntity", eventCargoEntityList.size());
batchJdbcTemplate.batchUpdate(sql, eventCargoEntityList, eventCargoEntityList.size(),
(ps, entity) -> {
@ -164,7 +164,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
}
});
log.debug("{} 배치 삽입 완료: {} 건", "EventCargoEntity", eventCargoEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "EventCargoEntity", eventCargoEntityList.size());
}
public void bindEventCargo(PreparedStatement pstmt, EventCargoEntity entity) throws Exception {
@ -188,7 +188,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
if (eventHumanCasualtyEntityList == null || eventHumanCasualtyEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
batchJdbcTemplate.batchUpdate(sql, eventHumanCasualtyEntityList, eventHumanCasualtyEntityList.size(),
(ps, entity) -> {
@ -200,7 +200,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
}
});
log.debug("{} 배치 삽입 완료: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
}
public void bindEventHumanCasualty(PreparedStatement pstmt, EventHumanCasualtyEntity entity) throws Exception {
@ -219,7 +219,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
if (eventRelationshipEntityList == null || eventRelationshipEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
batchJdbcTemplate.batchUpdate(sql, eventRelationshipEntityList, eventRelationshipEntityList.size(),
(ps, entity) -> {
@ -231,7 +231,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
}
});
log.debug("{} 배치 삽입 완료: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
}
public void bindEventRelationship(PreparedStatement pstmt, EventRelationshipEntity entity) throws Exception {

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.facility.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
@ -99,8 +99,7 @@ public class FacilitySyncJobConfig extends BaseJobConfig<FacilityPortDto, Facili
@Bean
public BatchWriteListener<FacilityPortEntity> facilityPortWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFacilityPort);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceFacilityPort);
}
// --- Steps ---
@ -109,7 +108,7 @@ public class FacilitySyncJobConfig extends BaseJobConfig<FacilityPortDto, Facili
public Step facilityPortSyncStep() {
log.info("Step 생성: facilityPortSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<FacilityPortDto, FacilityPortEntity>chunk(getChunkSize(), transactionManager)
.<FacilityPortDto, FacilityPortEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -81,7 +81,7 @@ public class FacilityRepositoryImpl extends MultiDataSourceJdbcRepository<Facili
if (facilityPortEntityList == null || facilityPortEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
batchJdbcTemplate.batchUpdate(sql, facilityPortEntityList, facilityPortEntityList.size(),
(ps, entity) -> {
@ -93,7 +93,7 @@ public class FacilityRepositoryImpl extends MultiDataSourceJdbcRepository<Facili
}
});
log.debug("{} 배치 삽입 완료: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
}
public void bindFacilityPort(PreparedStatement pstmt, FacilityPortEntity entity) throws Exception {

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity;
@ -92,15 +92,14 @@ public class AnchorageCallSyncJobConfig extends BaseJobConfig<AnchorageCallDto,
@Bean
public BatchWriteListener<AnchorageCallEntity> anchorageCallWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTAnchorageCall);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTAnchorageCall);
}
@Bean(name = "anchorageCallSyncStep")
public Step anchorageCallSyncStep() {
log.info("Step 생성: anchorageCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<AnchorageCallDto, AnchorageCallEntity>chunk(getChunkSize(), transactionManager)
.<AnchorageCallDto, AnchorageCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity;
@ -92,15 +92,14 @@ public class BerthCallSyncJobConfig extends BaseJobConfig<BerthCallDto, BerthCal
@Bean
public BatchWriteListener<BerthCallEntity> berthCallWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTBerthCall);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTBerthCall);
}
@Bean(name = "berthCallSyncStep")
public Step berthCallSyncStep() {
log.info("Step 생성: berthCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<BerthCallDto, BerthCallEntity>chunk(getChunkSize(), transactionManager)
.<BerthCallDto, BerthCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity;
@ -92,15 +92,14 @@ public class CurrentlyAtSyncJobConfig extends BaseJobConfig<CurrentlyAtDto, Curr
@Bean
public BatchWriteListener<CurrentlyAtEntity> currentlyAtWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTCurrentlyAt);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTCurrentlyAt);
}
@Bean(name = "currentlyAtSyncStep")
public Step currentlyAtSyncStep() {
log.info("Step 생성: currentlyAtSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<CurrentlyAtDto, CurrentlyAtEntity>chunk(getChunkSize(), transactionManager)
.<CurrentlyAtDto, CurrentlyAtEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity;
@ -92,15 +92,14 @@ public class DestinationSyncJobConfig extends BaseJobConfig<DestinationDto, Dest
@Bean
public BatchWriteListener<DestinationEntity> destinationWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTDestination);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTDestination);
}
@Bean(name = "destinationSyncStep")
public Step destinationSyncStep() {
log.info("Step 생성: destinationSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<DestinationDto, DestinationEntity>chunk(getChunkSize(), transactionManager)
.<DestinationDto, DestinationEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity;
@ -92,15 +92,14 @@ public class PortCallSyncJobConfig extends BaseJobConfig<PortCallDto, PortCallEn
@Bean
public BatchWriteListener<PortCallEntity> portCallWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTShipStpovInfo);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTShipStpovInfo);
}
@Bean(name = "portCallSyncStep")
public Step portCallSyncStep() {
log.info("Step 생성: portCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<PortCallDto, PortCallEntity>chunk(getChunkSize(), transactionManager)
.<PortCallDto, PortCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity;
@ -92,15 +92,14 @@ public class StsOperationSyncJobConfig extends BaseJobConfig<StsOperationDto, St
@Bean
public BatchWriteListener<StsOperationEntity> stsOperationWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTStsOperation);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTStsOperation);
}
@Bean(name = "stsOperationSyncStep")
public Step stsOperationSyncStep() {
log.info("Step 생성: stsOperationSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<StsOperationDto, StsOperationEntity>chunk(getChunkSize(), transactionManager)
.<StsOperationDto, StsOperationEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity;
@ -92,15 +92,14 @@ public class TerminalCallSyncJobConfig extends BaseJobConfig<TerminalCallDto, Te
@Bean
public BatchWriteListener<TerminalCallEntity> terminalCallWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTTerminalCall);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTTerminalCall);
}
@Bean(name = "terminalCallSyncStep")
public Step terminalCallSyncStep() {
log.info("Step 생성: terminalCallSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<TerminalCallDto, TerminalCallEntity>chunk(getChunkSize(), transactionManager)
.<TerminalCallDto, TerminalCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity;
@ -92,15 +92,14 @@ public class TransitSyncJobConfig extends BaseJobConfig<TransitDto, TransitEntit
@Bean
public BatchWriteListener<TransitEntity> transitWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTTransit);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTTransit);
}
@Bean(name = "transitSyncStep")
public Step transitSyncStep() {
log.info("Step 생성: transitSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<TransitDto, TransitEntity>chunk(getChunkSize(), transactionManager)
.<TransitDto, TransitEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -80,7 +80,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (anchorageCallEntityList == null || anchorageCallEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
batchJdbcTemplate.batchUpdate(sql, anchorageCallEntityList, anchorageCallEntityList.size(),
(ps, entity) -> {
@ -92,7 +92,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
}
public void bindAnchorageCall(PreparedStatement pstmt, AnchorageCallEntity entity) throws Exception {
@ -125,7 +125,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (berthCallEntityList == null || berthCallEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "BerthCallEntity", berthCallEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "BerthCallEntity", berthCallEntityList.size());
batchJdbcTemplate.batchUpdate(sql, berthCallEntityList, berthCallEntityList.size(),
(ps, entity) -> {
@ -137,7 +137,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "BerthCallEntity", berthCallEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "BerthCallEntity", berthCallEntityList.size());
}
public void bindBerthCall(PreparedStatement pstmt, BerthCallEntity entity) throws Exception {
@ -170,7 +170,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (currentlyAtEntityList == null || currentlyAtEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
batchJdbcTemplate.batchUpdate(sql, currentlyAtEntityList, currentlyAtEntityList.size(),
(ps, entity) -> {
@ -182,7 +182,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
}
public void bindCurrentlyAt(PreparedStatement pstmt, CurrentlyAtEntity entity) throws Exception {
@ -218,7 +218,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (destinationEntityList == null || destinationEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "DestinationEntity", destinationEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "DestinationEntity", destinationEntityList.size());
batchJdbcTemplate.batchUpdate(sql, destinationEntityList, destinationEntityList.size(),
(ps, entity) -> {
@ -230,7 +230,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "DestinationEntity", destinationEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "DestinationEntity", destinationEntityList.size());
}
public void bindDestination(PreparedStatement pstmt, DestinationEntity entity) throws Exception {
@ -257,7 +257,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (portCallEntityList == null || portCallEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "PortCallEntity", portCallEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "PortCallEntity", portCallEntityList.size());
batchJdbcTemplate.batchUpdate(sql, portCallEntityList, portCallEntityList.size(),
(ps, entity) -> {
@ -269,7 +269,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "PortCallEntity", portCallEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "PortCallEntity", portCallEntityList.size());
}
public void bindPortCall(PreparedStatement pstmt, PortCallEntity entity) throws Exception {
@ -305,7 +305,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (stsOperationEntityList == null || stsOperationEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "StsOperationEntity", stsOperationEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "StsOperationEntity", stsOperationEntityList.size());
batchJdbcTemplate.batchUpdate(sql, stsOperationEntityList, stsOperationEntityList.size(),
(ps, entity) -> {
@ -317,7 +317,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "StsOperationEntity", stsOperationEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "StsOperationEntity", stsOperationEntityList.size());
}
public void bindStsOperation(PreparedStatement pstmt, StsOperationEntity entity) throws Exception {
@ -351,7 +351,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (terminalCallEntityList == null || terminalCallEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
batchJdbcTemplate.batchUpdate(sql, terminalCallEntityList, terminalCallEntityList.size(),
(ps, entity) -> {
@ -363,7 +363,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
}
public void bindTerminalCall(PreparedStatement pstmt, TerminalCallEntity entity) throws Exception {
@ -399,7 +399,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
if (transitEntityList == null || transitEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "TransitEntity", transitEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "TransitEntity", transitEntityList.size());
batchJdbcTemplate.batchUpdate(sql, transitEntityList, transitEntityList.size(),
(ps, entity) -> {
@ -411,7 +411,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
}
});
log.debug("{} 배치 삽입 완료: {} 건", "TransitEntity", transitEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "TransitEntity", transitEntityList.size());
}
public void bindTransit(PreparedStatement pstmt, TransitEntity entity) throws Exception {

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.psc.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
@ -129,20 +129,17 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
@Bean
public BatchWriteListener<PscDetailEntity> pscDetailWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscDetail);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePscDetail);
}
@Bean
public BatchWriteListener<PscDefectEntity> pscDefectWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscDefect);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePscDefect);
}
@Bean
public BatchWriteListener<PscAllCertificateEntity> pscAllCertificateWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscAllCertificate);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePscAllCertificate);
}
// --- Steps ---
@ -151,7 +148,7 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscDetailSyncStep() {
log.info("Step 생성: pscDetailSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<PscDetailDto, PscDetailEntity>chunk(getChunkSize(), transactionManager)
.<PscDetailDto, PscDetailEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
@ -163,7 +160,7 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscDefectSyncStep() {
log.info("Step 생성: pscDefectSyncStep");
return new StepBuilder("pscDefectSyncStep", jobRepository)
.<PscDefectDto, PscDefectEntity>chunk(getChunkSize(), transactionManager)
.<PscDefectDto, PscDefectEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(pscDefectReader(businessDataSource, tableMetaInfo))
.processor(new PscDefectProcessor())
.writer(new PscDefectWriter(pscRepository, transactionManager, subChunkSize))
@ -175,7 +172,7 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
public Step pscAllCertificateSyncStep() {
log.info("Step 생성: pscAllCertificateSyncStep");
return new StepBuilder("pscAllCertificateSyncStep", jobRepository)
.<PscAllCertificateDto, PscAllCertificateEntity>chunk(getChunkSize(), transactionManager)
.<PscAllCertificateDto, PscAllCertificateEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(pscAllCertificateReader(businessDataSource, tableMetaInfo))
.processor(new PscAllCertificateProcessor())
.writer(new PscAllCertificateWriter(pscRepository, transactionManager, subChunkSize))

파일 보기

@ -83,7 +83,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
if (pscDetailEntityList == null || pscDetailEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "PscDetailEntity", pscDetailEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "PscDetailEntity", pscDetailEntityList.size());
batchJdbcTemplate.batchUpdate(sql, pscDetailEntityList, pscDetailEntityList.size(),
(ps, entity) -> {
@ -95,7 +95,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
}
});
log.debug("{} 배치 삽입 완료: {} 건", "PscDetailEntity", pscDetailEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "PscDetailEntity", pscDetailEntityList.size());
}
public void bindPscDetail(PreparedStatement pstmt, PscDetailEntity entity) throws Exception {
@ -139,7 +139,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
if (pscDefectEntityList == null || pscDefectEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "PscDefectEntity", pscDefectEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "PscDefectEntity", pscDefectEntityList.size());
batchJdbcTemplate.batchUpdate(sql, pscDefectEntityList, pscDefectEntityList.size(),
(ps, entity) -> {
@ -151,7 +151,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
}
});
log.debug("{} 배치 삽입 완료: {} 건", "PscDefectEntity", pscDefectEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "PscDefectEntity", pscDefectEntityList.size());
}
public void bindPscDefect(PreparedStatement pstmt, PscDefectEntity entity) throws Exception {
@ -189,7 +189,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
if (pscAllCertificateEntityList == null || pscAllCertificateEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
batchJdbcTemplate.batchUpdate(sql, pscAllCertificateEntityList, pscAllCertificateEntityList.size(),
(ps, entity) -> {
@ -201,7 +201,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
}
});
log.debug("{} 배치 삽입 완료: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
}
public void bindPscAllCertificate(PreparedStatement pstmt, PscAllCertificateEntity entity) throws Exception {

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.risk.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity;
@ -99,8 +99,7 @@ public class RiskSyncJobConfig extends BaseJobConfig<RiskDto, RiskEntity> {
@Bean
public BatchWriteListener<RiskEntity> riskWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceRisk);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceRisk);
}
// --- Steps ---
@ -109,7 +108,7 @@ public class RiskSyncJobConfig extends BaseJobConfig<RiskDto, RiskEntity> {
public Step riskSyncStep() {
log.info("Step 생성: riskSyncStep");
return new StepBuilder(getStepName(), jobRepository)
.<RiskDto, RiskEntity>chunk(getChunkSize(), transactionManager)
.<RiskDto, RiskEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())

파일 보기

@ -80,7 +80,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
if (riskEntityList == null || riskEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
batchJdbcTemplate.batchUpdate(sql, riskEntityList, riskEntityList.size(),
(ps, entity) -> {
@ -92,7 +92,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
}
});
log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
}
@Override
@ -101,7 +101,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
if (riskEntityList == null || riskEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
batchJdbcTemplate.batchUpdate(sql, riskEntityList, riskEntityList.size(),
(ps, entity) -> {
@ -113,7 +113,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
}
});
log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
}
public void bindRisk(PreparedStatement pstmt, RiskEntity entity) throws Exception {

파일 보기

@ -2,7 +2,7 @@ package com.snp.batch.jobs.datasync.batch.ship.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.common.util.BatchWriteListener;
import com.snp.batch.common.util.CommonSql;
import com.snp.batch.common.util.TableMetaInfo;
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
@ -472,158 +472,132 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
// --- Listeners ---
@Bean
public BatchWriteListener<ShipInfoMstEntity> shipWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceShipDetailData);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceShipDetailData);
}
@Bean
public BatchWriteListener<OwnerHistoryEntity> ownerHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceOwnerHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceOwnerHistory);
}
@Bean
public BatchWriteListener<ShipAddInfoEntity> shipAddInfoWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceAdditionalShipsData);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceAdditionalShipsData);
}
@Bean
public BatchWriteListener<BareboatCharterHistoryEntity> bareboatCharterHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceBareboatCharterHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceBareboatCharterHistory);
}
@Bean
public BatchWriteListener<CallsignAndMmsiHistoryEntity> callsignAndMmsiHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCallsignAndMmsiHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCallsignAndMmsiHistory);
}
@Bean
public BatchWriteListener<ClassHistoryEntity> classHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceClassHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceClassHistory);
}
@Bean
public BatchWriteListener<CompanyVesselRelationshipsEntity> companyVesselRelationshipsWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCompanyVesselRelationships);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCompanyVesselRelationships);
}
@Bean
public BatchWriteListener<CrewListEntity> crewListWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCrewList);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCrewList);
}
@Bean
public BatchWriteListener<DarkActivityConfirmedEntity> darkActivityConfirmedWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceDarkActivityConfirmed);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceDarkActivityConfirmed);
}
@Bean
public BatchWriteListener<FlagHistoryEntity> flagHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFlagHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceFlagHistory);
}
@Bean
public BatchWriteListener<GroupBeneficialOwnerHistoryEntity> groupBeneficialOwnerHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceGroupBeneficialOwnerHistory);
}
@Bean
public BatchWriteListener<IceClassEntity> iceClassWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceIceClass);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceIceClass);
}
@Bean
public BatchWriteListener<NameHistoryEntity> nameHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceNameHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceNameHistory);
}
@Bean
public BatchWriteListener<OperatorHistoryEntity> operatorHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceOperatorHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceOperatorHistory);
}
@Bean
public BatchWriteListener<PandIHistoryEntity> pandIHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePandiHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePandiHistory);
}
@Bean
public BatchWriteListener<SafetyManagementCertificateHistEntity> safetyManagementCertificateHistWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSafetyManagementCertificateHist);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSafetyManagementCertificateHist);
}
@Bean
public BatchWriteListener<ShipManagerHistoryEntity> shipManagerHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceShipManagerHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceShipManagerHistory);
}
@Bean
public BatchWriteListener<SisterShipLinksEntity> sisterShipLinksWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSisterShipLinks);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSisterShipLinks);
}
@Bean
public BatchWriteListener<SpecialFeatureEntity> specialFeatureWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSpecialFeature);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSpecialFeature);
}
@Bean
public BatchWriteListener<StatusHistoryEntity> statusHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStatusHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceStatusHistory);
}
@Bean
public BatchWriteListener<StowageCommodityEntity> stowageCommodityWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStowageCommodity);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceStowageCommodity);
}
@Bean
public BatchWriteListener<SurveyDatesEntity> surveyDatesWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSurveyDates);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSurveyDates);
}
@Bean
public BatchWriteListener<SurveyDatesHistoryUniqueEntity> surveyDatesHistoryUniqueWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSurveyDatesHistoryUnique);
}
@Bean
public BatchWriteListener<TechnicalManagerHistoryEntity> technicalManagerHistoryWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTechnicalManagerHistory);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTechnicalManagerHistory);
}
@Bean
public BatchWriteListener<ThrustersEntity> thrustersWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceThrusters);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceThrusters);
}
@Bean
public BatchWriteListener<TbCompanyDetailEntity> tbCompanyDetailWriteListener() {
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTbCompanyDetail);
return new BatchWriteListener<>(businessJdbcTemplate, sql);
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTbCompanyDetail);
}
// --- Steps ---
@ -631,7 +605,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
@Bean(name = "snpShipDetailSyncStep")
public Step snpShipDetailSyncStep() {
return new StepBuilder(getStepName(), jobRepository)
.<ShipInfoMstDto, ShipInfoMstEntity>chunk(getChunkSize(), transactionManager)
.<ShipInfoMstDto, ShipInfoMstEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(createReader())
.processor(createProcessor())
.writer(createWriter())
@ -643,7 +617,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step ownerHistorySyncStep() {
log.info("Step 생성: ownerHistorySyncStep");
return new StepBuilder("ownerHistorySyncStep", jobRepository)
.<OwnerHistoryDto, OwnerHistoryEntity>chunk(getChunkSize(), transactionManager)
.<OwnerHistoryDto, OwnerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(ownerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new OwnerHistoryProcessor())
.writer(new OwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -655,7 +629,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step shipAddInfoSyncStep() {
log.info("Step 생성: shipAddInfoSyncStep");
return new StepBuilder("shipAddInfoSyncStep", jobRepository)
.<ShipAddInfoDto, ShipAddInfoEntity>chunk(getChunkSize(), transactionManager)
.<ShipAddInfoDto, ShipAddInfoEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(shipAddInfoReader(businessDataSource, tableMetaInfo))
.processor(new ShipAddInfoProcessor())
.writer(new ShipAddInfoWriter(shipRepository, transactionManager, subChunkSize))
@ -667,7 +641,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step bareboatCharterHistorySyncStep() {
log.info("Step 생성: bareboatCharterHistorySyncStep");
return new StepBuilder("bareboatCharterHistorySyncStep", jobRepository)
.<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(getChunkSize(), transactionManager)
.<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(bareboatCharterHistoryReader(businessDataSource, tableMetaInfo))
.processor(new BareboatCharterHistoryProcessor())
.writer(new BareboatCharterHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -679,7 +653,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step callsignAndMmsiHistorySyncStep() {
log.info("Step 생성: callsignAndMmsiHistorySyncStep");
return new StepBuilder("callsignAndMmsiHistorySyncStep", jobRepository)
.<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(getChunkSize(), transactionManager)
.<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(callsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo))
.processor(new CallsignAndMmsiHistoryProcessor())
.writer(new CallsignAndMmsiHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -691,7 +665,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step classHistorySyncStep() {
log.info("Step 생성: classHistorySyncStep");
return new StepBuilder("classHistorySyncStep", jobRepository)
.<ClassHistoryDto, ClassHistoryEntity>chunk(getChunkSize(), transactionManager)
.<ClassHistoryDto, ClassHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(classHistoryReader(businessDataSource, tableMetaInfo))
.processor(new ClassHistoryProcessor())
.writer(new ClassHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -703,7 +677,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step companyVesselRelationshipsSyncStep() {
log.info("Step 생성: companyVesselRelationshipsSyncStep");
return new StepBuilder("companyVesselRelationshipsSyncStep", jobRepository)
.<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(getChunkSize(), transactionManager)
.<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(companyVesselRelationshipsReader(businessDataSource, tableMetaInfo))
.processor(new CompanyVesselRelationshipsProcessor())
.writer(new CompanyVesselRelationshipsWriter(shipRepository, transactionManager, subChunkSize))
@ -715,7 +689,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step crewListSyncStep() {
log.info("Step 생성: crewListSyncStep");
return new StepBuilder("crewListSyncStep", jobRepository)
.<CrewListDto, CrewListEntity>chunk(getChunkSize(), transactionManager)
.<CrewListDto, CrewListEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(crewListReader(businessDataSource, tableMetaInfo))
.processor(new CrewListProcessor())
.writer(new CrewListWriter(shipRepository, transactionManager, subChunkSize))
@ -727,7 +701,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step darkActivityConfirmedSyncStep() {
log.info("Step 생성: darkActivityConfirmedSyncStep");
return new StepBuilder("darkActivityConfirmedSyncStep", jobRepository)
.<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(getChunkSize(), transactionManager)
.<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(darkActivityConfirmedReader(businessDataSource, tableMetaInfo))
.processor(new DarkActivityConfirmedProcessor())
.writer(new DarkActivityConfirmedWriter(shipRepository, transactionManager, subChunkSize))
@ -739,7 +713,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step flagHistorySyncStep() {
log.info("Step 생성: flagHistorySyncStep");
return new StepBuilder("flagHistorySyncStep", jobRepository)
.<FlagHistoryDto, FlagHistoryEntity>chunk(getChunkSize(), transactionManager)
.<FlagHistoryDto, FlagHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(flagHistoryReader(businessDataSource, tableMetaInfo))
.processor(new FlagHistoryProcessor())
.writer(new FlagHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -751,7 +725,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step groupBeneficialOwnerHistorySyncStep() {
log.info("Step 생성: groupBeneficialOwnerHistorySyncStep");
return new StepBuilder("groupBeneficialOwnerHistorySyncStep", jobRepository)
.<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(getChunkSize(), transactionManager)
.<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(groupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new GroupBeneficialOwnerHistoryProcessor())
.writer(new GroupBeneficialOwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -763,7 +737,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step iceClassSyncStep() {
log.info("Step 생성: iceClassSyncStep");
return new StepBuilder("iceClassSyncStep", jobRepository)
.<IceClassDto, IceClassEntity>chunk(getChunkSize(), transactionManager)
.<IceClassDto, IceClassEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(iceClassReader(businessDataSource, tableMetaInfo))
.processor(new IceClassProcessor())
.writer(new IceClassWriter(shipRepository, transactionManager, subChunkSize))
@ -775,7 +749,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step nameHistorySyncStep() {
log.info("Step 생성: nameHistorySyncStep");
return new StepBuilder("nameHistorySyncStep", jobRepository)
.<NameHistoryDto, NameHistoryEntity>chunk(getChunkSize(), transactionManager)
.<NameHistoryDto, NameHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(nameHistoryReader(businessDataSource, tableMetaInfo))
.processor(new NameHistoryProcessor())
.writer(new NameHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -787,7 +761,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step operatorHistorySyncStep() {
log.info("Step 생성: operatorHistorySyncStep");
return new StepBuilder("operatorHistorySyncStep", jobRepository)
.<OperatorHistoryDto, OperatorHistoryEntity>chunk(getChunkSize(), transactionManager)
.<OperatorHistoryDto, OperatorHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(operatorHistoryReader(businessDataSource, tableMetaInfo))
.processor(new OperatorHistoryProcessor())
.writer(new OperatorHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -799,7 +773,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step pandIHistorySyncStep() {
log.info("Step 생성: pandIHistorySyncStep");
return new StepBuilder("pandIHistorySyncStep", jobRepository)
.<PandIHistoryDto, PandIHistoryEntity>chunk(getChunkSize(), transactionManager)
.<PandIHistoryDto, PandIHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(pandIHistoryReader(businessDataSource, tableMetaInfo))
.processor(new PandIHistoryProcessor())
.writer(new PandIHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -811,7 +785,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step safetyManagementCertificateHistSyncStep() {
log.info("Step 생성: safetyManagementCertificateHistSyncStep");
return new StepBuilder("safetyManagementCertificateHistSyncStep", jobRepository)
.<SafetyManagementCertificateHistDto, SafetyManagementCertificateHistEntity>chunk(getChunkSize(), transactionManager)
.<SafetyManagementCertificateHistDto, SafetyManagementCertificateHistEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(safetyManagementCertificateHistReader(businessDataSource, tableMetaInfo))
.processor(new SafetyManagementCertificateHistProcessor())
.writer(new SafetyManagementCertificateHistWriter(shipRepository, transactionManager, subChunkSize))
@ -823,7 +797,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step shipManagerHistorySyncStep() {
log.info("Step 생성: shipManagerHistorySyncStep");
return new StepBuilder("shipManagerHistorySyncStep", jobRepository)
.<ShipManagerHistoryDto, ShipManagerHistoryEntity>chunk(getChunkSize(), transactionManager)
.<ShipManagerHistoryDto, ShipManagerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(shipManagerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new ShipManagerHistoryProcessor())
.writer(new ShipManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -835,7 +809,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step sisterShipLinksSyncStep() {
log.info("Step 생성: sisterShipLinksSyncStep");
return new StepBuilder("sisterShipLinksSyncStep", jobRepository)
.<SisterShipLinksDto, SisterShipLinksEntity>chunk(getChunkSize(), transactionManager)
.<SisterShipLinksDto, SisterShipLinksEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(sisterShipLinksReader(businessDataSource, tableMetaInfo))
.processor(new SisterShipLinksProcessor())
.writer(new SisterShipLinksWriter(shipRepository, transactionManager, subChunkSize))
@ -847,7 +821,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step specialFeatureSyncStep() {
log.info("Step 생성: specialFeatureSyncStep");
return new StepBuilder("specialFeatureSyncStep", jobRepository)
.<SpecialFeatureDto, SpecialFeatureEntity>chunk(getChunkSize(), transactionManager)
.<SpecialFeatureDto, SpecialFeatureEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(specialFeatureReader(businessDataSource, tableMetaInfo))
.processor(new SpecialFeatureProcessor())
.writer(new SpecialFeatureWriter(shipRepository, transactionManager, subChunkSize))
@ -859,7 +833,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step statusHistorySyncStep() {
log.info("Step 생성: statusHistorySyncStep");
return new StepBuilder("statusHistorySyncStep", jobRepository)
.<StatusHistoryDto, StatusHistoryEntity>chunk(getChunkSize(), transactionManager)
.<StatusHistoryDto, StatusHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(statusHistoryReader(businessDataSource, tableMetaInfo))
.processor(new StatusHistoryProcessor())
.writer(new StatusHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -871,7 +845,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step stowageCommoditySyncStep() {
log.info("Step 생성: stowageCommoditySyncStep");
return new StepBuilder("stowageCommoditySyncStep", jobRepository)
.<StowageCommodityDto, StowageCommodityEntity>chunk(getChunkSize(), transactionManager)
.<StowageCommodityDto, StowageCommodityEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(stowageCommodityReader(businessDataSource, tableMetaInfo))
.processor(new StowageCommodityProcessor())
.writer(new StowageCommodityWriter(shipRepository, transactionManager, subChunkSize))
@ -883,7 +857,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step surveyDatesSyncStep() {
log.info("Step 생성: surveyDatesSyncStep");
return new StepBuilder("surveyDatesSyncStep", jobRepository)
.<SurveyDatesDto, SurveyDatesEntity>chunk(getChunkSize(), transactionManager)
.<SurveyDatesDto, SurveyDatesEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(surveyDatesReader(businessDataSource, tableMetaInfo))
.processor(new SurveyDatesProcessor())
.writer(new SurveyDatesWriter(shipRepository, transactionManager, subChunkSize))
@ -895,7 +869,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step surveyDatesHistoryUniqueSyncStep() {
log.info("Step 생성: surveyDatesHistoryUniqueSyncStep");
return new StepBuilder("surveyDatesHistoryUniqueSyncStep", jobRepository)
.<SurveyDatesHistoryUniqueDto, SurveyDatesHistoryUniqueEntity>chunk(getChunkSize(), transactionManager)
.<SurveyDatesHistoryUniqueDto, SurveyDatesHistoryUniqueEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(surveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo))
.processor(new SurveyDatesHistoryUniqueProcessor())
.writer(new SurveyDatesHistoryUniqueWriter(shipRepository, transactionManager, subChunkSize))
@ -907,7 +881,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step technicalManagerHistorySyncStep() {
log.info("Step 생성: technicalManagerHistorySyncStep");
return new StepBuilder("technicalManagerHistorySyncStep", jobRepository)
.<TechnicalManagerHistoryDto, TechnicalManagerHistoryEntity>chunk(getChunkSize(), transactionManager)
.<TechnicalManagerHistoryDto, TechnicalManagerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(technicalManagerHistoryReader(businessDataSource, tableMetaInfo))
.processor(new TechnicalManagerHistoryProcessor())
.writer(new TechnicalManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
@ -919,7 +893,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step thrustersSyncStep() {
log.info("Step 생성: thrustersSyncStep");
return new StepBuilder("thrustersSyncStep", jobRepository)
.<ThrustersDto, ThrustersEntity>chunk(getChunkSize(), transactionManager)
.<ThrustersDto, ThrustersEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(thrustersReader(businessDataSource, tableMetaInfo))
.processor(new ThrustersProcessor())
.writer(new ThrustersWriter(shipRepository, transactionManager, subChunkSize))
@ -931,7 +905,7 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
public Step tbCompanyDetailSyncStep() {
log.info("Step 생성: tbCompanyDetailSyncStep");
return new StepBuilder("tbCompanyDetailSyncStep", jobRepository)
.<TbCompanyDetailDto, TbCompanyDetailEntity>chunk(getChunkSize(), transactionManager)
.<TbCompanyDetailDto, TbCompanyDetailEntity>chunk(Integer.MAX_VALUE, transactionManager)
.reader(tbCompanyDetailReader(businessDataSource, tableMetaInfo))
.processor(new TbCompanyDetailProcessor())
.writer(new TbCompanyDetailWriter(shipRepository, transactionManager, subChunkSize))

파일 보기

@ -237,7 +237,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (shipAddInfoEntityList == null || shipAddInfoEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
batchJdbcTemplate.batchUpdate(sql, shipAddInfoEntityList, shipAddInfoEntityList.size(),
(ps, entity) -> {
@ -249,7 +249,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
}
public void bindShipAddInfo(PreparedStatement pstmt, ShipAddInfoEntity entity) throws Exception {
@ -276,7 +276,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (bareboatCharterHistoryEntityList == null || bareboatCharterHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, bareboatCharterHistoryEntityList, bareboatCharterHistoryEntityList.size(),
(ps, entity) -> {
@ -288,7 +288,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
}
public void bindBareboatCharterHistory(PreparedStatement pstmt, BareboatCharterHistoryEntity entity) throws Exception {
@ -308,7 +308,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (callsignAndMmsiHistoryEntityList == null || callsignAndMmsiHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, callsignAndMmsiHistoryEntityList, callsignAndMmsiHistoryEntityList.size(),
(ps, entity) -> {
@ -320,7 +320,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
}
public void bindCallsignAndMmsiHistory(PreparedStatement pstmt, CallsignAndMmsiHistoryEntity entity) throws Exception {
@ -340,7 +340,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (classHistoryEntityList == null || classHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, classHistoryEntityList, classHistoryEntityList.size(),
(ps, entity) -> {
@ -352,7 +352,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
}
public void bindClassHistory(PreparedStatement pstmt, ClassHistoryEntity entity) throws Exception {
@ -375,7 +375,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (companyVesselRelationshipsEntityList == null || companyVesselRelationshipsEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
batchJdbcTemplate.batchUpdate(sql, companyVesselRelationshipsEntityList, companyVesselRelationshipsEntityList.size(),
(ps, entity) -> {
@ -387,7 +387,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
}
public void bindCompanyVesselRelationships(PreparedStatement pstmt, CompanyVesselRelationshipsEntity entity) throws Exception {
@ -423,7 +423,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (crewListEntityList == null || crewListEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "CrewListEntity", crewListEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "CrewListEntity", crewListEntityList.size());
batchJdbcTemplate.batchUpdate(sql, crewListEntityList, crewListEntityList.size(),
(ps, entity) -> {
@ -435,7 +435,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "CrewListEntity", crewListEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "CrewListEntity", crewListEntityList.size());
}
public void bindCrewList(PreparedStatement pstmt, CrewListEntity entity) throws Exception {
@ -462,7 +462,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (darkActivityConfirmedEntityList == null || darkActivityConfirmedEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
batchJdbcTemplate.batchUpdate(sql, darkActivityConfirmedEntityList, darkActivityConfirmedEntityList.size(),
(ps, entity) -> {
@ -474,7 +474,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
}
public void bindDarkActivityConfirmed(PreparedStatement pstmt, DarkActivityConfirmedEntity entity) throws Exception {
@ -515,7 +515,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (flagHistoryEntityList == null || flagHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, flagHistoryEntityList, flagHistoryEntityList.size(),
(ps, entity) -> {
@ -527,7 +527,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
}
public void bindFlagHistory(PreparedStatement pstmt, FlagHistoryEntity entity) throws Exception {
@ -547,7 +547,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (groupBeneficialOwnerHistoryEntityList == null || groupBeneficialOwnerHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, groupBeneficialOwnerHistoryEntityList, groupBeneficialOwnerHistoryEntityList.size(),
(ps, entity) -> {
@ -559,7 +559,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
}
public void bindGroupBeneficialOwnerHistory(PreparedStatement pstmt, GroupBeneficialOwnerHistoryEntity entity) throws Exception {
@ -580,7 +580,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (iceClassEntityList == null || iceClassEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "IceClassEntity", iceClassEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "IceClassEntity", iceClassEntityList.size());
batchJdbcTemplate.batchUpdate(sql, iceClassEntityList, iceClassEntityList.size(),
(ps, entity) -> {
@ -592,7 +592,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "IceClassEntity", iceClassEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "IceClassEntity", iceClassEntityList.size());
}
public void bindIceClass(PreparedStatement pstmt, IceClassEntity entity) throws Exception {
@ -610,7 +610,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (nameHistoryEntityList == null || nameHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, nameHistoryEntityList, nameHistoryEntityList.size(),
(ps, entity) -> {
@ -622,7 +622,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
}
public void bindNameHistory(PreparedStatement pstmt, NameHistoryEntity entity) throws Exception {
@ -641,7 +641,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (operatorHistoryEntityList == null || operatorHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, operatorHistoryEntityList, operatorHistoryEntityList.size(),
(ps, entity) -> {
@ -653,7 +653,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
}
public void bindOperatorHistory(PreparedStatement pstmt, OperatorHistoryEntity entity) throws Exception {
@ -674,7 +674,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (ownerHistoryEntityList == null || ownerHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, ownerHistoryEntityList, ownerHistoryEntityList.size(),
(ps, entity) -> {
@ -686,7 +686,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
}
public void bindOwnerHistory(PreparedStatement pstmt, OwnerHistoryEntity entity) throws Exception {
@ -707,7 +707,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (pandIHistoryEntityList == null || pandIHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, pandIHistoryEntityList, pandIHistoryEntityList.size(),
(ps, entity) -> {
@ -719,7 +719,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
}
public void bindPandIHistory(PreparedStatement pstmt, PandIHistoryEntity entity) throws Exception {
@ -740,7 +740,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (safetyManagementCertificateHistEntityList == null || safetyManagementCertificateHistEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
batchJdbcTemplate.batchUpdate(sql, safetyManagementCertificateHistEntityList, safetyManagementCertificateHistEntityList.size(),
(ps, entity) -> {
@ -752,7 +752,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
}
public void bindSafetyManagementCertificateHist(PreparedStatement pstmt, SafetyManagementCertificateHistEntity entity) throws Exception {
@ -781,7 +781,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (shipManagerHistoryEntityList == null || shipManagerHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, shipManagerHistoryEntityList, shipManagerHistoryEntityList.size(),
(ps, entity) -> {
@ -793,7 +793,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
}
public void bindShipManagerHistory(PreparedStatement pstmt, ShipManagerHistoryEntity entity) throws Exception {
@ -814,7 +814,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (sisterShipLinksEntityList == null || sisterShipLinksEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
batchJdbcTemplate.batchUpdate(sql, sisterShipLinksEntityList, sisterShipLinksEntityList.size(),
(ps, entity) -> {
@ -826,7 +826,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
}
public void bindSisterShipLinks(PreparedStatement pstmt, SisterShipLinksEntity entity) throws Exception {
@ -843,7 +843,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (specialFeatureEntityList == null || specialFeatureEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
batchJdbcTemplate.batchUpdate(sql, specialFeatureEntityList, specialFeatureEntityList.size(),
(ps, entity) -> {
@ -855,7 +855,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
}
public void bindSpecialFeature(PreparedStatement pstmt, SpecialFeatureEntity entity) throws Exception {
@ -874,7 +874,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (statusHistoryEntityList == null || statusHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, statusHistoryEntityList, statusHistoryEntityList.size(),
(ps, entity) -> {
@ -886,7 +886,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
}
public void bindStatusHistory(PreparedStatement pstmt, StatusHistoryEntity entity) throws Exception {
@ -906,7 +906,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (stowageCommodityEntityList == null || stowageCommodityEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
batchJdbcTemplate.batchUpdate(sql, stowageCommodityEntityList, stowageCommodityEntityList.size(),
(ps, entity) -> {
@ -918,7 +918,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
}
public void bindStowageCommodity(PreparedStatement pstmt, StowageCommodityEntity entity) throws Exception {
@ -939,7 +939,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (surveyDatesEntityList == null || surveyDatesEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
batchJdbcTemplate.batchUpdate(sql, surveyDatesEntityList, surveyDatesEntityList.size(),
(ps, entity) -> {
@ -951,7 +951,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
}
public void bindSurveyDates(PreparedStatement pstmt, SurveyDatesEntity entity) throws Exception {
@ -974,7 +974,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (surveyDatesHistoryUniqueEntityList == null || surveyDatesHistoryUniqueEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
batchJdbcTemplate.batchUpdate(sql, surveyDatesHistoryUniqueEntityList, surveyDatesHistoryUniqueEntityList.size(),
(ps, entity) -> {
@ -986,7 +986,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
}
public void bindSurveyDatesHistoryUnique(PreparedStatement pstmt, SurveyDatesHistoryUniqueEntity entity) throws Exception {
@ -1006,7 +1006,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (technicalManagerHistoryEntityList == null || technicalManagerHistoryEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
batchJdbcTemplate.batchUpdate(sql, technicalManagerHistoryEntityList, technicalManagerHistoryEntityList.size(),
(ps, entity) -> {
@ -1018,7 +1018,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
}
public void bindTechnicalManagerHistory(PreparedStatement pstmt, TechnicalManagerHistoryEntity entity) throws Exception {
@ -1039,7 +1039,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (thrustersEntityList == null || thrustersEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "ThrustersEntity", thrustersEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "ThrustersEntity", thrustersEntityList.size());
batchJdbcTemplate.batchUpdate(sql, thrustersEntityList, thrustersEntityList.size(),
(ps, entity) -> {
@ -1051,7 +1051,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "ThrustersEntity", thrustersEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "ThrustersEntity", thrustersEntityList.size());
}
public void bindThrusters(PreparedStatement pstmt, ThrustersEntity entity) throws Exception {
@ -1075,7 +1075,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
if (tbCompanyDetailEntityList == null || tbCompanyDetailEntityList.isEmpty()) {
return;
}
log.debug("{} 배치 삽입 시작: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
// log.debug("{} 배치 삽입 시작: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
batchJdbcTemplate.batchUpdate(sql, tbCompanyDetailEntityList, tbCompanyDetailEntityList.size(),
(ps, entity) -> {
@ -1087,7 +1087,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
}
});
log.debug("{} 배치 삽입 완료: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
// log.debug("{} 배치 삽입 완료: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
}
public void bindTbCompanyDetail(PreparedStatement pstmt, TbCompanyDetailEntity entity) throws Exception {