snp-batch-validation/src/main/java/com/snp/batch/service/BatchService.java
HYOJIN 2bc2f1fc32 feat(recollection): 자동 재수집 및 재수집 프로세스 전면 개선 (#30)
- 자동 재수집 리스너(AutoRetryJobExecutionListener) 및 비동기 트리거 서비스 추가
- 실패 레코드 최대 재시도 횟수(3회) 제한으로 무한 루프 방지
- 전용 스레드 풀(autoRetryExecutor) 분리
- last_success_date 복원 시 경합 조건 보호
- 재수집 이력 N+1 쿼리 해결 (벌크 조회)
- 실패 레코드 일괄 RESOLVED 처리 API 추가
- 재수집 이력 CSV 내보내기 API 추가 (UTF-8 BOM)
- 프론트엔드 공유 컴포넌트 추출 (StatCard, CopyButton, ApiLogSection, InfoItem)
- 대시보드 재수집 통계 위젯 추가
- 실행 이력 미해결 건수 COMPLETED 상태만 표시

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 17:28:23 +09:00

947 lines
46 KiB
Java

package com.snp.batch.service;
import com.snp.batch.common.batch.listener.RecollectionJobExecutionListener;
import com.snp.batch.global.dto.*;
import com.snp.batch.global.model.BatchApiLog;
import com.snp.batch.global.model.BatchFailedRecord;
import com.snp.batch.global.model.BatchLastExecution;
import com.snp.batch.global.repository.BatchApiLogRepository;
import com.snp.batch.global.repository.BatchFailedRecordRepository;
import com.snp.batch.global.repository.BatchLastExecutionRepository;
import com.snp.batch.global.repository.TimelineRepository;
import jakarta.annotation.PostConstruct;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.job.AbstractJob;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.JobOperator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Facade service for the batch admin UI: launches and stops Spring Batch jobs,
 * exposes execution history, builds timeline/dashboard aggregations, cleans up
 * stale executions, and enriches results with failed-record counts.
 */
@Slf4j
@Service
public class BatchService {

    private final JobLauncher jobLauncher;        // synchronous job launching
    private final JobExplorer jobExplorer;        // read-only batch metadata access
    private final JobOperator jobOperator;        // used to stop running executions
    private final Map<String, Job> jobMap;        // every registered Job bean, keyed by name
    private final ScheduleService scheduleService; // injected @Lazy (see constructor note)
    private final TimelineRepository timelineRepository;
    private final RecollectionJobExecutionListener recollectionJobExecutionListener;
    private final BatchApiLogRepository apiLogRepository;
    private final BatchFailedRecordRepository failedRecordRepository;
    private final BatchLastExecutionRepository batchLastExecutionRepository;

    /**
     * All collaborators are constructor-injected. {@code scheduleService} is
     * {@code @Lazy}, presumably to break a circular bean dependency — TODO confirm.
     */
    @Autowired
    public BatchService(JobLauncher jobLauncher,
                        JobExplorer jobExplorer,
                        JobOperator jobOperator,
                        Map<String, Job> jobMap,
                        @Lazy ScheduleService scheduleService,
                        TimelineRepository timelineRepository,
                        RecollectionJobExecutionListener recollectionJobExecutionListener,
                        BatchApiLogRepository apiLogRepository,
                        BatchFailedRecordRepository failedRecordRepository,
                        BatchLastExecutionRepository batchLastExecutionRepository) {
        this.jobLauncher = jobLauncher;
        this.jobExplorer = jobExplorer;
        this.jobOperator = jobOperator;
        this.jobMap = jobMap;
        this.scheduleService = scheduleService;
        this.timelineRepository = timelineRepository;
        this.recollectionJobExecutionListener = recollectionJobExecutionListener;
        this.apiLogRepository = apiLogRepository;
        this.failedRecordRepository = failedRecordRepository;
        this.batchLastExecutionRepository = batchLastExecutionRepository;
    }
/**
 * Registers the RecollectionJobExecutionListener on every known Job at startup.
 * The listener checks executionMode internally, so normal runs are unaffected.
 */
@PostConstruct
public void registerGlobalListeners() {
    for (Job job : jobMap.values()) {
        // Only AbstractJob exposes listener registration after construction.
        if (job instanceof AbstractJob abstractJob) {
            abstractJob.registerJobExecutionListener(recollectionJobExecutionListener);
        }
    }
    log.info("[BatchService] RecollectionJobExecutionListener를 {}개 Job에 등록", jobMap.size());
}
/** Runs a job by name with only the auto-generated timestamp parameter. */
public Long executeJob(String jobName) throws Exception {
    return executeJob(jobName, null);
}

/**
 * Runs a job by name with optional string parameters. A unique "timestamp"
 * parameter is always added so repeated launches create distinct JobInstances;
 * any caller-supplied "timestamp" entry is ignored for that reason.
 *
 * @return the id of the new JobExecution
 * @throws IllegalArgumentException when no job is registered under the name
 */
public Long executeJob(String jobName, Map<String, String> params) throws Exception {
    Job job = jobMap.get(jobName);
    if (job == null) {
        throw new IllegalArgumentException("Job not found: " + jobName);
    }
    JobParametersBuilder builder = new JobParametersBuilder()
            .addLong("timestamp", System.currentTimeMillis());
    if (params != null) {
        for (Map.Entry<String, String> entry : params.entrySet()) {
            // "timestamp" is reserved for the auto-generated value above
            if (!"timestamp".equals(entry.getKey())) {
                builder.addString(entry.getKey(), entry.getValue());
            }
        }
    }
    JobExecution jobExecution = jobLauncher.run(job, builder.toJobParameters());
    return jobExecution.getId();
}
/** Returns the names of all registered jobs (mutable snapshot). */
public List<String> listAllJobs() {
    List<String> names = new ArrayList<>();
    names.addAll(jobMap.keySet());
    return names;
}
/**
 * Returns up to 100 instances' worth of executions for one job, newest
 * execution id first, each enriched with its failed-record count.
 */
public List<JobExecutionDto> getJobExecutions(String jobName) {
    List<JobExecutionDto> executions = new ArrayList<>();
    for (JobInstance instance : jobExplorer.findJobInstancesByJobName(jobName, 0, 100)) {
        for (JobExecution execution : jobExplorer.getJobExecutions(instance)) {
            executions.add(convertToDto(execution));
        }
    }
    executions.sort(Comparator.comparing(JobExecutionDto::getExecutionId).reversed());
    populateFailedRecordCounts(executions);
    return executions;
}
/** Returns the most recent executions across all jobs, with failed-record counts. */
public List<JobExecutionDto> getRecentExecutions(int limit) {
    List<JobExecutionDto> executions = new ArrayList<>();
    for (Map<String, Object> row : timelineRepository.findRecentExecutions(limit)) {
        executions.add(convertMapToDto(row));
    }
    populateFailedRecordCounts(executions);
    return executions;
}
/**
 * Returns the summary DTO for a single execution.
 *
 * @throws IllegalArgumentException when the execution id is unknown
 */
public JobExecutionDto getExecutionDetails(Long executionId) {
    return convertToDto(requireExecution(executionId));
}

/**
 * Returns the full detail DTO (steps, API logs, failed records) for a single execution.
 *
 * @throws IllegalArgumentException when the execution id is unknown
 */
public com.snp.batch.global.dto.JobExecutionDetailDto getExecutionDetailWithSteps(Long executionId) {
    return convertToDetailDto(requireExecution(executionId));
}

/**
 * Loads a JobExecution or fails fast; both public lookups previously duplicated
 * this fetch-and-null-check, with an identical error message.
 */
private JobExecution requireExecution(Long executionId) {
    JobExecution jobExecution = jobExplorer.getJobExecution(executionId);
    if (jobExecution == null) {
        throw new IllegalArgumentException("Job execution not found: " + executionId);
    }
    return jobExecution;
}
/**
 * Requests a graceful stop of a running execution via the JobOperator.
 * Propagates whatever the operator throws (e.g. unknown id, not running).
 */
public void stopExecution(Long executionId) throws Exception {
    jobOperator.stop(executionId);
}
/** Maps a Spring Batch JobExecution onto the lightweight summary DTO. */
private JobExecutionDto convertToDto(JobExecution jobExecution) {
    var exitStatus = jobExecution.getExitStatus();
    var instance = jobExecution.getJobInstance();
    return JobExecutionDto.builder()
            .executionId(jobExecution.getId())
            .jobName(instance.getJobName())
            .status(jobExecution.getStatus().name())
            .startTime(jobExecution.getStartTime())
            .endTime(jobExecution.getEndTime())
            .exitCode(exitStatus.getExitCode())
            .exitMessage(exitStatus.getExitDescription())
            .build();
}
/**
 * Builds the full detail DTO for one job execution: wall-clock duration,
 * display-formatted job parameters, per-step DTOs, and read/write/skip/filter
 * totals aggregated across all steps.
 */
private com.snp.batch.global.dto.JobExecutionDetailDto convertToDetailDto(JobExecution jobExecution) {
    // Duration in millis; stays null while the execution is still running.
    Long duration = null;
    if (jobExecution.getStartTime() != null && jobExecution.getEndTime() != null) {
        duration = java.time.Duration.between(
                jobExecution.getStartTime(),
                jobExecution.getEndTime()
        ).toMillis();
    }
    // Convert job parameters; the auto-generated "timestamp" is rendered as
    // "<epochMillis> (yyyy-MM-dd HH:mm:ss)" for readability.
    Map<String, Object> params = new java.util.LinkedHashMap<>();
    jobExecution.getJobParameters().getParameters().forEach((key, value) -> {
        Object paramValue = value.getValue();
        // the timestamp parameter is shown together with its formatted form
        if ("timestamp".equals(key) && paramValue instanceof Long) {
            Long timestamp = (Long) paramValue;
            java.time.LocalDateTime dateTime = java.time.LocalDateTime.ofInstant(
                    java.time.Instant.ofEpochMilli(timestamp),
                    java.time.ZoneId.systemDefault()
            );
            String formatted = dateTime.format(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
            params.put(key, timestamp + " (" + formatted + ")");
        } else {
            params.put(key, paramValue);
        }
    });
    // Convert step executions
    List<com.snp.batch.global.dto.JobExecutionDetailDto.StepExecutionDto> stepDtos =
            jobExecution.getStepExecutions().stream()
                    .map(this::convertStepToDto)
                    .collect(Collectors.toList());
    // Aggregate totals across steps (null-safe: step counts may be absent)
    int totalReadCount = stepDtos.stream().mapToInt(s -> s.getReadCount() != null ? s.getReadCount() : 0).sum();
    int totalWriteCount = stepDtos.stream().mapToInt(s -> s.getWriteCount() != null ? s.getWriteCount() : 0).sum();
    int totalSkipCount = stepDtos.stream().mapToInt(s ->
            (s.getReadSkipCount() != null ? s.getReadSkipCount() : 0) +
            (s.getProcessSkipCount() != null ? s.getProcessSkipCount() : 0) +
            (s.getWriteSkipCount() != null ? s.getWriteSkipCount() : 0)
    ).sum();
    int totalFilterCount = stepDtos.stream().mapToInt(s -> s.getFilterCount() != null ? s.getFilterCount() : 0).sum();
    return com.snp.batch.global.dto.JobExecutionDetailDto.builder()
            .executionId(jobExecution.getId())
            .jobName(jobExecution.getJobInstance().getJobName())
            .status(jobExecution.getStatus().name())
            .startTime(jobExecution.getStartTime())
            .endTime(jobExecution.getEndTime())
            .exitCode(jobExecution.getExitStatus().getExitCode())
            .exitMessage(jobExecution.getExitStatus().getExitDescription())
            .jobParameters(params)
            .jobInstanceId(jobExecution.getJobInstance().getInstanceId())
            .duration(duration)
            .readCount(totalReadCount)
            .writeCount(totalWriteCount)
            .skipCount(totalSkipCount)
            .filterCount(totalFilterCount)
            .stepExecutions(stepDtos)
            .build();
}
/**
 * Converts one StepExecution into its DTO, enriched with API-call info from the
 * step's ExecutionContext, aggregated batch_api_log statistics, and any failed
 * records captured for that step.
 */
private com.snp.batch.global.dto.JobExecutionDetailDto.StepExecutionDto convertStepToDto(
        org.springframework.batch.core.StepExecution stepExecution) {
    // Step duration in millis; stays null while still running.
    Long duration = null;
    if (stepExecution.getStartTime() != null && stepExecution.getEndTime() != null) {
        duration = java.time.Duration.between(
                stepExecution.getStartTime(),
                stepExecution.getEndTime()
        ).toMillis();
    }
    // API info recorded by the step into its ExecutionContext (null when absent)
    com.snp.batch.global.dto.JobExecutionDetailDto.ApiCallInfo apiCallInfo = extractApiCallInfo(stepExecution);
    // Aggregated stats from batch_api_log; individual log rows are paged via a separate API
    com.snp.batch.global.dto.JobExecutionDetailDto.StepApiLogSummary apiLogSummary =
            buildStepApiLogSummary(stepExecution.getId());
    // Failed records captured for this step
    List<JobExecutionDetailDto.FailedRecordDto> failedRecordDtos =
            failedRecordRepository.findByStepExecutionId(stepExecution.getId()).stream()
                    .map(record -> JobExecutionDetailDto.FailedRecordDto.builder()
                            .id(record.getId())
                            .jobName(record.getJobName())
                            .recordKey(record.getRecordKey())
                            .errorMessage(record.getErrorMessage())
                            .retryCount(record.getRetryCount())
                            .status(record.getStatus())
                            .createdAt(record.getCreatedAt())
                            .build())
                    .collect(Collectors.toList());
    return com.snp.batch.global.dto.JobExecutionDetailDto.StepExecutionDto.builder()
            .stepExecutionId(stepExecution.getId())
            .stepName(stepExecution.getStepName())
            .status(stepExecution.getStatus().name())
            .startTime(stepExecution.getStartTime())
            .endTime(stepExecution.getEndTime())
            .readCount((int) stepExecution.getReadCount())
            .writeCount((int) stepExecution.getWriteCount())
            .commitCount((int) stepExecution.getCommitCount())
            .rollbackCount((int) stepExecution.getRollbackCount())
            .readSkipCount((int) stepExecution.getReadSkipCount())
            .processSkipCount((int) stepExecution.getProcessSkipCount())
            .writeSkipCount((int) stepExecution.getWriteSkipCount())
            .filterCount((int) stepExecution.getFilterCount())
            .exitCode(stepExecution.getExitStatus().getExitCode())
            .exitMessage(stepExecution.getExitStatus().getExitDescription())
            .duration(duration)
            .apiCallInfo(apiCallInfo)
            .apiLogSummary(apiLogSummary)
            // null (not empty list) when there are no failures — keeps the payload lean
            .failedRecords(failedRecordDtos.isEmpty() ? null : failedRecordDtos)
            .build();
}
/**
 * Extracts API call information from the StepExecutionContext.
 *
 * @param stepExecution step execution whose context may carry the api* keys
 * @return API call info, or null when the step recorded no "apiUrl"
 */
private com.snp.batch.global.dto.JobExecutionDetailDto.ApiCallInfo extractApiCallInfo(
        org.springframework.batch.core.StepExecution stepExecution) {
    org.springframework.batch.item.ExecutionContext context = stepExecution.getExecutionContext();
    // A missing apiUrl key means this step does not call an external API.
    if (!context.containsKey("apiUrl")) {
        return null;
    }
    String apiUrl = context.getString("apiUrl");
    String method = context.getString("apiMethod", "GET");
    Integer totalCalls = context.getInt("totalApiCalls", 0);
    Integer completedCalls = context.getInt("completedApiCalls", 0);
    String lastCallTime = context.getString("lastCallTime", "");
    // The context stores parameters as an untyped Map, so the cast is
    // unavoidable; suppress the unchecked warning at the smallest scope
    // instead of leaving it unannotated.
    Map<String, Object> parameters = null;
    Object paramsObj = context.containsKey("apiParameters") ? context.get("apiParameters") : null;
    if (paramsObj instanceof Map) {
        @SuppressWarnings("unchecked")
        Map<String, Object> cast = (Map<String, Object>) paramsObj;
        parameters = cast;
    }
    return com.snp.batch.global.dto.JobExecutionDetailDto.ApiCallInfo.builder()
            .apiUrl(apiUrl)
            .method(method)
            .parameters(parameters)
            .totalCalls(totalCalls)
            .completedCalls(completedCalls)
            .lastCallTime(lastCallTime)
            .build();
}
/**
 * Aggregates per-step batch_api_log statistics (individual log rows are paged
 * via a separate API).
 *
 * @return the summary, or null when the step made no API calls
 */
private com.snp.batch.global.dto.JobExecutionDetailDto.StepApiLogSummary buildStepApiLogSummary(Long stepExecutionId) {
    List<Object[]> stats = apiLogRepository.getApiStatsByStepExecutionId(stepExecutionId);
    if (stats.isEmpty() || stats.get(0) == null || asLong(stats.get(0)[0]) == 0L) {
        return null;
    }
    Object[] row = stats.get(0);
    return com.snp.batch.global.dto.JobExecutionDetailDto.StepApiLogSummary.builder()
            .totalCalls(asLong(row[0]))
            .successCount(asLong(row[1]))
            .errorCount(asLong(row[2]))
            .avgResponseMs(asDouble(row[3]))
            .maxResponseMs(asLong(row[4]))
            .minResponseMs(asLong(row[5]))
            .totalResponseMs(asLong(row[6]))
            .totalRecordCount(asLong(row[7]))
            .build();
}

/**
 * Null-safe Number → long. SQL aggregates (AVG/MIN/MAX over NULL columns) can
 * come back null even when the row count is positive; the old blind
 * ((Number) x).longValue() casts would NPE in that case.
 */
private static long asLong(Object value) {
    return value instanceof Number number ? number.longValue() : 0L;
}

/** Null-safe Number → double; see {@code asLong}. */
private static double asDouble(Object value) {
    return value instanceof Number number ? number.doubleValue() : 0.0;
}
/**
 * Pages through a step's API call logs with an optional status filter.
 *
 * @param stepExecutionId step execution id
 * @param status filter: ALL (everything), SUCCESS (2xx), ERROR (4xx+/errors);
 *               null is treated as ALL — a raw {@code switch} on null would NPE
 * @param pageable paging information
 */
@Transactional(readOnly = true)
public JobExecutionDetailDto.ApiLogPageResponse getStepApiLogs(Long stepExecutionId, String status, Pageable pageable) {
    String filter = status == null ? "ALL" : status;
    Page<BatchApiLog> page = switch (filter) {
        case "SUCCESS" -> apiLogRepository.findSuccessByStepExecutionId(stepExecutionId, pageable);
        case "ERROR" -> apiLogRepository.findErrorByStepExecutionId(stepExecutionId, pageable);
        default -> apiLogRepository.findByStepExecutionIdOrderByCreatedAtAsc(stepExecutionId, pageable);
    };
    List<JobExecutionDetailDto.ApiLogEntryDto> content = page.getContent().stream()
            .map(apiLog -> JobExecutionDetailDto.ApiLogEntryDto.builder()
                    .logId(apiLog.getLogId())
                    .requestUri(apiLog.getRequestUri())
                    .httpMethod(apiLog.getHttpMethod())
                    .statusCode(apiLog.getStatusCode())
                    .responseTimeMs(apiLog.getResponseTimeMs())
                    .responseCount(apiLog.getResponseCount())
                    .errorMessage(apiLog.getErrorMessage())
                    .createdAt(apiLog.getCreatedAt())
                    .build())
            .toList();
    return JobExecutionDetailDto.ApiLogPageResponse.builder()
            .content(content)
            .page(page.getNumber())
            .size(page.getSize())
            .totalElements(page.getTotalElements())
            .totalPages(page.getTotalPages())
            .build();
}
/**
 * Builds the execution timeline consumed by the dashboard.
 *
 * @param view    "day" (24 hourly slots), "week" (Mon–Sun) or "month" (every day of the month)
 * @param dateStr date string; only the first 10 chars ("yyyy-MM-dd") are parsed
 * @return period definitions plus, per job, either the latest execution in each
 *         period or a SCHEDULED marker for future fire times
 * @throws IllegalArgumentException for an unknown view type — rethrown unwrapped
 *         (previously it was swallowed into an opaque RuntimeException, losing
 *         its client-error semantics)
 * @throws RuntimeException for unexpected failures while assembling the timeline
 */
public com.snp.batch.global.dto.TimelineResponse getTimeline(String view, String dateStr) {
    try {
        LocalDate date = LocalDate.parse(dateStr.substring(0, 10));
        List<com.snp.batch.global.dto.TimelineResponse.PeriodInfo> periods = new ArrayList<>();
        String periodLabel = "";
        // Query range for execution history
        LocalDateTime rangeStart;
        LocalDateTime rangeEnd;
        if ("day".equals(view)) {
            // day view: 24 hourly slots
            periodLabel = date.format(java.time.format.DateTimeFormatter.ofPattern("yyyy년 MM월 dd일"));
            rangeStart = date.atStartOfDay();
            rangeEnd = rangeStart.plusDays(1);
            for (int hour = 0; hour < 24; hour++) {
                periods.add(com.snp.batch.global.dto.TimelineResponse.PeriodInfo.builder()
                        .key(date.toString() + "-" + String.format("%02d", hour))
                        .label(String.format("%02d:00", hour))
                        .build());
            }
        } else if ("week".equals(view)) {
            // week view: Monday through Sunday
            LocalDate startOfWeek = date.with(java.time.DayOfWeek.MONDAY);
            LocalDate endOfWeek = startOfWeek.plusDays(6);
            periodLabel = String.format("%s ~ %s",
                    startOfWeek.format(java.time.format.DateTimeFormatter.ofPattern("MM/dd")),
                    endOfWeek.format(java.time.format.DateTimeFormatter.ofPattern("MM/dd")));
            rangeStart = startOfWeek.atStartOfDay();
            rangeEnd = endOfWeek.plusDays(1).atStartOfDay();
            for (int day = 0; day < 7; day++) {
                LocalDate current = startOfWeek.plusDays(day);
                periods.add(com.snp.batch.global.dto.TimelineResponse.PeriodInfo.builder()
                        .key(current.toString())
                        .label(current.format(java.time.format.DateTimeFormatter.ofPattern("MM/dd (E)", java.util.Locale.KOREAN)))
                        .build());
            }
        } else if ("month".equals(view)) {
            // month view: every day of the month
            java.time.YearMonth yearMonth = java.time.YearMonth.from(date);
            periodLabel = date.format(java.time.format.DateTimeFormatter.ofPattern("yyyy년 MM월"));
            rangeStart = yearMonth.atDay(1).atStartOfDay();
            rangeEnd = yearMonth.atEndOfMonth().plusDays(1).atStartOfDay();
            for (int day = 1; day <= yearMonth.lengthOfMonth(); day++) {
                LocalDate current = yearMonth.atDay(day);
                periods.add(com.snp.batch.global.dto.TimelineResponse.PeriodInfo.builder()
                        .key(current.toString())
                        .label(String.format("%d일", day))
                        .build());
            }
        } else {
            throw new IllegalArgumentException("Invalid view type: " + view);
        }
        // Active schedules keyed by job name; keep the first entry on unexpected
        // duplicates instead of failing the whole timeline with an
        // IllegalStateException from Collectors.toMap.
        List<com.snp.batch.global.dto.ScheduleResponse> activeSchedules = scheduleService.getAllActiveSchedules();
        Map<String, com.snp.batch.global.dto.ScheduleResponse> scheduleMap = activeSchedules.stream()
                .collect(Collectors.toMap(
                        com.snp.batch.global.dto.ScheduleResponse::getJobName,
                        s -> s,
                        (a, b) -> a
                ));
        // One lightweight query for every execution in range, then group per job
        List<Map<String, Object>> allExecutions = timelineRepository.findAllExecutionsByDateRange(rangeStart, rangeEnd);
        Map<String, List<Map<String, Object>>> executionsByJob = allExecutions.stream()
                .collect(Collectors.groupingBy(exec -> (String) exec.get("jobName")));
        // Assemble the per-job timelines
        List<com.snp.batch.global.dto.TimelineResponse.ScheduleTimeline> schedules = new ArrayList<>();
        // Consider every job that either ran in range or has an active schedule
        Set<String> allJobNames = new HashSet<>(executionsByJob.keySet());
        allJobNames.addAll(scheduleMap.keySet());
        for (String jobName : allJobNames) {
            if (!jobMap.containsKey(jobName)) {
                continue; // skip jobs that no longer exist
            }
            List<Map<String, Object>> jobExecutions = executionsByJob.getOrDefault(jobName, Collections.emptyList());
            Map<String, com.snp.batch.global.dto.TimelineResponse.ExecutionInfo> executions = new HashMap<>();
            // For each period, map either the past execution or a SCHEDULED marker
            for (com.snp.batch.global.dto.TimelineResponse.PeriodInfo period : periods) {
                Map<String, Object> matchedExecution = findExecutionForPeriodFromMap(jobExecutions, period, view);
                if (matchedExecution != null) {
                    // a past execution exists in this period
                    java.sql.Timestamp startTimestamp = (java.sql.Timestamp) matchedExecution.get("startTime");
                    java.sql.Timestamp endTimestamp = (java.sql.Timestamp) matchedExecution.get("endTime");
                    executions.put(period.getKey(), com.snp.batch.global.dto.TimelineResponse.ExecutionInfo.builder()
                            .executionId(((Number) matchedExecution.get("executionId")).longValue())
                            .status((String) matchedExecution.get("status"))
                            .startTime(startTimestamp != null ? startTimestamp.toLocalDateTime().toString() : null)
                            .endTime(endTimestamp != null ? endTimestamp.toLocalDateTime().toString() : null)
                            .build());
                } else if (scheduleMap.containsKey(jobName)) {
                    // no execution yet — mark future fire times as SCHEDULED
                    com.snp.batch.global.dto.ScheduleResponse schedule = scheduleMap.get(jobName);
                    if (isScheduledForPeriod(schedule, period, view)) {
                        executions.put(period.getKey(), com.snp.batch.global.dto.TimelineResponse.ExecutionInfo.builder()
                                .status("SCHEDULED")
                                .startTime(null)
                                .endTime(null)
                                .build());
                    }
                }
            }
            if (!executions.isEmpty()) {
                schedules.add(com.snp.batch.global.dto.TimelineResponse.ScheduleTimeline.builder()
                        .jobName(jobName)
                        .executions(executions)
                        .build());
            }
        }
        return com.snp.batch.global.dto.TimelineResponse.builder()
                .periodLabel(periodLabel)
                .periods(periods)
                .schedules(schedules)
                .build();
    } catch (IllegalArgumentException e) {
        // caller error (bad view) — rethrow unwrapped so it keeps 4xx semantics
        throw e;
    } catch (Exception e) {
        log.error("Error generating timeline", e);
        throw new RuntimeException("Failed to generate timeline", e);
    }
}
/**
 * Finds, among map-based execution rows, the one matching a given period.
 * For "day" view the key is "yyyy-MM-dd-HH" (hour-level match); otherwise the
 * key is a plain ISO date (day-level match). When several executions fall into
 * the same period, the LATEST one by start time wins.
 *
 * @return the matching row, or null when the period has no execution
 */
private Map<String, Object> findExecutionForPeriodFromMap(
        List<Map<String, Object>> executions,
        com.snp.batch.global.dto.TimelineResponse.PeriodInfo period,
        String view) {
    return executions.stream()
            .filter(exec -> exec.get("startTime") != null)
            .filter(exec -> {
                java.sql.Timestamp timestamp = (java.sql.Timestamp) exec.get("startTime");
                java.time.LocalDateTime startTime = timestamp.toLocalDateTime();
                String periodKey = period.getKey();
                if ("day".equals(view)) {
                    // hourly match (key format: "2025-10-14-00")
                    int lastDashIndex = periodKey.lastIndexOf('-');
                    String dateStr = periodKey.substring(0, lastDashIndex);
                    int hour = Integer.parseInt(periodKey.substring(lastDashIndex + 1));
                    java.time.LocalDate periodDate = java.time.LocalDate.parse(dateStr);
                    return startTime.toLocalDate().equals(periodDate) &&
                    startTime.getHour() == hour;
                } else {
                    // daily match
                    java.time.LocalDate periodDate = java.time.LocalDate.parse(periodKey);
                    return startTime.toLocalDate().equals(periodDate);
                }
            })
            // keep the most recent run within the period
            .max(Comparator.comparing(exec -> ((java.sql.Timestamp) exec.get("startTime")).toLocalDateTime()))
            .orElse(null);
}
/**
 * Returns true when a schedule exists for the job; the lookup signals absence
 * by throwing, which is swallowed here deliberately.
 * NOTE(review): this private helper appears unused within this file — candidate
 * for removal; verify there are no other call sites before deleting.
 */
private boolean isJobScheduled(String jobName) {
    // existence check via exception from the schedule lookup
    try {
        scheduleService.getScheduleByJobName(jobName);
        return true;
    } catch (Exception e) {
        return false;
    }
}
/**
 * Returns true when the schedule's next fire time falls inside the given period
 * (a one-hour slot for "day" view, otherwise a full day). Both comparisons use
 * the half-open interval [periodStart, periodEnd).
 */
private boolean isScheduledForPeriod(com.snp.batch.global.dto.ScheduleResponse schedule,
com.snp.batch.global.dto.TimelineResponse.PeriodInfo period,
String view) {
    if (schedule.getNextFireTime() == null) {
        return false;
    }
    // next fire time is a Date-like value (has toInstant()); convert in the server's default zone
    java.time.LocalDateTime nextFireTime = schedule.getNextFireTime()
    .toInstant()
    .atZone(java.time.ZoneId.systemDefault())
    .toLocalDateTime();
    String periodKey = period.getKey();
    if ("day".equals(view)) {
        // hourly match (key format: "2025-10-14-00")
        int lastDashIndex = periodKey.lastIndexOf('-');
        String dateStr = periodKey.substring(0, lastDashIndex);
        int hour = Integer.parseInt(periodKey.substring(lastDashIndex + 1));
        java.time.LocalDate periodDate = java.time.LocalDate.parse(dateStr);
        java.time.LocalDateTime periodStart = periodDate.atTime(hour, 0);
        java.time.LocalDateTime periodEnd = periodStart.plusHours(1);
        return !nextFireTime.isBefore(periodStart) && nextFireTime.isBefore(periodEnd);
    } else {
        // daily match
        java.time.LocalDate periodDate = java.time.LocalDate.parse(periodKey);
        java.time.LocalDateTime periodStart = periodDate.atStartOfDay();
        java.time.LocalDateTime periodEnd = periodStart.plusDays(1);
        return !nextFireTime.isBefore(periodStart) && nextFireTime.isBefore(periodEnd);
    }
}
/**
 * Returns all executions of one job that fall inside a timeline period,
 * newest start time first.
 */
public List<JobExecutionDto> getPeriodExecutions(String jobName, String view, String periodKey) {
    List<JobExecutionDto> result = new ArrayList<>();
    for (JobInstance instance : jobExplorer.findJobInstancesByJobName(jobName, 0, 1000)) {
        for (JobExecution execution : jobExplorer.getJobExecutions(instance)) {
            if (execution.getStartTime() != null && matchesPeriod(execution, view, periodKey)) {
                result.add(convertToDto(execution));
            }
        }
    }
    result.sort(Comparator.comparing(JobExecutionDto::getStartTime).reversed());
    return result;
}

/**
 * Period-membership test mirroring the map-based variant: hour-level match for
 * "day" view ("yyyy-MM-dd-HH" keys), day-level match otherwise.
 */
private boolean matchesPeriod(JobExecution execution, String view, String periodKey) {
    LocalDateTime startTime = execution.getStartTime();
    if (!"day".equals(view)) {
        // daily match: key is a plain ISO date
        return startTime.toLocalDate().equals(java.time.LocalDate.parse(periodKey));
    }
    // hourly match (key format: "2025-10-14-00")
    int splitAt = periodKey.lastIndexOf('-');
    java.time.LocalDate periodDate = java.time.LocalDate.parse(periodKey.substring(0, splitAt));
    int hour = Integer.parseInt(periodKey.substring(splitAt + 1));
    return startTime.toLocalDate().equals(periodDate) && startTime.getHour() == hour;
}
/**
 * Collects all dashboard data in a single call: schedule stats, running jobs,
 * the ten most recent executions, recent failures, the count of executions
 * stuck in a running state, and 24h/7d failure totals.
 */
public com.snp.batch.global.dto.DashboardResponse getDashboardData() {
    // 1. schedule statistics
    java.util.List<com.snp.batch.global.dto.ScheduleResponse> allSchedules = scheduleService.getAllSchedules();
    int totalSchedules = allSchedules.size();
    int activeSchedules = (int) allSchedules.stream().filter(com.snp.batch.global.dto.ScheduleResponse::getActive).count();
    int inactiveSchedules = totalSchedules - activeSchedules;
    int totalJobs = jobMap.size();
    com.snp.batch.global.dto.DashboardResponse.Stats stats = com.snp.batch.global.dto.DashboardResponse.Stats.builder()
            .totalSchedules(totalSchedules)
            .activeSchedules(activeSchedules)
            .inactiveSchedules(inactiveSchedules)
            .totalJobs(totalJobs)
            .build();
    // 2. currently running jobs (single query)
    List<Map<String, Object>> runningData = timelineRepository.findRunningExecutions();
    List<com.snp.batch.global.dto.DashboardResponse.RunningJob> runningJobs = runningData.stream()
            .map(data -> {
                java.sql.Timestamp startTimestamp = (java.sql.Timestamp) data.get("startTime");
                return com.snp.batch.global.dto.DashboardResponse.RunningJob.builder()
                        .jobName((String) data.get("jobName"))
                        .executionId(((Number) data.get("executionId")).longValue())
                        .status((String) data.get("status"))
                        .startTime(startTimestamp != null ? startTimestamp.toLocalDateTime() : null)
                        .build();
            })
            .collect(Collectors.toList());
    // 3. recent execution history (single query, top 10)
    List<Map<String, Object>> recentData = timelineRepository.findRecentExecutions(10);
    List<com.snp.batch.global.dto.DashboardResponse.RecentExecution> recentExecutions = recentData.stream()
            .map(data -> {
                java.sql.Timestamp startTimestamp = (java.sql.Timestamp) data.get("startTime");
                java.sql.Timestamp endTimestamp = (java.sql.Timestamp) data.get("endTime");
                return com.snp.batch.global.dto.DashboardResponse.RecentExecution.builder()
                        .executionId(((Number) data.get("executionId")).longValue())
                        .jobName((String) data.get("jobName"))
                        .status((String) data.get("status"))
                        .startTime(startTimestamp != null ? startTimestamp.toLocalDateTime() : null)
                        .endTime(endTimestamp != null ? endTimestamp.toLocalDateTime() : null)
                        .build();
            })
            .collect(Collectors.toList());
    // 4. recent failures (within the last 24 hours)
    List<Map<String, Object>> failureData = timelineRepository.findRecentFailures(24);
    List<DashboardResponse.RecentFailure> recentFailures = failureData.stream()
            .map(data -> {
                java.sql.Timestamp startTs = (java.sql.Timestamp) data.get("startTime");
                java.sql.Timestamp endTs = (java.sql.Timestamp) data.get("endTime");
                return DashboardResponse.RecentFailure.builder()
                        .executionId(((Number) data.get("executionId")).longValue())
                        .jobName((String) data.get("jobName"))
                        .status((String) data.get("status"))
                        .startTime(startTs != null ? startTs.toLocalDateTime() : null)
                        .endTime(endTs != null ? endTs.toLocalDateTime() : null)
                        .exitMessage((String) data.get("exitMessage"))
                        .build();
            })
            .collect(Collectors.toList());
    // 5. executions stuck in a running state beyond the 60-minute threshold
    int staleExecutionCount = timelineRepository.countStaleExecutions(60);
    // 6. failure statistics (last 24 hours / last 7 days)
    int last24h = timelineRepository.countFailuresSince(LocalDateTime.now().minusHours(24));
    int last7d = timelineRepository.countFailuresSince(LocalDateTime.now().minusDays(7));
    DashboardResponse.FailureStats failureStats = DashboardResponse.FailureStats.builder()
            .last24h(last24h)
            .last7d(last7d)
            .build();
    return DashboardResponse.builder()
            .stats(stats)
            .runningJobs(runningJobs)
            .recentExecutions(recentExecutions)
            .recentFailures(recentFailures)
            .staleExecutionCount(staleExecutionCount)
            .failureStats(failureStats)
            .build();
}
// ── Last-collection-success monitoring ───────────────────────
/**
 * Returns the last successful collection time for every API, sorted by
 * lastSuccessDate ascending with nulls first (oldest entries surface at the
 * top for easier monitoring). The minutes-since value is -1 when a collection
 * has never succeeded.
 */
@Transactional(readOnly = true)
public List<LastCollectionStatusResponse> getLastCollectionStatuses() {
    LocalDateTime now = LocalDateTime.now();
    Comparator<BatchLastExecution> oldestFirst = Comparator.comparing(
            BatchLastExecution::getLastSuccessDate,
            Comparator.nullsFirst(Comparator.naturalOrder()));
    List<BatchLastExecution> entities = new ArrayList<>(batchLastExecutionRepository.findAll());
    entities.sort(oldestFirst);
    List<LastCollectionStatusResponse> result = new ArrayList<>(entities.size());
    for (BatchLastExecution entity : entities) {
        long minutesSinceSuccess = entity.getLastSuccessDate() != null
                ? ChronoUnit.MINUTES.between(entity.getLastSuccessDate(), now)
                : -1;
        result.add(new LastCollectionStatusResponse(
                entity.getApiKey(),
                entity.getApiDesc(),
                entity.getLastSuccessDate(),
                entity.getUpdatedAt(),
                minutesSinceSuccess));
    }
    // unmodifiable, matching the original Stream.toList() result
    return List.copyOf(result);
}
// ── F1: abandon (force-finish) stuck executions ──────────────
/** Lists executions stuck in a running state longer than the threshold. */
public List<JobExecutionDto> getStaleExecutions(int thresholdMinutes) {
    List<Map<String, Object>> data = timelineRepository.findStaleExecutions(thresholdMinutes);
    return data.stream()
            .map(this::convertMapToDto)
            .collect(Collectors.toList());
}

/**
 * Marks a stuck execution and its steps ABANDONED.
 *
 * @throws IllegalArgumentException when the execution does not exist or is not
 *         in a running state; @Transactional then rolls back the step updates.
 *         (Previously the success log line was emitted BEFORE this validation,
 *         so failed attempts were logged as "Abandoned".)
 */
@Transactional
public void abandonExecution(long executionId) {
    int stepCount = timelineRepository.abandonStepExecutions(executionId);
    int jobCount = timelineRepository.abandonJobExecution(executionId);
    if (jobCount == 0) {
        throw new IllegalArgumentException("실행 중 상태가 아니거나 존재하지 않는 executionId: " + executionId);
    }
    log.info("Abandoned execution {}: job={}, steps={}", executionId, jobCount, stepCount);
}
/**
 * Abandons every execution stuck beyond the threshold and returns how many
 * job executions were actually updated.
 */
@Transactional
public int abandonAllStaleExecutions(int thresholdMinutes) {
    int abandonedCount = 0;
    for (Map<String, Object> exec : timelineRepository.findStaleExecutions(thresholdMinutes)) {
        long executionId = ((Number) exec.get("executionId")).longValue();
        // steps first, then the job row; only the job update contributes to the count
        timelineRepository.abandonStepExecutions(executionId);
        abandonedCount += timelineRepository.abandonJobExecution(executionId);
    }
    log.info("Abandoned {} stale executions (threshold: {} minutes)", abandonedCount, thresholdMinutes);
    return abandonedCount;
}
// ── F4: execution history search (pagination) ────────────────
/**
 * Searches execution history with optional job/status/date filters using
 * offset pagination, enriching each row with its failed-record count.
 *
 * @param page zero-based page index
 * @param size page size; non-positive sizes yield totalPages = 0 (previously
 *             size == 0 produced Infinity, int-cast to Integer.MAX_VALUE)
 */
public ExecutionSearchResponse searchExecutions(
        List<String> jobNames, String status,
        LocalDateTime startDate, LocalDateTime endDate,
        int page, int size) {
    int offset = page * size;
    List<Map<String, Object>> data = timelineRepository.searchExecutions(
            jobNames, status, startDate, endDate, offset, size);
    int totalCount = timelineRepository.countExecutions(jobNames, status, startDate, endDate);
    List<JobExecutionDto> executions = data.stream()
            .map(this::convertMapToDto)
            .collect(Collectors.toList());
    populateFailedRecordCounts(executions);
    int totalPages = size > 0 ? (int) Math.ceil((double) totalCount / size) : 0;
    return ExecutionSearchResponse.builder()
            .executions(executions)
            .totalCount(totalCount)
            .page(page)
            .size(size)
            .totalPages(totalPages)
            .build();
}
// ── F7: job detail list ──────────────────────────────────────
/**
 * Lists every registered job (sorted by name) with its latest execution and
 * schedule cron expression, when available.
 */
public List<JobDetailDto> getJobsWithDetail() {
    // Latest execution per job. A merge function is supplied so an unexpected
    // duplicate job name degrades gracefully instead of throwing
    // IllegalStateException from Collectors.toMap (the cron map below already
    // did this; the two maps are now consistent).
    Map<String, Map<String, Object>> lastExecMap = timelineRepository.findLastExecutionPerJob().stream()
            .collect(Collectors.toMap(
                    data -> (String) data.get("jobName"),
                    data -> data,
                    (a, b) -> a
            ));
    // Schedule info: job name → cron expression
    List<ScheduleResponse> schedules = scheduleService.getAllSchedules();
    Map<String, String> cronMap = schedules.stream()
            .collect(Collectors.toMap(
                    ScheduleResponse::getJobName,
                    ScheduleResponse::getCronExpression,
                    (a, b) -> a
            ));
    return jobMap.keySet().stream()
            .sorted()
            .map(jobName -> {
                JobDetailDto.LastExecution lastExec = null;
                Map<String, Object> execData = lastExecMap.get(jobName);
                if (execData != null) {
                    java.sql.Timestamp startTs = (java.sql.Timestamp) execData.get("startTime");
                    java.sql.Timestamp endTs = (java.sql.Timestamp) execData.get("endTime");
                    lastExec = JobDetailDto.LastExecution.builder()
                            .executionId(((Number) execData.get("executionId")).longValue())
                            .status((String) execData.get("status"))
                            .startTime(startTs != null ? startTs.toLocalDateTime() : null)
                            .endTime(endTs != null ? endTs.toLocalDateTime() : null)
                            .build();
                }
                return JobDetailDto.builder()
                        .jobName(jobName)
                        .lastExecution(lastExec)
                        .scheduleCron(cronMap.get(jobName))
                        .build();
            })
            .collect(Collectors.toList());
}
// ── F8: execution statistics ─────────────────────────────────
/** Daily success/failure statistics across all jobs for the last {@code days} days. */
public ExecutionStatisticsDto getStatistics(int days) {
    return buildStatisticsDto(timelineRepository.findDailyStatistics(days));
}

/** Daily success/failure statistics for a single job. */
public ExecutionStatisticsDto getJobStatistics(String jobName, int days) {
    return buildStatisticsDto(timelineRepository.findDailyStatisticsForJob(jobName, days));
}
/**
 * Folds raw daily-statistics rows into the statistics DTO. The overall average
 * duration considers only days with a positive average (days with no runs
 * report 0 and are excluded from the mean, as before).
 */
private ExecutionStatisticsDto buildStatisticsDto(List<Map<String, Object>> dailyData) {
    List<ExecutionStatisticsDto.DailyStat> dailyStats = new ArrayList<>(dailyData.size());
    int totalSuccess = 0;
    int totalFailed = 0;
    int totalOther = 0;
    double durationSum = 0;
    int durationDays = 0;
    for (Map<String, Object> data : dailyData) {
        Object dateObj = data.get("execDate");
        Number avgMs = (Number) data.get("avgDurationMs");
        ExecutionStatisticsDto.DailyStat stat = ExecutionStatisticsDto.DailyStat.builder()
                .date(dateObj != null ? dateObj.toString() : "")
                .successCount(((Number) data.get("successCount")).intValue())
                .failedCount(((Number) data.get("failedCount")).intValue())
                .otherCount(((Number) data.get("otherCount")).intValue())
                .avgDurationMs(avgMs != null ? avgMs.doubleValue() : 0)
                .build();
        dailyStats.add(stat);
        totalSuccess += stat.getSuccessCount();
        totalFailed += stat.getFailedCount();
        totalOther += stat.getOtherCount();
        if (stat.getAvgDurationMs() > 0) {
            durationSum += stat.getAvgDurationMs();
            durationDays++;
        }
    }
    double avgDuration = durationDays > 0 ? durationSum / durationDays : 0;
    return ExecutionStatisticsDto.builder()
            .dailyStats(dailyStats)
            .totalExecutions(totalSuccess + totalFailed + totalOther)
            .totalSuccess(totalSuccess)
            .totalFailed(totalFailed)
            .avgDurationMs(avgDuration)
            .build();
}
// ── Common: attach failed-record counts ──────────────────────
/**
 * Bulk-loads failed-record counts for the given executions in one query and
 * writes them onto the DTOs (0 when an execution has no failures). Avoids the
 * N+1 pattern of querying per execution.
 */
private void populateFailedRecordCounts(List<JobExecutionDto> executions) {
    List<Long> executionIds = new ArrayList<>();
    for (JobExecutionDto exec : executions) {
        if (exec.getExecutionId() != null) {
            executionIds.add(exec.getExecutionId());
        }
    }
    if (executionIds.isEmpty()) {
        return;
    }
    // row[0] = executionId, row[1] = failed-record count
    Map<Long, Long> countMap = failedRecordRepository.countFailedByJobExecutionIds(executionIds)
            .stream()
            .collect(Collectors.toMap(
                    row -> ((Number) row[0]).longValue(),
                    row -> ((Number) row[1]).longValue()
            ));
    for (JobExecutionDto exec : executions) {
        exec.setFailedRecordCount(countMap.getOrDefault(exec.getExecutionId(), 0L));
    }
}
// ── Common: Map → DTO conversion helper ──────────────────────
/** Converts a raw query-result row into a JobExecutionDto; timestamps may be null. */
private JobExecutionDto convertMapToDto(Map<String, Object> data) {
    java.sql.Timestamp startTs = (java.sql.Timestamp) data.get("startTime");
    java.sql.Timestamp endTs = (java.sql.Timestamp) data.get("endTime");
    LocalDateTime start = startTs == null ? null : startTs.toLocalDateTime();
    LocalDateTime end = endTs == null ? null : endTs.toLocalDateTime();
    return JobExecutionDto.builder()
            .executionId(((Number) data.get("executionId")).longValue())
            .jobName((String) data.get("jobName"))
            .status((String) data.get("status"))
            .startTime(start)
            .endTime(end)
            .exitCode((String) data.get("exitCode"))
            .exitMessage((String) data.get("exitMessage"))
            .build();
}
}