+ {/* Form Modal */}
+ {formOpen && (
+
+
setFormOpen(false)} />
+
+
+
+ {scheduleMode === 'existing' ? '스케줄 수정' : '스케줄 등록'}
+
+
+
+
+
+ {/* Job Select */}
+
+
+
+
+ {selectedJob && (
+
+ {scheduleMode === 'existing' ? '기존 스케줄' : '새 스케줄'}
+
+ )}
+ {formLoading && (
+
+ )}
+
+
+
+ {/* Cron Expression */}
+
+
+ setCronExpression(e.target.value)}
+ placeholder="0 0/15 * * * ?"
+ className="w-full rounded-lg border border-wing-border px-3 py-2 text-sm font-mono focus:outline-none focus:ring-2 focus:ring-wing-accent focus:border-wing-accent"
+ disabled={!selectedJob || formLoading}
+ />
+
+
+ {/* Cron Presets */}
+
+
+
+ {CRON_PRESETS.map(({ label, cron }) => (
+
+ ))}
+
+
+
+ {/* Cron Preview */}
+ {cronExpression.trim() && (
+
+ )}
+
+ {/* Description */}
+
+
+ setDescription(e.target.value)}
+ placeholder="스케줄 설명 (선택)"
+ className="w-full rounded-lg border border-wing-border px-3 py-2 text-sm focus:outline-none focus:ring-2 focus:ring-wing-accent focus:border-wing-accent"
+ disabled={!selectedJob || formLoading}
+ />
+
+
+
+ {/* Modal Footer */}
+
+
+
+
+
+
+ )}
+
+ {/* Header */}
+
+
+
스케줄 관리
+ setGuideOpen(true)} />
+
+
+
+
+
+ 총 {schedules.length}개 스케줄
+
+
+
+
+ {/* Active Filter Tabs */}
+
+ {ACTIVE_TABS.map((tab) => (
+
+ ))}
+
+
+ {/* Search + Sort + View Toggle */}
+
+
+ {/* Search */}
+
+
+
+
+
setSearchTerm(e.target.value)}
+ className="w-full pl-10 pr-4 py-2 border border-wing-border rounded-lg text-sm
+ focus:ring-2 focus:ring-wing-accent focus:border-wing-accent outline-none"
+ />
+ {searchTerm && (
+
+ )}
+
+
+ {/* Sort dropdown */}
+
+
+ {/* View mode toggle */}
+
+
+
+
+
+
+ {searchTerm && (
+
+ {filteredSchedules.length}개 스케줄 검색됨
+
+ )}
+
+
+ {/* Schedule List */}
+ {filteredSchedules.length === 0 ? (
+
+
+
+ ) : viewMode === 'card' ? (
+ /* Card View */
+
+ {filteredSchedules.map((schedule) => (
+
+
+
+
+ {getScheduleLabel(schedule)}
+
+
+
+
+ {schedule.active ? '활성' : '비활성'}
+
+ {schedule.triggerState && (
+
+ {schedule.triggerState}
+
+ )}
+
+
+
+ {/* Detail Info */}
+
+
+
+ {schedule.cronExpression}
+
+
+
+ 다음 실행: {formatDateTime(schedule.nextFireTime)}
+
+ {schedule.previousFireTime && (
+
+ 이전 실행: {formatDateTime(schedule.previousFireTime)}
+
+ )}
+
+
+ {/* Action Buttons */}
+
+
+
+
+
+
+ ))}
+
+ ) : (
+ /* Table View */
+
+
+
+
+
+ | 작업명 |
+ Cron 표현식 |
+ 상태 |
+ 다음 실행 |
+ 이전 실행 |
+ 액션 |
+
+
+
+ {filteredSchedules.map((schedule) => (
+
+ |
+ {getScheduleLabel(schedule)}
+ |
+
+ {schedule.cronExpression}
+ |
+
+
+ {schedule.active ? '활성' : '비활성'}
+
+ |
+ {formatDateTime(schedule.nextFireTime)} |
+ {schedule.previousFireTime ? formatDateTime(schedule.previousFireTime) : '-'} |
+
+
+
+
+
+
+ |
+
+ ))}
+
+
+
+
+ )}
+
+ {/* Confirm Modal */}
+ {confirmAction?.type === 'toggle' && (
+
handleToggle(confirmAction.schedule)}
+ onCancel={() => setConfirmAction(null)}
+ />
+ )}
+ {confirmAction?.type === 'delete' && (
+ handleDelete(confirmAction.schedule)}
+ onCancel={() => setConfirmAction(null)}
+ />
+ )}
+ setGuideOpen(false)}
+ pageTitle="스케줄 관리"
+ sections={SCHEDULES_GUIDE}
+ />
+
+ );
+}
diff --git a/frontend/src/pages/Timeline.tsx b/frontend/src/pages/Timeline.tsx
new file mode 100644
index 0000000..d0b95c6
--- /dev/null
+++ b/frontend/src/pages/Timeline.tsx
@@ -0,0 +1,513 @@
+import { useState, useCallback, useRef, useEffect, useMemo } from 'react';
+import { Link } from 'react-router-dom';
+import { batchApi, type ExecutionInfo, type JobDisplayName, type JobExecutionDto, type PeriodInfo, type ScheduleTimeline } from '../api/batchApi';
+import { formatDateTime, calculateDuration } from '../utils/formatters';
+import { usePoller } from '../hooks/usePoller';
+import { useToastContext } from '../contexts/ToastContext';
+import { getStatusColor } from '../components/StatusBadge';
+import StatusBadge from '../components/StatusBadge';
+import LoadingSpinner from '../components/LoadingSpinner';
+import EmptyState from '../components/EmptyState';
+import GuideModal, { HelpButton } from '../components/GuideModal';
+
+type ViewType = 'day' | 'week' | 'month';
+
+interface TooltipData {
+ jobName: string;
+ period: PeriodInfo;
+ execution: ExecutionInfo;
+ x: number;
+ y: number;
+}
+
+interface SelectedCell {
+ jobName: string;
+ periodKey: string;
+ periodLabel: string;
+}
+
+const VIEW_OPTIONS: { value: ViewType; label: string }[] = [
+ { value: 'day', label: 'Day' },
+ { value: 'week', label: 'Week' },
+ { value: 'month', label: 'Month' },
+];
+
+const LEGEND_ITEMS = [
+ { status: 'COMPLETED', color: '#10b981', label: '완료' },
+ { status: 'FAILED', color: '#ef4444', label: '실패' },
+ { status: 'STARTED', color: '#3b82f6', label: '실행중' },
+ { status: 'SCHEDULED', color: '#8b5cf6', label: '예정' },
+ { status: 'NONE', color: '#e5e7eb', label: '없음' },
+];
+
+const JOB_COL_WIDTH = 200;
+const CELL_MIN_WIDTH = 80;
+const POLLING_INTERVAL = 30000;
+
+function formatDateStr(date: Date): string {
+ const y = date.getFullYear();
+ const m = String(date.getMonth() + 1).padStart(2, '0');
+ const d = String(date.getDate()).padStart(2, '0');
+ return `${y}-${m}-${d}`;
+}
+
+function shiftDate(date: Date, view: ViewType, delta: number): Date {
+ const next = new Date(date);
+ switch (view) {
+ case 'day':
+ next.setDate(next.getDate() + delta);
+ break;
+ case 'week':
+ next.setDate(next.getDate() + delta * 7);
+ break;
+ case 'month':
+ next.setMonth(next.getMonth() + delta);
+ break;
+ }
+ return next;
+}
+
+function isRunning(status: string): boolean {
+ return status === 'STARTED' || status === 'STARTING';
+}
+
+const TIMELINE_GUIDE = [
+ {
+ title: '타임라인이란?',
+ content: '타임라인은 배치 작업의 실행 스케줄과 결과를 시각적으로 보여주는 화면입니다.\n세로축은 작업 목록, 가로축은 시간대를 나타냅니다.\n각 셀의 색상으로 실행 상태를 한눈에 파악할 수 있습니다.',
+ },
+ {
+ title: '보기 모드',
+ content: '3가지 보기 모드를 제공합니다.\n• Day: 하루 단위 (시간대별 상세 보기)\n• Week: 일주일 단위\n• Month: 한 달 단위\n\n이전/다음 버튼으로 기간을 이동하고, "오늘" 버튼으로 현재 날짜로 돌아옵니다.',
+ },
+ {
+ title: '색상 범례',
+ content: '각 셀의 색상은 실행 상태를 나타냅니다.\n• 초록색: 완료 (성공적으로 실행됨)\n• 빨간색: 실패 (오류 발생)\n• 파란색: 실행 중 (현재 진행 중)\n• 보라색: 예정 (아직 실행 전)\n• 회색: 없음 (해당 시간대에 실행 기록 없음)',
+ },
+ {
+ title: '상세 보기',
+ content: '셀 위에 마우스를 올리면 툴팁으로 작업명, 기간, 상태 등 요약 정보를 보여줍니다.\n셀을 클릭하면 하단에 상세 패널이 열리며, 해당 시간대의 실행 이력 목록을 확인할 수 있습니다.\n"상세" 링크를 클릭하면 실행 상세 화면으로 이동합니다.',
+ },
+];
+
+export default function Timeline() {
+ const { showToast } = useToastContext();
+
+ // Guide modal state
+ const [guideOpen, setGuideOpen] = useState(false);
+
+ const [view, setView] = useState
('day');
+ const [currentDate, setCurrentDate] = useState(() => new Date());
+ const [periodLabel, setPeriodLabel] = useState('');
+ const [periods, setPeriods] = useState([]);
+ const [schedules, setSchedules] = useState([]);
+ const [loading, setLoading] = useState(true);
+
+ const [displayNames, setDisplayNames] = useState([]);
+
+ useEffect(() => {
+ batchApi.getDisplayNames().then(setDisplayNames).catch(() => {});
+ }, []);
+
+ const displayNameMap = useMemo>(() => {
+ const map: Record = {};
+ for (const dn of displayNames) {
+ map[dn.jobName] = dn.displayName;
+ }
+ return map;
+ }, [displayNames]);
+
+ // Tooltip
+ const [tooltip, setTooltip] = useState(null);
+ const tooltipTimeoutRef = useRef | null>(null);
+
+ // Selected cell & detail panel
+ const [selectedCell, setSelectedCell] = useState(null);
+ const [detailExecutions, setDetailExecutions] = useState([]);
+ const [detailLoading, setDetailLoading] = useState(false);
+
+ const loadTimeline = useCallback(async () => {
+ try {
+ const dateStr = formatDateStr(currentDate);
+ const result = await batchApi.getTimeline(view, dateStr);
+ setPeriodLabel(result.periodLabel);
+ setPeriods(result.periods);
+ setSchedules(result.schedules);
+ } catch (err) {
+ showToast('타임라인 조회 실패', 'error');
+ console.error(err);
+ } finally {
+ setLoading(false);
+ }
+ }, [view, currentDate, showToast]);
+
+ usePoller(loadTimeline, POLLING_INTERVAL, [view, currentDate]);
+
+ const handlePrev = () => setCurrentDate((d) => shiftDate(d, view, -1));
+ const handleNext = () => setCurrentDate((d) => shiftDate(d, view, 1));
+ const handleToday = () => setCurrentDate(new Date());
+
+ const handleRefresh = async () => {
+ setLoading(true);
+ await loadTimeline();
+ };
+
+ // Tooltip handlers
+ const handleCellMouseEnter = (
+ e: React.MouseEvent,
+ jobName: string,
+ period: PeriodInfo,
+ execution: ExecutionInfo,
+ ) => {
+ if (tooltipTimeoutRef.current) {
+ clearTimeout(tooltipTimeoutRef.current);
+ }
+ const rect = (e.currentTarget as HTMLElement).getBoundingClientRect();
+ setTooltip({
+ jobName,
+ period,
+ execution,
+ x: rect.left + rect.width / 2,
+ y: rect.top,
+ });
+ };
+
+ const handleCellMouseLeave = () => {
+ tooltipTimeoutRef.current = setTimeout(() => {
+ setTooltip(null);
+ }, 100);
+ };
+
+ // Clean up tooltip timeout
+ useEffect(() => {
+ return () => {
+ if (tooltipTimeoutRef.current) {
+ clearTimeout(tooltipTimeoutRef.current);
+ }
+ };
+ }, []);
+
+ // Cell click -> detail panel
+ const handleCellClick = async (jobName: string, periodKey: string, periodLabel: string) => {
+ // Toggle off if clicking same cell
+ if (selectedCell?.jobName === jobName && selectedCell?.periodKey === periodKey) {
+ setSelectedCell(null);
+ setDetailExecutions([]);
+ return;
+ }
+
+ setSelectedCell({ jobName, periodKey, periodLabel });
+ setDetailLoading(true);
+ setDetailExecutions([]);
+
+ try {
+ const executions = await batchApi.getPeriodExecutions(jobName, view, periodKey);
+ setDetailExecutions(executions);
+ } catch (err) {
+ showToast('구간 실행 이력 조회 실패', 'error');
+ console.error(err);
+ } finally {
+ setDetailLoading(false);
+ }
+ };
+
+ const closeDetail = () => {
+ setSelectedCell(null);
+ setDetailExecutions([]);
+ };
+
+ const gridTemplateColumns = `${JOB_COL_WIDTH}px repeat(${periods.length}, minmax(${CELL_MIN_WIDTH}px, 1fr))`;
+
+ return (
+
+ {/* Controls */}
+
+
+ {/* View Toggle */}
+
+ {VIEW_OPTIONS.map((opt) => (
+
+ ))}
+
+
+ {/* Navigation */}
+
+
+
+
+
+
+ {/* Period Label */}
+
+ {periodLabel}
+
+
+ {/* Refresh */}
+
+
+ {/* Help */}
+
setGuideOpen(true)} />
+
+
+
+ {/* Legend */}
+
+ {LEGEND_ITEMS.map((item) => (
+
+ ))}
+
+
+ {/* Timeline Grid */}
+
+ {loading ? (
+
+ ) : schedules.length === 0 ? (
+
+ ) : (
+
+
+ {/* Header Row */}
+
+ 작업명
+
+ {periods.map((period) => (
+
+ {period.label}
+
+ ))}
+
+ {/* Data Rows */}
+ {schedules.map((schedule) => (
+ <>
+ {/* Job Name (sticky) */}
+
+ {displayNameMap[schedule.jobName] || schedule.jobName}
+
+
+ {/* Execution Cells */}
+ {periods.map((period) => {
+ const exec = schedule.executions[period.key];
+ const hasExec = exec !== null && exec !== undefined;
+ const isSelected =
+ selectedCell?.jobName === schedule.jobName &&
+ selectedCell?.periodKey === period.key;
+ const running = hasExec && isRunning(exec.status);
+
+ return (
+
+ handleCellClick(schedule.jobName, period.key, period.label)
+ }
+ onMouseEnter={
+ hasExec
+ ? (e) => handleCellMouseEnter(e, schedule.jobName, period, exec)
+ : undefined
+ }
+ onMouseLeave={hasExec ? handleCellMouseLeave : undefined}
+ >
+ {hasExec && (
+
+ )}
+
+ );
+ })}
+ >
+ ))}
+
+
+ )}
+
+
+ {/* Tooltip */}
+ {tooltip && (
+
+
+
{displayNameMap[tooltip.jobName] || tooltip.jobName}
+
+
기간: {tooltip.period.label}
+
+ 상태:{' '}
+
+ {tooltip.execution.status}
+
+
+ {tooltip.execution.startTime && (
+
시작: {formatDateTime(tooltip.execution.startTime)}
+ )}
+ {tooltip.execution.endTime && (
+
종료: {formatDateTime(tooltip.execution.endTime)}
+ )}
+ {tooltip.execution.executionId && (
+
실행 ID: {tooltip.execution.executionId}
+ )}
+
+ {/* Arrow */}
+
+
+
+ )}
+
+ {/* Detail Panel */}
+ {selectedCell && (
+
+
+
+
+ {displayNameMap[selectedCell.jobName] || selectedCell.jobName}
+
+
+ 구간: {selectedCell.periodLabel}
+
+
+
+
+
+ {detailLoading ? (
+
+ ) : detailExecutions.length === 0 ? (
+
+ ) : (
+
+
+
+
+ |
+ 실행 ID
+ |
+
+ 상태
+ |
+
+ 시작 시간
+ |
+
+ 종료 시간
+ |
+
+ 소요 시간
+ |
+
+ 상세
+ |
+
+
+
+ {detailExecutions.map((exec) => (
+
+ |
+ #{exec.executionId}
+ |
+
+
+ |
+
+ {formatDateTime(exec.startTime)}
+ |
+
+ {formatDateTime(exec.endTime)}
+ |
+
+ {calculateDuration(exec.startTime, exec.endTime)}
+ |
+
+
+ 상세
+
+ |
+
+ ))}
+
+
+
+ )}
+
+ )}
+
setGuideOpen(false)}
+ pageTitle="타임라인"
+ sections={TIMELINE_GUIDE}
+ />
+
+ );
+}
diff --git a/frontend/src/theme/base.css b/frontend/src/theme/base.css
new file mode 100644
index 0000000..de7d284
--- /dev/null
+++ b/frontend/src/theme/base.css
@@ -0,0 +1,101 @@
+body {
+ font-family: 'Noto Sans KR', sans-serif;
+ background: var(--wing-bg);
+ color: var(--wing-text);
+ transition: background-color 0.2s ease, color 0.2s ease;
+}
+
+/* Scrollbar styling for dark mode */
+::-webkit-scrollbar {
+ width: 8px;
+ height: 8px;
+}
+
+::-webkit-scrollbar-track {
+ background: var(--wing-surface);
+}
+
+::-webkit-scrollbar-thumb {
+ background: var(--wing-muted);
+ border-radius: 4px;
+}
+
+::-webkit-scrollbar-thumb:hover {
+ background: var(--wing-accent);
+}
+
+/* Main Menu Cards */
+.gc-cards {
+ padding: 2rem 0;
+ display: grid;
+ grid-template-columns: repeat(3, 1fr);
+ grid-auto-rows: 1fr;
+ gap: 2rem;
+ width: 80%;
+ margin: 0 auto;
+}
+
+@media (max-width: 768px) {
+ .gc-cards {
+ grid-template-columns: 1fr;
+ width: 90%;
+ }
+}
+
+.gc-card {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ text-align: center;
+ padding: 2.5rem 2rem;
+ border: 1px solid var(--wing-border);
+ border-radius: 12px;
+ background: var(--wing-surface);
+ text-decoration: none !important;
+ color: inherit !important;
+ transition: all 0.2s ease;
+ height: 100%;
+}
+
+.gc-card:hover {
+ border-color: #4183c4;
+ box-shadow: 0 4px 16px rgba(65, 131, 196, 0.15);
+ transform: translateY(-2px);
+}
+
+.gc-card-icon {
+ color: #4183c4;
+ margin-bottom: 1rem;
+}
+
+.gc-card-icon-guide {
+ color: #21ba45;
+}
+
+.gc-card-icon-nexus {
+ color: #f2711c;
+}
+
+.gc-card h3 {
+ font-size: 1.3rem;
+ margin-bottom: 0.5rem;
+ color: var(--wing-text);
+}
+
+.gc-card p {
+ font-size: 0.95rem;
+ color: var(--wing-muted);
+ line-height: 1.5;
+ margin-bottom: 1rem;
+}
+
+.gc-card-link {
+ font-size: 0.9rem;
+ color: #4183c4;
+ font-weight: 600;
+ margin-top: auto;
+}
+
+.gc-card:hover .gc-card-link {
+ text-decoration: underline;
+}
diff --git a/frontend/src/theme/tokens.css b/frontend/src/theme/tokens.css
new file mode 100644
index 0000000..5543625
--- /dev/null
+++ b/frontend/src/theme/tokens.css
@@ -0,0 +1,84 @@
+/* Dark theme (default) */
+:root,
+[data-theme='dark'] {
+ --wing-bg: #020617;
+ --wing-surface: #0f172a;
+ --wing-card: #1e293b;
+ --wing-border: #1e3a5f;
+ --wing-text: #e2e8f0;
+ --wing-muted: #64748b;
+ --wing-accent: #3b82f6;
+ --wing-danger: #ef4444;
+ --wing-warning: #f59e0b;
+ --wing-success: #22c55e;
+ --wing-glass: rgba(15, 23, 42, 0.92);
+ --wing-glass-dense: rgba(15, 23, 42, 0.95);
+ --wing-overlay: rgba(2, 6, 23, 0.42);
+ --wing-card-alpha: rgba(30, 41, 59, 0.55);
+ --wing-subtle: rgba(255, 255, 255, 0.03);
+ --wing-hover: rgba(255, 255, 255, 0.05);
+ --wing-input-bg: #0f172a;
+ --wing-input-border: #334155;
+ --wing-rag-red-bg: rgba(127, 29, 29, 0.15);
+ --wing-rag-red-text: #fca5a5;
+ --wing-rag-amber-bg: rgba(120, 53, 15, 0.15);
+ --wing-rag-amber-text: #fcd34d;
+ --wing-rag-green-bg: rgba(5, 46, 22, 0.15);
+ --wing-rag-green-text: #86efac;
+}
+
+/* Light theme */
+[data-theme='light'] {
+ --wing-bg: #e2e8f0;
+ --wing-surface: #ffffff;
+ --wing-card: #f1f5f9;
+ --wing-border: #94a3b8;
+ --wing-text: #0f172a;
+ --wing-muted: #64748b;
+ --wing-accent: #2563eb;
+ --wing-danger: #dc2626;
+ --wing-warning: #d97706;
+ --wing-success: #16a34a;
+ --wing-glass: rgba(255, 255, 255, 0.92);
+ --wing-glass-dense: rgba(255, 255, 255, 0.95);
+ --wing-overlay: rgba(0, 0, 0, 0.25);
+ --wing-card-alpha: rgba(226, 232, 240, 0.6);
+ --wing-subtle: rgba(0, 0, 0, 0.03);
+ --wing-hover: rgba(0, 0, 0, 0.04);
+ --wing-input-bg: #ffffff;
+ --wing-input-border: #cbd5e1;
+ --wing-rag-red-bg: #fef2f2;
+ --wing-rag-red-text: #b91c1c;
+ --wing-rag-amber-bg: #fffbeb;
+ --wing-rag-amber-text: #b45309;
+ --wing-rag-green-bg: #f0fdf4;
+ --wing-rag-green-text: #15803d;
+}
+
+@theme {
+ --color-wing-bg: var(--wing-bg);
+ --color-wing-surface: var(--wing-surface);
+ --color-wing-card: var(--wing-card);
+ --color-wing-border: var(--wing-border);
+ --color-wing-text: var(--wing-text);
+ --color-wing-muted: var(--wing-muted);
+ --color-wing-accent: var(--wing-accent);
+ --color-wing-danger: var(--wing-danger);
+ --color-wing-warning: var(--wing-warning);
+ --color-wing-success: var(--wing-success);
+ --color-wing-glass: var(--wing-glass);
+ --color-wing-glass-dense: var(--wing-glass-dense);
+ --color-wing-overlay: var(--wing-overlay);
+ --color-wing-card-alpha: var(--wing-card-alpha);
+ --color-wing-subtle: var(--wing-subtle);
+ --color-wing-hover: var(--wing-hover);
+ --color-wing-input-bg: var(--wing-input-bg);
+ --color-wing-input-border: var(--wing-input-border);
+ --color-wing-rag-red-bg: var(--wing-rag-red-bg);
+ --color-wing-rag-red-text: var(--wing-rag-red-text);
+ --color-wing-rag-amber-bg: var(--wing-rag-amber-bg);
+ --color-wing-rag-amber-text: var(--wing-rag-amber-text);
+ --color-wing-rag-green-bg: var(--wing-rag-green-bg);
+ --color-wing-rag-green-text: var(--wing-rag-green-text);
+ --font-sans: 'Noto Sans KR', sans-serif;
+}
diff --git a/frontend/src/utils/cronPreview.ts b/frontend/src/utils/cronPreview.ts
new file mode 100644
index 0000000..7ed0e84
--- /dev/null
+++ b/frontend/src/utils/cronPreview.ts
@@ -0,0 +1,154 @@
+/**
+ * Quartz 형식 Cron 표현식의 다음 실행 시간을 계산한다.
+ * 형식: 초 분 시 일 월 요일
+ */
+export function getNextExecutions(cron: string, count: number): Date[] {
+ const parts = cron.trim().split(/\s+/);
+ if (parts.length < 6) return [];
+
+ const [secField, minField, hourField, dayField, monthField, dowField] = parts;
+
+ if (hasUnsupportedToken(dayField) || hasUnsupportedToken(dowField)) {
+ return [];
+ }
+
+ const seconds = parseField(secField, 0, 59);
+ const minutes = parseField(minField, 0, 59);
+ const hours = parseField(hourField, 0, 23);
+ const daysOfMonth = parseField(dayField, 1, 31);
+ const months = parseField(monthField, 1, 12);
+ const daysOfWeek = parseDowField(dowField);
+
+ if (!seconds || !minutes || !hours || !months) return [];
+
+ const results: Date[] = [];
+ const now = new Date();
+ const cursor = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(), now.getMinutes(), now.getSeconds() + 1);
+ cursor.setMilliseconds(0);
+
+ const limit = new Date(now.getTime() + 365 * 24 * 60 * 60 * 1000);
+
+ while (results.length < count && cursor.getTime() <= limit.getTime()) {
+ const month = cursor.getMonth() + 1;
+ if (!months.includes(month)) {
+ cursor.setMonth(cursor.getMonth() + 1, 1);
+ cursor.setHours(0, 0, 0, 0);
+ continue;
+ }
+
+ const day = cursor.getDate();
+ const dayMatches = daysOfMonth ? daysOfMonth.includes(day) : true;
+ const dowMatches = daysOfWeek ? daysOfWeek.includes(cursor.getDay()) : true;
+
+ const needDayCheck = dayField !== '?' && dowField !== '?';
+ const dayOk = needDayCheck ? dayMatches && dowMatches : dayMatches && dowMatches;
+
+ if (!dayOk) {
+ cursor.setDate(cursor.getDate() + 1);
+ cursor.setHours(0, 0, 0, 0);
+ continue;
+ }
+
+ const hour = cursor.getHours();
+ if (!hours.includes(hour)) {
+ cursor.setHours(cursor.getHours() + 1, 0, 0, 0);
+ continue;
+ }
+
+ const minute = cursor.getMinutes();
+ if (!minutes.includes(minute)) {
+ cursor.setMinutes(cursor.getMinutes() + 1, 0, 0);
+ continue;
+ }
+
+ const second = cursor.getSeconds();
+ if (!seconds.includes(second)) {
+ cursor.setSeconds(cursor.getSeconds() + 1, 0);
+ continue;
+ }
+
+ results.push(new Date(cursor));
+ cursor.setSeconds(cursor.getSeconds() + 1);
+ }
+
+ return results;
+}
+
+function hasUnsupportedToken(field: string): boolean {
+ return /[LW#]/.test(field);
+}
+
+function parseField(field: string, min: number, max: number): number[] | null {
+ if (field === '?') return null;
+ if (field === '*') return range(min, max);
+
+ const values = new Set();
+
+ for (const part of field.split(',')) {
+ const stepMatch = part.match(/^(.+)\/(\d+)$/);
+ if (stepMatch) {
+ const [, base, stepStr] = stepMatch;
+ const step = parseInt(stepStr, 10);
+ if (step <= 0) return range(min, max);
+ let start = min;
+ let end = max;
+
+ if (base === '*') {
+ start = min;
+ } else if (base.includes('-')) {
+ const [lo, hi] = base.split('-').map(Number);
+ start = lo;
+ end = hi;
+ } else {
+ start = parseInt(base, 10);
+ }
+
+ for (let v = start; v <= end; v += step) {
+ if (v >= min && v <= max) values.add(v);
+ }
+ continue;
+ }
+
+ const rangeMatch = part.match(/^(\d+)-(\d+)$/);
+ if (rangeMatch) {
+ const lo = parseInt(rangeMatch[1], 10);
+ const hi = parseInt(rangeMatch[2], 10);
+ for (let v = lo; v <= hi; v++) {
+ if (v >= min && v <= max) values.add(v);
+ }
+ continue;
+ }
+
+ const num = parseInt(part, 10);
+ if (!isNaN(num) && num >= min && num <= max) {
+ values.add(num);
+ }
+ }
+
+ return values.size > 0 ? Array.from(values).sort((a, b) => a - b) : range(min, max);
+}
+
+function parseDowField(field: string): number[] | null {
+ if (field === '?' || field === '*') return null;
+
+ const dayMap: Record = {
+ SUN: '0', MON: '1', TUE: '2', WED: '3', THU: '4', FRI: '5', SAT: '6',
+ };
+
+ let normalized = field.toUpperCase();
+ for (const [name, num] of Object.entries(dayMap)) {
+ normalized = normalized.replace(new RegExp(name, 'g'), num);
+ }
+
+ // Quartz uses 1=SUN..7=SAT, convert to JS 0=SUN..6=SAT
+ const parsed = parseField(normalized, 1, 7);
+ if (!parsed) return null;
+
+ return parsed.map((v) => v - 1);
+}
+
+function range(min: number, max: number): number[] {
+ const result: number[] = [];
+ for (let i = min; i <= max; i++) result.push(i);
+ return result;
+}
diff --git a/frontend/src/utils/formatters.ts b/frontend/src/utils/formatters.ts
new file mode 100644
index 0000000..c1cda20
--- /dev/null
+++ b/frontend/src/utils/formatters.ts
@@ -0,0 +1,58 @@
+export function formatDateTime(dateTimeStr: string | null | undefined): string {
+ if (!dateTimeStr) return '-';
+ try {
+ const date = new Date(dateTimeStr);
+ if (isNaN(date.getTime())) return '-';
+ const y = date.getFullYear();
+ const m = String(date.getMonth() + 1).padStart(2, '0');
+ const d = String(date.getDate()).padStart(2, '0');
+ const h = String(date.getHours()).padStart(2, '0');
+ const min = String(date.getMinutes()).padStart(2, '0');
+ const s = String(date.getSeconds()).padStart(2, '0');
+ return `${y}-${m}-${d} ${h}:${min}:${s}`;
+ } catch {
+ return '-';
+ }
+}
+
+export function formatDateTimeShort(dateTimeStr: string | null | undefined): string {
+ if (!dateTimeStr) return '-';
+ try {
+ const date = new Date(dateTimeStr);
+ if (isNaN(date.getTime())) return '-';
+ const m = String(date.getMonth() + 1).padStart(2, '0');
+ const d = String(date.getDate()).padStart(2, '0');
+ const h = String(date.getHours()).padStart(2, '0');
+ const min = String(date.getMinutes()).padStart(2, '0');
+ return `${m}/${d} ${h}:${min}`;
+ } catch {
+ return '-';
+ }
+}
+
+export function formatDuration(ms: number | null | undefined): string {
+ if (ms == null || ms < 0) return '-';
+ const totalSeconds = Math.floor(ms / 1000);
+ const hours = Math.floor(totalSeconds / 3600);
+ const minutes = Math.floor((totalSeconds % 3600) / 60);
+ const seconds = totalSeconds % 60;
+
+ if (hours > 0) return `${hours}시간 ${minutes}분 ${seconds}초`;
+ if (minutes > 0) return `${minutes}분 ${seconds}초`;
+ return `${seconds}초`;
+}
+
+export function calculateDuration(
+ startTime: string | null | undefined,
+ endTime: string | null | undefined,
+): string {
+ if (!startTime) return '-';
+ const start = new Date(startTime).getTime();
+ if (isNaN(start)) return '-';
+
+ if (!endTime) return '실행 중...';
+ const end = new Date(endTime).getTime();
+ if (isNaN(end)) return '-';
+
+ return formatDuration(end - start);
+}
diff --git a/frontend/tsconfig.app.json b/frontend/tsconfig.app.json
new file mode 100644
index 0000000..a9b5a59
--- /dev/null
+++ b/frontend/tsconfig.app.json
@@ -0,0 +1,28 @@
+{
+ "compilerOptions": {
+ "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
+ "target": "ES2022",
+ "useDefineForClassFields": true,
+ "lib": ["ES2022", "DOM", "DOM.Iterable"],
+ "module": "ESNext",
+ "types": ["vite/client"],
+ "skipLibCheck": true,
+
+ /* Bundler mode */
+ "moduleResolution": "bundler",
+ "allowImportingTsExtensions": true,
+ "verbatimModuleSyntax": true,
+ "moduleDetection": "force",
+ "noEmit": true,
+ "jsx": "react-jsx",
+
+ /* Linting */
+ "strict": true,
+ "noUnusedLocals": true,
+ "noUnusedParameters": true,
+ "erasableSyntaxOnly": true,
+ "noFallthroughCasesInSwitch": true,
+ "noUncheckedSideEffectImports": true
+ },
+ "include": ["src"]
+}
diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json
new file mode 100644
index 0000000..1ffef60
--- /dev/null
+++ b/frontend/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "files": [],
+ "references": [
+ { "path": "./tsconfig.app.json" },
+ { "path": "./tsconfig.node.json" }
+ ]
+}
diff --git a/frontend/tsconfig.node.json b/frontend/tsconfig.node.json
new file mode 100644
index 0000000..8a67f62
--- /dev/null
+++ b/frontend/tsconfig.node.json
@@ -0,0 +1,26 @@
+{
+ "compilerOptions": {
+ "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
+ "target": "ES2023",
+ "lib": ["ES2023"],
+ "module": "ESNext",
+ "types": ["node"],
+ "skipLibCheck": true,
+
+ /* Bundler mode */
+ "moduleResolution": "bundler",
+ "allowImportingTsExtensions": true,
+ "verbatimModuleSyntax": true,
+ "moduleDetection": "force",
+ "noEmit": true,
+
+ /* Linting */
+ "strict": true,
+ "noUnusedLocals": true,
+ "noUnusedParameters": true,
+ "erasableSyntaxOnly": true,
+ "noFallthroughCasesInSwitch": true,
+ "noUncheckedSideEffectImports": true
+ },
+ "include": ["vite.config.ts"]
+}
diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts
new file mode 100644
index 0000000..4a83652
--- /dev/null
+++ b/frontend/vite.config.ts
@@ -0,0 +1,21 @@
+import { defineConfig } from 'vite'
+import react from '@vitejs/plugin-react'
+import tailwindcss from '@tailwindcss/vite'
+
// Vite build/dev-server configuration for the frontend SPA.
export default defineConfig({
  plugins: [react(), tailwindcss()],
  server: {
    port: 5173,
    // Forward API calls from the dev server to the local Spring Boot backend.
    proxy: {
      '/snp-collector/api': {
        target: 'http://localhost:8041',
        changeOrigin: true,
      },
    },
  },
  // The app is served under this context path, matching the proxy prefix above.
  base: '/snp-collector/',
  build: {
    // Emit the production bundle straight into Spring Boot's static resources
    // so the backend serves the built frontend.
    outDir: '../src/main/resources/static',
    emptyOutDir: true,
  },
})
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 0000000..55ae0c7
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,6 @@
+{
+ "name": "snp-batch-validation",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {}
+}
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..2cb816f
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,206 @@
+
+
+ 4.0.0
+
+
+ org.springframework.boot
+ spring-boot-starter-parent
+ 3.2.1
+
+
+
+ com.snp
+ snp-collector
+ 1.0.0
+ SNP Collector
+ S&P Collector - 해양 데이터 수집 배치 시스템
+
+
+ 17
+ UTF-8
+ 17
+ 17
+
+
+ 3.2.1
+ 5.1.0
+ 42.7.6
+ 1.18.30
+ 2.5.0
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-batch
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-data-jpa
+
+
+
+
+ org.postgresql
+ postgresql
+ ${postgresql.version}
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-thymeleaf
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-quartz
+
+
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
+
+
+ org.projectlombok
+ lombok
+ ${lombok.version}
+ true
+
+
+
+
+ org.springframework.boot
+ spring-boot-devtools
+ runtime
+ true
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-actuator
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-webflux
+
+
+
+
+ org.springdoc
+ springdoc-openapi-starter-webmvc-ui
+ 2.3.0
+
+
+
+
+
+ com.github.ben-manes.caffeine
+ caffeine
+ 3.1.8
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+ org.springframework.batch
+ spring-batch-test
+ test
+
+
+
+ com.google.code.findbugs
+ jsr305
+ 3.0.2
+
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+ ${spring-boot.version}
+
+
+
+ org.projectlombok
+ lombok
+
+
+
+
+
+ com.github.eirslett
+ frontend-maven-plugin
+ 1.15.1
+
+ frontend
+ v20.19.0
+
+
+
+ install-node-and-npm
+ install-node-and-npm
+
+
+ npm-install
+ npm
+
+ install
+
+
+
+ npm-build
+ npm
+
+ run build
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.11.0
+
+ 17
+ 17
+ UTF-8
+
+
+ org.projectlombok
+ lombok
+ ${lombok.version}
+
+
+
+
+
+
+
diff --git a/src/main/java/com/snp/batch/SnpCollectorApplication.java b/src/main/java/com/snp/batch/SnpCollectorApplication.java
new file mode 100644
index 0000000..f4f495f
--- /dev/null
+++ b/src/main/java/com/snp/batch/SnpCollectorApplication.java
@@ -0,0 +1,16 @@
+package com.snp.batch;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.properties.ConfigurationPropertiesScan;
+import org.springframework.scheduling.annotation.EnableScheduling;
+
/**
 * Application entry point for the S&amp;P Collector batch system.
 *
 * <p>{@code @EnableScheduling} activates Spring's scheduled-task support;
 * {@code @ConfigurationPropertiesScan} auto-registers
 * {@code @ConfigurationProperties} classes without explicit enable declarations.
 */
@SpringBootApplication
@EnableScheduling
@ConfigurationPropertiesScan
public class SnpCollectorApplication {

    public static void main(String[] args) {
        SpringApplication.run(SnpCollectorApplication.class, args);
    }
}
diff --git a/src/main/java/com/snp/batch/api/logging/ApiAccessLoggingFilter.java b/src/main/java/com/snp/batch/api/logging/ApiAccessLoggingFilter.java
new file mode 100644
index 0000000..2322337
--- /dev/null
+++ b/src/main/java/com/snp/batch/api/logging/ApiAccessLoggingFilter.java
@@ -0,0 +1,149 @@
+package com.snp.batch.api.logging;
+
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+import org.springframework.web.filter.OncePerRequestFilter;
+import org.springframework.web.util.ContentCachingRequestWrapper;
+import org.springframework.web.util.ContentCachingResponseWrapper;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.UUID;
+
/**
 * Servlet filter that logs one line per API request/response pair.
 *
 * <p>Log destination: logs/api-access.log (per the logging configuration
 * implied by the class comment — confirm against logback settings).
 * Recorded fields: client IP, HTTP method, URI, query string / body,
 * response status, and processing time.
 */
@Slf4j
@Component
@Order(Ordered.HIGHEST_PRECEDENCE)
public class ApiAccessLoggingFilter extends OncePerRequestFilter {

    // Logged request bodies are truncated to this many characters.
    private static final int MAX_PAYLOAD_LENGTH = 1000;

    @Override
    protected void doFilterInternal(HttpServletRequest request,
                                    HttpServletResponse response,
                                    FilterChain filterChain) throws ServletException, IOException {

        // Skip static resources and actuator endpoints.
        String uri = request.getRequestURI();
        if (shouldSkip(uri)) {
            filterChain.doFilter(request, response);
            return;
        }

        // Wrap request/response so their bodies can be re-read for logging.
        ContentCachingRequestWrapper requestWrapper = new ContentCachingRequestWrapper(request);
        ContentCachingResponseWrapper responseWrapper = new ContentCachingResponseWrapper(response);

        // Short correlation id so a request's log entries can be grouped.
        String requestId = UUID.randomUUID().toString().substring(0, 8);
        long startTime = System.currentTimeMillis();

        try {
            filterChain.doFilter(requestWrapper, responseWrapper);
        } finally {
            long duration = System.currentTimeMillis() - startTime;
            logRequest(requestId, requestWrapper, responseWrapper, duration);
            // Required: copy the cached body back, or the client gets an empty response.
            responseWrapper.copyBodyToResponse();
        }
    }

    /** Static assets and actuator endpoints are excluded from access logging. */
    private boolean shouldSkip(String uri) {
        return uri.startsWith("/actuator")
                || uri.startsWith("/css")
                || uri.startsWith("/js")
                || uri.startsWith("/images")
                || uri.startsWith("/favicon")
                || uri.endsWith(".html")
                || uri.endsWith(".css")
                || uri.endsWith(".js")
                || uri.endsWith(".ico");
    }

    /**
     * Builds and emits the single access-log line for one completed request.
     * The log level follows the response status: 5xx → error, 4xx → warn,
     * otherwise info.
     */
    private void logRequest(String requestId,
                            ContentCachingRequestWrapper request,
                            ContentCachingResponseWrapper response,
                            long duration) {

        String clientIp = getClientIp(request);
        String method = request.getMethod();
        String uri = request.getRequestURI();
        String queryString = request.getQueryString();
        int status = response.getStatus();

        StringBuilder logMessage = new StringBuilder();
        logMessage.append(String.format("[%s] %s %s %s",
                requestId, clientIp, method, uri));

        // Query string, truncated to 200 characters.
        if (queryString != null && !queryString.isEmpty()) {
            logMessage.append("?").append(truncate(queryString, 200));
        }

        // Request body for POST/PUT/PATCH.
        // NOTE(review): the raw body is logged verbatim; if it can carry
        // credentials or PII, consider masking — confirm with the API surface.
        if (isBodyRequest(method)) {
            String body = getRequestBody(request);
            if (!body.isEmpty()) {
                logMessage.append(" | body=").append(truncate(body, MAX_PAYLOAD_LENGTH));
            }
        }

        // Response status and elapsed time.
        logMessage.append(String.format(" | status=%d | %dms", status, duration));

        // Log level chosen by response status.
        if (status >= 500) {
            log.error(logMessage.toString());
        } else if (status >= 400) {
            log.warn(logMessage.toString());
        } else {
            log.info(logMessage.toString());
        }
    }

    /**
     * Resolves the originating client IP, preferring proxy-forwarded headers
     * over the socket address.
     */
    private String getClientIp(HttpServletRequest request) {
        String ip = request.getHeader("X-Forwarded-For");
        if (ip == null || ip.isEmpty() || "unknown".equalsIgnoreCase(ip)) {
            ip = request.getHeader("X-Real-IP");
        }
        if (ip == null || ip.isEmpty() || "unknown".equalsIgnoreCase(ip)) {
            ip = request.getRemoteAddr();
        }
        // X-Forwarded-For may contain a comma-separated chain; keep only the first.
        if (ip != null && ip.contains(",")) {
            ip = ip.split(",")[0].trim();
        }
        return ip;
    }

    /** True for HTTP methods that normally carry a request body. */
    private boolean isBodyRequest(String method) {
        return "POST".equalsIgnoreCase(method)
                || "PUT".equalsIgnoreCase(method)
                || "PATCH".equalsIgnoreCase(method);
    }

    /** Reads the cached request body, collapsing all whitespace to single spaces. */
    private String getRequestBody(ContentCachingRequestWrapper request) {
        byte[] content = request.getContentAsByteArray();
        if (content.length == 0) {
            return "";
        }
        return new String(content, StandardCharsets.UTF_8)
                .replaceAll("\\s+", " ")
                .trim();
    }

    /** Truncates a string to maxLength characters, appending "..." when cut. */
    private String truncate(String str, int maxLength) {
        if (str == null) return "";
        if (str.length() <= maxLength) return str;
        return str.substring(0, maxLength) + "...";
    }
}
diff --git a/src/main/java/com/snp/batch/common/batch/config/BaseJobConfig.java b/src/main/java/com/snp/batch/common/batch/config/BaseJobConfig.java
new file mode 100644
index 0000000..cb7f2dd
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/config/BaseJobConfig.java
@@ -0,0 +1,138 @@
+package com.snp.batch.common.batch.config;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.job.builder.JobBuilder;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.step.builder.StepBuilder;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.batch.item.ItemWriter;
+import org.springframework.transaction.PlatformTransactionManager;
+
+/**
+ * Batch Job 설정을 위한 추상 클래스
+ * Reader → Processor → Writer 패턴의 표준 Job 구성 제공
+ *
+ * @param 입력 타입 (Reader 출력, Processor 입력)
+ * @param 출력 타입 (Processor 출력, Writer 입력)
+ */
+@Slf4j
+@RequiredArgsConstructor
+public abstract class BaseJobConfig {
+
+ protected final JobRepository jobRepository;
+ protected final PlatformTransactionManager transactionManager;
+
+ /**
+ * Job 이름 반환 (하위 클래스에서 구현)
+ * 예: "shipDataImportJob"
+ */
+ protected abstract String getJobName();
+
+ /**
+ * Step 이름 반환 (선택사항, 기본: {jobName}Step)
+ */
+ protected String getStepName() {
+ return getJobName() + "Step";
+ }
+
+ /**
+ * Reader 생성 (하위 클래스에서 구현)
+ */
+ protected abstract ItemReader createReader();
+
+ /**
+ * Processor 생성 (하위 클래스에서 구현)
+ * 처리 로직이 없는 경우 null 반환 가능
+ */
+ protected abstract ItemProcessor createProcessor();
+
+ /**
+ * Writer 생성 (하위 클래스에서 구현)
+ */
+ protected abstract ItemWriter createWriter();
+
+ /**
+ * Chunk 크기 반환 (선택사항, 기본: 100)
+ */
+ protected int getChunkSize() {
+ return 100;
+ }
+
+ /**
+ * Job 시작 전 실행 (선택사항)
+ * Job Listener 등록 시 사용
+ */
+ protected void configureJob(JobBuilder jobBuilder) {
+ // 기본 구현: 아무것도 하지 않음
+ // 하위 클래스에서 필요시 오버라이드
+ // 예: jobBuilder.listener(jobExecutionListener())
+ }
+
+ /**
+ * Step 커스터마이징 (선택사항)
+ * Step Listener, FaultTolerant 등 설정 시 사용
+ */
+ protected void configureStep(StepBuilder stepBuilder) {
+ // 기본 구현: 아무것도 하지 않음
+ // 하위 클래스에서 필요시 오버라이드
+ // 예: stepBuilder.listener(stepExecutionListener())
+ // stepBuilder.faultTolerant().skip(Exception.class).skipLimit(10)
+ }
+
+ /**
+ * Step 생성 (표준 구현 제공)
+ */
+ public Step step() {
+ log.info("Step 생성: {}", getStepName());
+
+ ItemProcessor processor = createProcessor();
+ StepBuilder stepBuilder = new StepBuilder(getStepName(), jobRepository);
+
+ // Processor가 있는 경우
+ if (processor != null) {
+ var chunkBuilder = stepBuilder
+ .chunk(getChunkSize(), transactionManager)
+ .reader(createReader())
+ .processor(processor)
+ .writer(createWriter());
+
+ // 커스텀 설정 적용
+ configureStep(stepBuilder);
+
+ return chunkBuilder.build();
+ }
+ // Processor가 없는 경우 (I == O 타입 가정)
+ else {
+ @SuppressWarnings("unchecked")
+ var chunkBuilder = stepBuilder
+ .chunk(getChunkSize(), transactionManager)
+ .reader(createReader())
+ .writer((ItemWriter super I>) createWriter());
+
+ // 커스텀 설정 적용
+ configureStep(stepBuilder);
+
+ return chunkBuilder.build();
+ }
+ }
+
+ /**
+ * Job 생성 (표준 구현 제공)
+ */
+ public Job job() {
+ log.info("Job 생성: {}", getJobName());
+
+ JobBuilder jobBuilder = new JobBuilder(getJobName(), jobRepository);
+
+ // 커스텀 설정 적용
+ configureJob(jobBuilder);
+
+ return jobBuilder
+ .start(step())
+ .build();
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/config/BaseMultiStepJobConfig.java b/src/main/java/com/snp/batch/common/batch/config/BaseMultiStepJobConfig.java
new file mode 100644
index 0000000..2d2053c
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/config/BaseMultiStepJobConfig.java
@@ -0,0 +1,44 @@
+package com.snp.batch.common.batch.config;
+
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.job.builder.JobBuilder;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.item.ItemProcessor;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.batch.item.ItemWriter;
+import org.springframework.transaction.PlatformTransactionManager;
+
+/**
+ * 기존 단일 스텝 기능을 유지하면서 멀티 스텝 구성을 지원하는 확장 클래스
+ */
+public abstract class BaseMultiStepJobConfig extends BaseJobConfig {
+
+ public BaseMultiStepJobConfig(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
+ super(jobRepository, transactionManager);
+ }
+
+ /**
+ * 하위 클래스에서 멀티 스텝 흐름을 정의합니다.
+ */
+ protected abstract Job createJobFlow(JobBuilder jobBuilder);
+
+ /**
+ * 부모의 job() 메서드를 오버라이드하여 멀티 스텝 흐름을 태웁니다.
+ */
+ @Override
+ public Job job() {
+ JobBuilder jobBuilder = new JobBuilder(getJobName(), jobRepository);
+ configureJob(jobBuilder); // 기존 리스너 등 설정 유지
+
+ return createJobFlow(jobBuilder);
+ }
+
+ // 단일 스텝용 Reader/Processor/Writer는 사용하지 않을 경우
+ // 기본적으로 null이나 예외를 던지도록 구현하여 구현 부담을 줄일 수 있습니다.
+ @Override
+ protected ItemReader createReader() { return null; }
+ @Override
+ protected ItemProcessor createProcessor() { return null; }
+ @Override
+ protected ItemWriter createWriter() { return null; }
+}
\ No newline at end of file
diff --git a/src/main/java/com/snp/batch/common/batch/config/BasePartitionedJobConfig.java b/src/main/java/com/snp/batch/common/batch/config/BasePartitionedJobConfig.java
new file mode 100644
index 0000000..7b5a119
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/config/BasePartitionedJobConfig.java
@@ -0,0 +1,82 @@
+package com.snp.batch.common.batch.config;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.job.flow.FlowExecutionStatus;
+import org.springframework.batch.core.job.flow.JobExecutionDecider;
+import org.springframework.batch.core.partition.support.Partitioner;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.step.builder.StepBuilder;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.core.task.TaskExecutor;
+import org.springframework.transaction.PlatformTransactionManager;
+
+/**
+ * 파티션 기반 병렬 처리 Job 구성을 위한 추상 클래스.
+ * 키 목록 조회 → 파티션 병렬 처리 → 후처리 패턴의 공통 인프라 제공.
+ *
+ * @param 입력 타입 (Reader 출력, Processor 입력)
+ * @param 출력 타입 (Processor 출력, Writer 입력)
+ */
+@Slf4j
+public abstract class BasePartitionedJobConfig extends BaseMultiStepJobConfig {
+
+ public BasePartitionedJobConfig(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
+ super(jobRepository, transactionManager);
+ }
+
+ /**
+ * 파티션 Step을 생성합니다.
+ *
+ * @param stepName 파티션 Step 이름
+ * @param workerStepName Worker Step 이름 (partitioner 등록에 사용)
+ * @param partitioner Partitioner 인스턴스
+ * @param workerStep Worker Step 인스턴스
+ * @param taskExecutor 병렬 실행용 TaskExecutor
+ * @param gridSize 파티션 수
+ * @return 구성된 파티션 Step
+ */
+ protected Step createPartitionedStep(String stepName, String workerStepName,
+ Partitioner partitioner, Step workerStep,
+ TaskExecutor taskExecutor, int gridSize) {
+ return new StepBuilder(stepName, jobRepository)
+ .partitioner(workerStepName, partitioner)
+ .step(workerStep)
+ .taskExecutor(taskExecutor)
+ .gridSize(gridSize)
+ .build();
+ }
+
+ /**
+ * 키 건수 기반 Decider를 생성합니다.
+ * JobExecutionContext의 지정된 키 값이 0이면 EMPTY_RESPONSE, 아니면 NORMAL 반환.
+ *
+ * @param contextKey JobExecutionContext에서 조회할 int 키 이름
+ * @param jobName 로그에 표시할 Job 이름
+ * @return 키 건수 기반 JobExecutionDecider
+ */
+ protected JobExecutionDecider createKeyCountDecider(String contextKey, String jobName) {
+ return (jobExecution, stepExecution) -> {
+ int totalCount = jobExecution.getExecutionContext().getInt(contextKey, 0);
+ if (totalCount == 0) {
+ log.info("[{}] Decider: EMPTY_RESPONSE - {} 0건으로 후속 스텝 스킵", jobName, contextKey);
+ return new FlowExecutionStatus("EMPTY_RESPONSE");
+ }
+ log.info("[{}] Decider: NORMAL - {} {} 건 처리 시작", jobName, contextKey, totalCount);
+ return new FlowExecutionStatus("NORMAL");
+ };
+ }
+
+ /**
+ * LastExecution 업데이트 Step을 생성합니다.
+ *
+ * @param stepName Step 이름
+ * @param tasklet LastExecutionUpdateTasklet 인스턴스
+ * @return 구성된 LastExecution 업데이트 Step
+ */
+ protected Step createLastExecutionUpdateStep(String stepName, Tasklet tasklet) {
+ return new StepBuilder(stepName, jobRepository)
+ .tasklet(tasklet, transactionManager)
+ .build();
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/entity/BaseEntity.java b/src/main/java/com/snp/batch/common/batch/entity/BaseEntity.java
new file mode 100644
index 0000000..bedd1ee
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/entity/BaseEntity.java
@@ -0,0 +1,64 @@
+package com.snp.batch.common.batch.entity;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+import lombok.experimental.SuperBuilder;
+
+import java.time.LocalDateTime;
+
+/**
+ * 모든 Entity의 공통 베이스 클래스 - JDBC 전용
+ * 생성/수정 감사(Audit) 필드 제공
+ *
+ * 이 필드들은 Repository의 Insert/Update 시 자동으로 설정됩니다.
+ * BaseJdbcRepository가 감사 필드를 자동으로 관리합니다.
+ */
+@Data
+@SuperBuilder
+@NoArgsConstructor
+@AllArgsConstructor
+public abstract class BaseEntity {
+
+ /**
+ * 생성 일시
+ * 컬럼: created_at (TIMESTAMP)
+ */
+ private LocalDateTime createdAt;
+
+ /**
+ * 수정 일시
+ * 컬럼: updated_at (TIMESTAMP)
+ */
+ private LocalDateTime updatedAt;
+
+ /**
+ * 생성자
+ * 컬럼: created_by (VARCHAR(100))
+ */
+ private String createdBy;
+
+ /**
+ * 수정자
+ * 컬럼: updated_by (VARCHAR(100))
+ */
+ private String updatedBy;
+
+ /**
+ * 배치 잡 실행 ID
+ * 컬럼: job_execution_id (int8)
+ */
+ private Long jobExecutionId;
+
+ /**
+ * 배치 공통 필드 설정을 위한 편의 메서드
+ */
+ @SuppressWarnings("unchecked")
+ public T setBatchInfo(Long jobExecutionId, String createdBy) {
+ this.jobExecutionId = jobExecutionId;
+ this.createdBy = createdBy;
+ // 필요시 생성일시 강제 설정 (JPA Auditing을 안 쓸 경우)
+ if (this.createdAt == null) this.createdAt = LocalDateTime.now();
+ return (T) this;
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/listener/AutoRetryJobExecutionListener.java b/src/main/java/com/snp/batch/common/batch/listener/AutoRetryJobExecutionListener.java
new file mode 100644
index 0000000..70a1cf6
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/listener/AutoRetryJobExecutionListener.java
@@ -0,0 +1,113 @@
+package com.snp.batch.common.batch.listener;
+
+import com.snp.batch.global.repository.BatchFailedRecordRepository;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobExecutionListener;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.stereotype.Component;
+
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * 배치 Job 완료 후 실패 레코드가 있으면 자동으로 재수집을 트리거하는 리스너.
+ *
+ * 동작 조건:
+ * - Job 상태가 COMPLETED일 때만 실행
+ * - executionMode가 RECOLLECT가 아닌 일반 모드일 때만 실행 (무한 루프 방지)
+ * - StepExecution의 ExecutionContext에 failedRecordKeys가 존재할 때만 실행
+ * - 모든 Step의 failedRecordKeys를 Job 레벨에서 병합한 후 1회만 triggerRetryAsync 호출
+ * - retryCount가 MAX_AUTO_RETRY_COUNT 이상인 키는 재수집에서 제외 (무한 루프 방지)
+ */
+@Slf4j
+@Component
+@RequiredArgsConstructor
+public class AutoRetryJobExecutionListener implements JobExecutionListener {
+
+ private static final String FAILED_RECORD_KEYS = "failedRecordKeys";
+ private static final String FAILED_JOB_EXECUTION_ID = "failedJobExecutionId";
+ private static final String FAILED_API_KEY = "failedApiKey";
+ private static final int MAX_AUTO_RETRY_COUNT = 3;
+
+ private final AutoRetryTriggerService autoRetryTriggerService;
+ private final BatchFailedRecordRepository batchFailedRecordRepository;
+
+ @Override
+ public void beforeJob(JobExecution jobExecution) {
+ // no-op
+ }
+
+ @Override
+ public void afterJob(JobExecution jobExecution) {
+ String executionMode = jobExecution.getJobParameters()
+ .getString("executionMode", "NORMAL");
+
+ // 재수집 모드에서는 자동 재수집을 트리거하지 않음 (무한 루프 방지)
+ if ("RECOLLECT".equals(executionMode)) {
+ return;
+ }
+
+ // Job이 정상 완료된 경우에만 재수집 트리거
+ if (jobExecution.getStatus() != BatchStatus.COMPLETED) {
+ return;
+ }
+
+ String jobName = jobExecution.getJobInstance().getJobName();
+
+ // 모든 Step의 failedRecordKeys를 Set으로 병합 (중복 제거)
+ Set mergedKeys = new LinkedHashSet<>();
+ Long sourceJobExecutionId = jobExecution.getId();
+ String apiKey = null;
+
+ for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
+ String failedKeys = stepExecution.getExecutionContext()
+ .getString(FAILED_RECORD_KEYS, null);
+
+ if (failedKeys == null || failedKeys.isBlank()) {
+ continue;
+ }
+
+ Arrays.stream(failedKeys.split(","))
+ .map(String::trim)
+ .filter(key -> !key.isBlank())
+ .forEach(mergedKeys::add);
+
+ // apiKey: non-null인 것 중 첫 번째 사용
+ if (apiKey == null) {
+ apiKey = stepExecution.getExecutionContext()
+ .getString(FAILED_API_KEY, null);
+ }
+ }
+
+ if (mergedKeys.isEmpty()) {
+ return;
+ }
+
+ // retryCount가 MAX_AUTO_RETRY_COUNT 이상인 키 필터링
+ List exceededKeys = batchFailedRecordRepository.findExceededRetryKeys(
+ jobName, List.copyOf(mergedKeys), MAX_AUTO_RETRY_COUNT);
+
+ if (!exceededKeys.isEmpty()) {
+ log.warn("[AutoRetry] {} Job: 최대 재시도 횟수({})를 초과한 키 {}건 제외: {}",
+ jobName, MAX_AUTO_RETRY_COUNT, exceededKeys.size(), exceededKeys);
+ mergedKeys.removeAll(exceededKeys);
+ }
+
+ if (mergedKeys.isEmpty()) {
+ log.warn("[AutoRetry] {} Job: 모든 실패 키가 최대 재시도 횟수를 초과하여 재수집을 건너뜁니다.", jobName);
+ return;
+ }
+
+ log.info("[AutoRetry] {} Job 완료 후 실패 건 {}건 감지 → 자동 재수집 트리거",
+ jobName, mergedKeys.size());
+
+ // sourceJobExecutionId 기반으로 1회만 triggerRetryAsync 호출 (실패 키는 DB에서 직접 조회)
+ autoRetryTriggerService.triggerRetryAsync(
+ jobName, mergedKeys.size(), sourceJobExecutionId, apiKey);
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/listener/AutoRetryTriggerService.java b/src/main/java/com/snp/batch/common/batch/listener/AutoRetryTriggerService.java
new file mode 100644
index 0000000..5485efb
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/listener/AutoRetryTriggerService.java
@@ -0,0 +1,66 @@
+package com.snp.batch.common.batch.listener;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.JobParametersBuilder;
+import org.springframework.batch.core.launch.JobLauncher;
+import org.springframework.context.annotation.Lazy;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Service;
+
+import java.util.Map;
+
+/**
+ * 자동 재수집 Job 비동기 트리거 서비스.
+ * JobExecutionListener 내부 self-invocation으로는 @Async 프록시가 동작하지 않으므로
+ * 별도 빈으로 분리하여 프록시를 통한 비동기 호출을 보장합니다.
+ */
+@Slf4j
+@Service
+public class AutoRetryTriggerService {
+
+ private final JobLauncher jobLauncher;
+ private final Map jobMap;
+
+ public AutoRetryTriggerService(JobLauncher jobLauncher, @Lazy Map jobMap) {
+ this.jobLauncher = jobLauncher;
+ this.jobMap = jobMap;
+ }
+
+ @Async("autoRetryExecutor")
+ public void triggerRetryAsync(String jobName, int failedCount,
+ Long sourceJobExecutionId, String apiKey) {
+ try {
+ Job job = jobMap.get(jobName);
+ if (job == null) {
+ log.error("[AutoRetry] Job을 찾을 수 없습니다: {}", jobName);
+ return;
+ }
+
+ JobParametersBuilder builder = new JobParametersBuilder()
+ .addLong("timestamp", System.currentTimeMillis())
+ .addString("sourceJobExecutionId", String.valueOf(sourceJobExecutionId))
+ .addString("executionMode", "RECOLLECT")
+ .addString("reason", "자동 재수집 (실패 건 자동 처리)")
+ .addString("executor", "AUTO_RETRY");
+
+ if (apiKey != null) {
+ builder.addString("apiKey", apiKey);
+ }
+
+ JobParameters retryParams = builder.toJobParameters();
+
+ log.info("[AutoRetry] 재수집 Job 실행 시작: jobName={}, 실패건={}, sourceJobExecutionId={}",
+ jobName, failedCount, sourceJobExecutionId);
+
+ JobExecution retryExecution = jobLauncher.run(job, retryParams);
+
+ log.info("[AutoRetry] 재수집 Job 실행 완료: jobName={}, executionId={}, status={}",
+ jobName, retryExecution.getId(), retryExecution.getStatus());
+ } catch (Exception e) {
+ log.error("[AutoRetry] 재수집 Job 실행 실패: jobName={}, error={}", jobName, e.getMessage(), e);
+ }
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/listener/RecollectionJobExecutionListener.java b/src/main/java/com/snp/batch/common/batch/listener/RecollectionJobExecutionListener.java
new file mode 100644
index 0000000..814ccf7
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/listener/RecollectionJobExecutionListener.java
@@ -0,0 +1,120 @@
+package com.snp.batch.common.batch.listener;
+
+import com.snp.batch.service.RecollectionHistoryService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobExecutionListener;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.stereotype.Component;
+
+@Slf4j
+@Component
+@RequiredArgsConstructor
+public class RecollectionJobExecutionListener implements JobExecutionListener {
+
+ private final RecollectionHistoryService recollectionHistoryService;
+
+ @Override
+ public void beforeJob(JobExecution jobExecution) {
+ String executionMode = jobExecution.getJobParameters()
+ .getString("executionMode", "NORMAL");
+
+ if (!"RECOLLECT".equals(executionMode)) {
+ return;
+ }
+
+ Long jobExecutionId = jobExecution.getId();
+ String jobName = jobExecution.getJobInstance().getJobName();
+ String apiKey = resolveApiKey(jobExecution);
+ String executor = jobExecution.getJobParameters().getString("executor", "SYSTEM");
+ String reason = jobExecution.getJobParameters().getString("reason");
+
+ try {
+ // 재수집 이력 기록
+ recollectionHistoryService.recordStart(
+ jobName, jobExecutionId, apiKey, executor, reason);
+ } catch (Exception e) {
+ log.error("[RecollectionListener] beforeJob 처리 실패: jobExecutionId={}", jobExecutionId, e);
+ }
+ }
+
+ @Override
+ public void afterJob(JobExecution jobExecution) {
+ String executionMode = jobExecution.getJobParameters()
+ .getString("executionMode", "NORMAL");
+
+ if (!"RECOLLECT".equals(executionMode)) {
+ return;
+ }
+
+ Long jobExecutionId = jobExecution.getId();
+ String status = jobExecution.getStatus().name();
+
+ // Step별 통계 집계
+ long totalRead = 0;
+ long totalWrite = 0;
+ long totalSkip = 0;
+ int totalApiCalls = 0;
+
+ for (StepExecution step : jobExecution.getStepExecutions()) {
+ totalRead += step.getReadCount();
+ totalWrite += step.getWriteCount();
+ totalSkip += step.getReadSkipCount()
+ + step.getProcessSkipCount()
+ + step.getWriteSkipCount();
+
+ if (step.getExecutionContext().containsKey("totalApiCalls")) {
+ totalApiCalls += step.getExecutionContext().getInt("totalApiCalls", 0);
+ }
+ }
+
+ // 실패 사유 추출
+ String failureReason = null;
+ if ("FAILED".equals(status)) {
+ failureReason = jobExecution.getExitStatus().getExitDescription();
+ if (failureReason == null || failureReason.isEmpty()) {
+ failureReason = jobExecution.getStepExecutions().stream()
+ .filter(s -> "FAILED".equals(s.getStatus().name()))
+ .map(s -> s.getExitStatus().getExitDescription())
+ .filter(desc -> desc != null && !desc.isEmpty())
+ .findFirst()
+ .orElse("Unknown error");
+ }
+ if (failureReason != null && failureReason.length() > 2000) {
+ failureReason = failureReason.substring(0, 2000) + "...";
+ }
+ }
+
+ // 재수집 이력 완료 기록
+ try {
+ recollectionHistoryService.recordCompletion(
+ jobExecutionId, status,
+ totalRead, totalWrite, totalSkip,
+ totalApiCalls, null,
+ failureReason);
+ } catch (Exception e) {
+ log.error("[RecollectionListener] 재수집 이력 완료 기록 실패: jobExecutionId={}", jobExecutionId, e);
+ }
+ }
+
+ /**
+ * Job 파라미터에서 apiKey를 읽고, 없으면 jobName으로 BatchCollectionPeriod에서 조회합니다.
+ * 수동 재수집(UI 실패건 재수집)에서는 apiKey가 파라미터로 전달되지 않을 수 있으므로
+ * jobName → apiKey 매핑을 fallback으로 사용합니다.
+ */
+ private String resolveApiKey(JobExecution jobExecution) {
+ String apiKey = jobExecution.getJobParameters().getString("apiKey");
+ if (apiKey != null) {
+ return apiKey;
+ }
+
+ // fallback: jobName으로 BatchCollectionPeriod에서 apiKey 조회
+ String jobName = jobExecution.getJobInstance().getJobName();
+ apiKey = recollectionHistoryService.findApiKeyByJobName(jobName);
+ if (apiKey != null) {
+ log.info("[RecollectionListener] apiKey를 jobName에서 조회: jobName={}, apiKey={}", jobName, apiKey);
+ }
+ return apiKey;
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/partition/StringListPartitioner.java b/src/main/java/com/snp/batch/common/batch/partition/StringListPartitioner.java
new file mode 100644
index 0000000..7a99493
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/partition/StringListPartitioner.java
@@ -0,0 +1,61 @@
+package com.snp.batch.common.batch.partition;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.partition.support.Partitioner;
+import org.springframework.batch.item.ExecutionContext;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * 문자열 키 목록을 N개 파티션으로 균등 분할하는 범용 Partitioner.
+ *
+ * 각 파티션의 ExecutionContext에 다음 값을 저장한다.
+ *
+ * - {@code {contextKeyName}} — 해당 파티션에 할당된 키 목록 (CSV 형식)
+ * - {@code partitionIndex} — 파티션 인덱스 (0-based)
+ * - {@code partitionSize} — 해당 파티션의 키 수
+ *
+ */
+@Slf4j
+public class StringListPartitioner implements Partitioner {
+
+ private final List allKeys;
+ private final int partitionCount;
+ private final String contextKeyName;
+
+ public StringListPartitioner(List allKeys, int partitionCount, String contextKeyName) {
+ this.allKeys = allKeys;
+ this.partitionCount = partitionCount;
+ this.contextKeyName = contextKeyName;
+ }
+
+ @Override
+ public Map partition(int gridSize) {
+ int totalSize = allKeys.size();
+ int actualPartitionCount = Math.min(partitionCount, Math.max(1, totalSize));
+ Map partitions = new LinkedHashMap<>();
+ int partitionSize = (int) Math.ceil((double) totalSize / actualPartitionCount);
+
+ for (int i = 0; i < actualPartitionCount; i++) {
+ int fromIndex = i * partitionSize;
+ int toIndex = Math.min(fromIndex + partitionSize, totalSize);
+ if (fromIndex >= totalSize) break;
+
+ List partitionKeys = allKeys.subList(fromIndex, toIndex);
+ ExecutionContext context = new ExecutionContext();
+ context.putString(contextKeyName, String.join(",", partitionKeys));
+ context.putInt("partitionIndex", i);
+ context.putInt("partitionSize", partitionKeys.size());
+
+ String partitionKey = "partition" + i;
+ partitions.put(partitionKey, context);
+ log.info("[StringListPartitioner] {} : 키 {} 건 (index {}-{})",
+ partitionKey, partitionKeys.size(), fromIndex, toIndex - 1);
+ }
+ log.info("[StringListPartitioner] 총 {} 개 파티션 생성 (전체 키: {} 건)",
+ partitions.size(), totalSize);
+ return partitions;
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/processor/BaseProcessor.java b/src/main/java/com/snp/batch/common/batch/processor/BaseProcessor.java
new file mode 100644
index 0000000..a9ad40c
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/processor/BaseProcessor.java
@@ -0,0 +1,61 @@
+package com.snp.batch.common.batch.processor;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.item.ItemProcessor;
+
+/**
+ * ItemProcessor 추상 클래스 (v2.0)
+ * 데이터 변환 및 처리 로직을 위한 템플릿 제공
+ *
+ * Template Method Pattern:
+ * - process(): 공통 로직 (null 체크, 로깅)
+ * - processItem(): 하위 클래스에서 변환 로직 구현
+ *
+ * 기본 용도:
+ * - 단순 변환: DTO → Entity
+ * - 데이터 필터링: null 반환 시 해당 아이템 스킵
+ * - 데이터 검증: 유효하지 않은 데이터 필터링
+ *
+ * 고급 용도 (다중 depth JSON 처리):
+ * - 중첩된 JSON을 여러 Entity로 분해
+ * - 1:N 관계 처리 (Order → OrderItems)
+ * - CompositeWriter와 조합하여 여러 테이블에 저장
+ *
+ * 예제:
+ * - 단순 변환: ProductDataProcessor (DTO → Entity)
+ * - 복잡한 처리: 복잡한 JSON 처리 예제 참고
+ *
+ * @param 입력 DTO 타입
+ * @param 출력 Entity 타입
+ */
+@Slf4j
+public abstract class BaseProcessor implements ItemProcessor {
+
+ /**
+ * 데이터 변환 로직 (하위 클래스에서 구현)
+ * DTO → Entity 변환 등의 비즈니스 로직 구현
+ *
+ * @param item 입력 DTO
+ * @return 변환된 Entity (필터링 시 null 반환 가능)
+ * @throws Exception 처리 중 오류 발생 시
+ */
+ protected abstract O processItem(I item) throws Exception;
+
+ /**
+ * Spring Batch ItemProcessor 인터페이스 구현
+ * 데이터 변환 및 필터링 수행
+ *
+ * @param item 입력 DTO
+ * @return 변환된 Entity (null이면 해당 아이템 스킵)
+ * @throws Exception 처리 중 오류 발생 시
+ */
+ @Override
+ public O process(I item) throws Exception {
+ if (item == null) {
+ return null;
+ }
+
+// log.debug("데이터 처리 중: {}", item);
+ return processItem(item);
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/reader/BaseApiReader.java b/src/main/java/com/snp/batch/common/batch/reader/BaseApiReader.java
new file mode 100644
index 0000000..e3b64d2
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/reader/BaseApiReader.java
@@ -0,0 +1,1001 @@
+package com.snp.batch.common.batch.reader;
+
+import com.snp.batch.global.model.BatchApiLog;
+import com.snp.batch.service.BatchApiLogService;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.annotation.BeforeStep;
+import org.springframework.batch.item.ExecutionContext;
+import org.springframework.batch.item.ItemReader;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
+import org.springframework.web.reactive.function.client.WebClient;
+import org.springframework.web.reactive.function.client.WebClientResponseException;
+import org.springframework.web.util.UriBuilder;
+import org.springframework.core.ParameterizedTypeReference;
+import org.springframework.web.util.UriComponentsBuilder;
+
+import java.net.URI;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+/**
+ * REST API 기반 ItemReader 추상 클래스 (v3.0 - Chunk 기반)
+ *
+ * 주요 기능:
+ * - HTTP Method 지원: GET, POST
+ * - 다중 Query Parameter 처리
+ * - Path Variable 지원
+ * - Request Body 지원 (POST)
+ * - 동적 Header 설정
+ * - 복잡한 JSON 응답 파싱
+ * - ✨ Chunk 기반 배치 처리 (Iterator 패턴)
+ *
+ * Template Method Pattern:
+ * - read(): 공통 로직 (1건씩 순차 반환)
+ * - fetchNextBatch(): 다음 배치 조회 (구현체에서 오버라이드)
+ * - 새로운 훅 메서드들: HTTP Method, 파라미터, 헤더 등
+ *
+ * 동작 방식:
+ * 1. read() 호출 시 currentBatch가 비어있으면 fetchNextBatch() 호출
+ * 2. fetchNextBatch()가 100건 반환
+ * 3. read()가 100번 호출되면서 1건씩 반환
+ * 4. 100건 모두 반환되면 다시 fetchNextBatch() 호출
+ * 5. fetchNextBatch()가 null/empty 반환 시 Job 종료
+ *
+ * 하위 호환성:
+ * - 기존 fetchDataFromApi() 메서드 계속 지원
+ * - 새로운 fetchNextBatch() 메서드 사용 권장
+ *
+ * @param DTO 타입 (API 응답 데이터)
+ */
+@Slf4j
+public abstract class BaseApiReader implements ItemReader {
+
+ // Chunk 기반 Iterator 패턴
+ private java.util.Iterator currentBatch;
+ private boolean initialized = false;
+ private boolean useChunkMode = false; // Chunk 모드 사용 여부
+
+ // 하위 호환성을 위한 필드 (fetchDataFromApi 사용 시)
+ private List legacyDataList;
+ private int legacyNextIndex = 0;
+
+ // WebClient는 하위 클래스에서 주입받아 사용
+ protected WebClient webClient;
+
+ // StepExecution - API 정보 저장용
+ protected StepExecution stepExecution;
+
+ // API 호출 통계
+ private int totalApiCalls = 0;
+ private int completedApiCalls = 0;
+
+ // Batch Execution Id
+ private Long jobExecutionId; // 현재 Job 실행 ID
+ private Long stepExecutionId; // 현재 Step 실행 ID
+ /**
+ * Spring Batch injects the execution IDs into this reader when the Step starts.
+ */
+ public void setExecutionIds(Long jobExecutionId, Long stepExecutionId) {
+ this.jobExecutionId = jobExecutionId;
+ this.stepExecutionId = stepExecutionId;
+ }
+
+ // Current JobExecution id (null until setExecutionIds has been called).
+ protected Long getJobExecutionId() {
+ return this.jobExecutionId;
+ }
+
+ // Current StepExecution id (null until setExecutionIds has been called).
+ protected Long getStepExecutionId() {
+ return this.stepExecutionId;
+ }
+ /**
+ * Default constructor (used without a WebClient - e.g. for mock-data readers).
+ */
+ protected BaseApiReader() {
+ this.webClient = null;
+ }
+ /**
+ * API 호출 및 로그 적재 통합 메서드
+ * Response Json 구조 : [...]
+ */
+ protected List executeListApiCall(
+ String baseUrl,
+ String path,
+ Map params,
+ ParameterizedTypeReference> typeReference,
+ BatchApiLogService logService) {
+
+ // 1. 전체 URI 생성 (로그용)
+ MultiValueMap multiValueParams = new LinkedMultiValueMap<>();
+ if (params != null) {
+ params.forEach((key, value) ->
+ multiValueParams.put(key, Collections.singletonList(value))
+ );
+ }
+
+ String fullUri = UriComponentsBuilder.fromHttpUrl(baseUrl)
+ .path(path)
+ .queryParams(multiValueParams)
+ .build()
+ .toUriString();
+
+ long startTime = System.currentTimeMillis();
+ int statusCode = 200;
+ String errorMessage = null;
+ Long responseSize = 0L;
+
+ try {
+ log.info("[{}] API 요청 시작: {}", getReaderName(), fullUri);
+
+ List result = webClient.get()
+ .uri(uriBuilder -> {
+ uriBuilder.path(path);
+ if (params != null) params.forEach(uriBuilder::queryParam);
+ return uriBuilder.build();
+ })
+ .retrieve()
+ .bodyToMono(typeReference)
+ .block();
+
+ responseSize = (result != null) ? (long) result.size() : 0L;
+ return result;
+
+ } catch (WebClientResponseException e) {
+ // API 서버에서 응답은 왔으나 에러인 경우 (4xx, 5xx)
+ statusCode = e.getStatusCode().value();
+ errorMessage = String.format("API Error: %s", e.getResponseBodyAsString());
+ throw e;
+ } catch (Exception e) {
+ // 네트워크 오류, 타임아웃 등 기타 예외
+ statusCode = 500;
+ errorMessage = String.format("System Error: %s", e.getMessage());
+ throw e;
+ } finally {
+ // 성공/실패 여부와 관계없이 무조건 로그 저장
+ long duration = System.currentTimeMillis() - startTime;
+
+ logService.saveLog(BatchApiLog.builder()
+ .apiRequestLocation(getReaderName())
+ .requestUri(fullUri)
+ .httpMethod("GET")
+ .statusCode(statusCode)
+ .responseTimeMs(duration)
+ .responseCount(responseSize)
+ .errorMessage(errorMessage)
+ .createdAt(LocalDateTime.now())
+ .jobExecutionId(this.jobExecutionId) // 추가
+ .stepExecutionId(this.stepExecutionId) // 추가
+ .build());
+ }
+ }
+
+ protected List executeWrapperApiCall(
+ String baseUrl,
+ String path,
+ Class responseWrapperClass, // Stat5CodeApiResponse.class 등을 받음
+ Function> listExtractor, // 결과 객체에서 리스트를 꺼내는 로직
+ BatchApiLogService logService) {
+
+ String fullUri = UriComponentsBuilder.fromHttpUrl(baseUrl)
+ .path(path)
+ .build()
+ .toUriString();
+
+ long startTime = System.currentTimeMillis();
+ int statusCode = 200;
+ String errorMessage = null;
+ Long responseSize = 0L;
+
+ try {
+ log.info("[{}] API 요청 시작: {}", getReaderName(), fullUri);
+
+ // 1. List이 아닌 Wrapper 객체(T)로 받아옵니다.
+ T response = webClient.get()
+ .uri(uriBuilder -> uriBuilder.path(path).build())
+ .retrieve()
+ .bodyToMono(responseWrapperClass)
+ .block();
+
+ // 2. 추출 함수(listExtractor)를 사용하여 내부 리스트를 꺼냅니다.
+ List result = (response != null) ? listExtractor.apply(response) : Collections.emptyList();
+
+ responseSize = (long) result.size();
+ return result;
+
+ } catch (WebClientResponseException e) {
+ statusCode = e.getStatusCode().value();
+ errorMessage = String.format("API Error: %s", e.getResponseBodyAsString());
+ throw e;
+ } catch (Exception e) {
+ statusCode = 500;
+ errorMessage = String.format("System Error: %s", e.getMessage());
+ throw e;
+ } finally {
+ long duration = System.currentTimeMillis() - startTime;
+
+ logService.saveLog(BatchApiLog.builder()
+ .apiRequestLocation(getReaderName())
+ .requestUri(fullUri)
+ .httpMethod("GET")
+ .statusCode(statusCode)
+ .responseTimeMs(duration)
+ .responseCount(responseSize)
+ .errorMessage(errorMessage)
+ .createdAt(LocalDateTime.now())
+ .jobExecutionId(this.jobExecutionId)
+ .stepExecutionId(this.stepExecutionId)
+ .build());
+ }
+ }
+
+ /**
+ * Executes a GET request whose response body is a plain JSON array and
+ * unconditionally records the outcome in BatchApiLog (success or failure).
+ *
+ * NOTE(review): the logged fullUri is built from baseUrl + path, but the
+ * actual WebClient call applies only 'path' — this assumes the injected
+ * WebClient was constructed with the same baseUrl; confirm at the injection site.
+ */
+ protected List executeListApiCall(
+ String baseUrl,
+ String path,
+ ParameterizedTypeReference> typeReference,
+ BatchApiLogService logService) {
+
+ // Full URI is assembled here purely for logging purposes.
+ String fullUri = UriComponentsBuilder.fromHttpUrl(baseUrl)
+ .path(path)
+ .build()
+ .toUriString();
+
+ long startTime = System.currentTimeMillis();
+ int statusCode = 200;
+ String errorMessage = null;
+ Long responseSize = 0L;
+
+ try {
+ log.info("[{}] API 요청 시작: {}", getReaderName(), fullUri);
+
+ List result = webClient.get()
+ .uri(uriBuilder -> {
+ uriBuilder.path(path);
+ return uriBuilder.build();
+ })
+ .retrieve()
+ .bodyToMono(typeReference)
+ .block();
+
+ responseSize = (result != null) ? (long) result.size() : 0L;
+ return result;
+
+ } catch (WebClientResponseException e) {
+ // The server responded, but with an error status (4xx, 5xx).
+ statusCode = e.getStatusCode().value();
+ errorMessage = String.format("API Error: %s", e.getResponseBodyAsString());
+ throw e;
+ } catch (Exception e) {
+ // Network failure, timeout, or any other unexpected exception.
+ statusCode = 500;
+ errorMessage = String.format("System Error: %s", e.getMessage());
+ throw e;
+ } finally {
+ // Persist the call log regardless of success or failure.
+ long duration = System.currentTimeMillis() - startTime;
+
+ logService.saveLog(BatchApiLog.builder()
+ .apiRequestLocation(getReaderName())
+ .requestUri(fullUri)
+ .httpMethod("GET")
+ .statusCode(statusCode)
+ .responseTimeMs(duration)
+ .responseCount(responseSize)
+ .errorMessage(errorMessage)
+ .createdAt(LocalDateTime.now())
+ .jobExecutionId(this.jobExecutionId) // correlates log rows with the batch job run
+ .stepExecutionId(this.stepExecutionId) // correlates log rows with the step run
+ .build());
+ }
+ }
+
+ /**
+ * Executes a GET request returning a single wrapper object (R) with optional
+ * query parameters, and records the outcome in BatchApiLog in all cases.
+ *
+ * The response-count column is derived by applying the caller-supplied
+ * sizeExtractor to the result; if extraction fails, 0 is logged and a
+ * warning is emitted instead of failing the step.
+ *
+ * NOTE(review): the previous doc claimed a fixed {"data": [...]} response
+ * shape — nothing in this method enforces that; the shape is whatever R
+ * and sizeExtractor agree on.
+ */
+ protected R executeSingleApiCall(
+ String baseUrl,
+ String path,
+ Map params,
+ ParameterizedTypeReference typeReference,
+ BatchApiLogService logService,
+ Function sizeExtractor) { // caller-provided function to count records in R
+
+ // 1. Build the full URI (logging only — the request itself re-applies path/params below).
+ MultiValueMap multiValueParams = new LinkedMultiValueMap<>();
+ if (params != null) {
+ params.forEach((key, value) ->
+ multiValueParams.put(key, Collections.singletonList(value))
+ );
+ }
+
+ String fullUri = UriComponentsBuilder.fromHttpUrl(baseUrl)
+ .path(path)
+ .queryParams(multiValueParams)
+ .build()
+ .toUriString();
+
+ long startTime = System.currentTimeMillis();
+ int statusCode = 200;
+ String errorMessage = null;
+ R result = null;
+
+ try {
+ log.info("[{}] Single API 요청 시작: {}", getReaderName(), fullUri);
+
+ result = webClient.get()
+ .uri(uriBuilder -> {
+ uriBuilder.path(path);
+ if (params != null) params.forEach(uriBuilder::queryParam);
+ return uriBuilder.build();
+ })
+ .retrieve()
+ .bodyToMono(typeReference)
+ .block();
+
+ return result;
+
+ } catch (WebClientResponseException e) {
+ // Server responded with an error status (4xx, 5xx).
+ statusCode = e.getStatusCode().value();
+ errorMessage = String.format("API Error: %s", e.getResponseBodyAsString());
+ throw e;
+ } catch (Exception e) {
+ // Network failure, timeout, or other unexpected exception.
+ statusCode = 500;
+ errorMessage = String.format("System Error: %s", e.getMessage());
+ throw e;
+ } finally {
+ long duration = System.currentTimeMillis() - startTime;
+
+ // 2. Compute record count via the injected extractor (0 on failure — never aborts logging).
+ long size = 0L;
+ if (result != null && sizeExtractor != null) {
+ try {
+ size = sizeExtractor.apply(result);
+ } catch (Exception e) {
+ log.warn("[{}] 사이즈 추출 중 오류 발생: {}", getReaderName(), e.getMessage());
+ }
+ }
+
+ // 3. Persist the call log (api_request_location, response count included).
+ logService.saveLog(BatchApiLog.builder()
+ .apiRequestLocation(getReaderName())
+ .jobExecutionId(this.jobExecutionId)
+ .stepExecutionId(this.stepExecutionId)
+ .requestUri(fullUri)
+ .httpMethod("GET")
+ .statusCode(statusCode)
+ .responseTimeMs(duration)
+ .responseCount(size)
+ .errorMessage(errorMessage)
+ .createdAt(LocalDateTime.now())
+ .build());
+ }
+ }
+
+
+ /**
+ * Constructor taking a WebClient (for real API integration).
+ *
+ * @param webClient Spring WebClient instance; may be null for mock-only readers
+ * (see the webClient null-check in saveApiInfoToContext)
+ */
+ protected BaseApiReader(WebClient webClient) {
+ this.webClient = webClient;
+ }
+ /**
+ * Initializes reader state before step execution and stores API metadata
+ * in the StepExecutionContext. Spring Batch injects the StepExecution and
+ * invokes this automatically via @BeforeStep.
+ *
+ * (Fixed: the original Javadoc had a duplicated comment opener.)
+ *
+ * @param stepExecution current step execution info
+ */
+ @BeforeStep
+ public void saveApiInfoToContext(StepExecution stepExecution) {
+ this.stepExecution = stepExecution;
+
+ // Reset reader state — required so a re-run of the Job reads fresh data.
+ resetReaderState();
+
+ // Store API metadata in the StepExecutionContext.
+ ExecutionContext context = stepExecution.getExecutionContext();
+
+ // Only record API info when a WebClient is present (mock readers skip this).
+ if (webClient != null) {
+ // 1. API URL
+ String baseUrl = getApiBaseUrl();
+ String apiPath = getApiPath();
+ String fullUrl = baseUrl != null ? baseUrl + apiPath : apiPath;
+ context.putString("apiUrl", fullUrl);
+
+ // 2. HTTP method
+ context.putString("apiMethod", getHttpMethod());
+
+ // 3. Request parameters (query params and path variables merged into one map)
+ Map params = new HashMap<>();
+ Map queryParams = getQueryParams();
+ if (queryParams != null && !queryParams.isEmpty()) {
+ params.putAll(queryParams);
+ }
+ Map pathVars = getPathVariables();
+ if (pathVars != null && !pathVars.isEmpty()) {
+ params.putAll(pathVars);
+ }
+ context.put("apiParameters", params);
+
+ // 4. Reset call statistics
+ context.putInt("totalApiCalls", 0);
+ context.putInt("completedApiCalls", 0);
+
+ log.info("[{}] API 정보 저장: {} {}", getReaderName(), getHttpMethod(), fullUrl);
+ }
+ }
+
+ /**
+ * API Base URL 반환 (WebClient의 baseUrl)
+ * 하위 클래스에서 필요 시 오버라이드
+ */
+ protected String getApiBaseUrl() {
+ return "";
+ }
+
+ /**
+ * Reader 상태 초기화
+ * Job 재실행 시 이전 실행의 상태를 클리어하여 새로 데이터를 읽을 수 있도록 함
+ */
+ private void resetReaderState() {
+ // Chunk 모드 상태 초기화
+ this.currentBatch = null;
+ this.initialized = false;
+
+ // Legacy 모드 상태 초기화
+ this.legacyDataList = null;
+ this.legacyNextIndex = 0;
+
+ // 통계 초기화
+ this.totalApiCalls = 0;
+ this.completedApiCalls = 0;
+
+ // 하위 클래스 상태 초기화 훅 호출
+ resetCustomState();
+
+ log.debug("[{}] Reader 상태 초기화 완료", getReaderName());
+ }
+
+ /**
+ * 하위 클래스 커스텀 상태 초기화 훅
+ * Chunk 모드에서 사용하는 currentBatchIndex, allImoNumbers 등의 필드를 초기화할 때 오버라이드
+ *
+ * 예시:
+ *
+ * @Override
+ * protected void resetCustomState() {
+ * this.currentBatchIndex = 0;
+ * this.allImoNumbers = null;
+ * this.dbMasterHashes = null;
+ * }
+ *
+ */
+ protected void resetCustomState() {
+ // 기본 구현: 아무것도 하지 않음
+ // 하위 클래스에서 필요 시 오버라이드
+ }
+
+ /**
+ * API 호출 통계 업데이트
+ */
+ protected void updateApiCallStats(int totalCalls, int completedCalls) {
+ if (stepExecution != null) {
+ ExecutionContext context = stepExecution.getExecutionContext();
+ context.putInt("totalApiCalls", totalCalls);
+ context.putInt("completedApiCalls", completedCalls);
+
+ // 마지막 호출 시간 저장
+ String lastCallTime = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
+ context.putString("lastCallTime", lastCallTime);
+
+ this.totalApiCalls = totalCalls;
+ this.completedApiCalls = completedCalls;
+ }
+ }
+
+ // ========================================
+ // ItemReader 구현 (공통 로직)
+ // ========================================
+
+ /**
+ * Spring Batch ItemReader implementation — returns items one at a time.
+ *
+ * Dispatch: if a subclass has called enableChunkMode(), items come from
+ * readChunkMode() (incremental fetchNextBatch paging); otherwise the
+ * legacy path loads everything once via fetchDataFromApi().
+ *
+ * @return next item, or null when exhausted (signals end of step input)
+ */
+ @Override
+ public T read() throws Exception {
+ // Mode is decided on the first call: not yet initialized and chunk mode
+ // disabled means we stay on the legacy path.
+ if (!initialized && !useChunkMode) {
+ // Start in legacy mode.
+ return readLegacyMode();
+ }
+
+ // Chunk mode explicitly enabled by the subclass.
+ if (useChunkMode) {
+ return readChunkMode();
+ }
+
+ // Legacy mode (fallthrough).
+ return readLegacyMode();
+ }
+
+ /**
+ * Chunk 모드 활성화 (하위 클래스에서 명시적 호출)
+ */
+ protected void enableChunkMode() {
+ this.useChunkMode = true;
+ }
+
+ /**
+ * Chunk-based read() implementation (new style).
+ *
+ * Contract with fetchNextBatch():
+ * - null → terminate (afterFetch(null) hook fires, reader returns null)
+ * - empty list → NOT terminal: log a warning and fetch again
+ *
+ * NOTE(review): the empty-list retry loop will spin forever if a subclass
+ * keeps returning empty (non-null) batches without advancing its internal
+ * cursor — confirm every fetchNextBatch() implementation makes progress
+ * on each call. Consider a bounded retry count.
+ */
+ private T readChunkMode() throws Exception {
+ // One-time initialization on the first call.
+ if (!initialized) {
+ beforeFetch();
+ initialized = true;
+ }
+
+ // (A superseded single-shot batch-load implementation was previously kept
+ // here as commented-out code; removed — see version control history.)
+
+ // Load the next batch whenever the current one is exhausted.
+ while (currentBatch == null || !currentBatch.hasNext()) {
+ List nextBatch = fetchNextBatch();
+
+ if (nextBatch == null) { // true end-of-data
+ afterFetch(null);
+ log.info("[{}] 모든 배치 처리 완료", getReaderName());
+ return null;
+ }
+
+ if (nextBatch.isEmpty()) { // empty list → try the next batch
+ log.warn("[{}] 빈 배치 수신 → 다음 배치 재요청", getReaderName());
+ continue; // loop back and fetch again
+ }
+
+ currentBatch = nextBatch.iterator();
+ log.debug("[{}] 배치 로드 완료: {} 건", getReaderName(), nextBatch.size());
+ }
+
+
+ // Hand out one item at a time from the current iterator.
+ return currentBatch.next();
+ }
+
+ /**
+ * Legacy 모드 read() 구현 (하위 호환성)
+ * 기존 fetchDataFromApi()를 오버라이드한 구현체 지원
+ */
+ private T readLegacyMode() throws Exception {
+ // 최초 호출 시 API에서 전체 데이터 조회
+ if (legacyDataList == null) {
+ beforeFetch();
+ legacyDataList = fetchDataFromApi();
+ afterFetch(legacyDataList);
+ log.info("[{}] 데이터 {}건 조회 완료 (Legacy 모드)",
+ getReaderName(), legacyDataList != null ? legacyDataList.size() : 0);
+ }
+
+ // 데이터를 순차적으로 반환
+ if (legacyDataList != null && legacyNextIndex < legacyDataList.size()) {
+ return legacyDataList.get(legacyNextIndex++);
+ } else {
+ return null; // 데이터 끝
+ }
+ }
+
+
+ // ========================================
+ // 핵심 추상 메서드 (하위 클래스에서 구현)
+ // ========================================
+
+ /**
+ * ✨ Fetches the next batch of data as a list (new chunk-based style).
+ *
+ * Called by readChunkMode() whenever the current batch is exhausted;
+ * typically returns 100–1000 items per call.
+ *
+ * Termination contract (corrected — the original doc was wrong here):
+ * - return null to signal end-of-data and terminate the reader;
+ * - an EMPTY list does NOT terminate: readChunkMode() logs a warning and
+ * immediately calls this method again, so implementations that may
+ * return empty lists MUST advance their internal cursor each call or
+ * the reader will loop forever.
+ *
+ * Implementation sketch:
+ *
+ * private int currentPage = 0;
+ * private final int pageSize = 100;
+ *
+ * @Override
+ * protected List fetchNextBatch() {
+ * if (currentPage >= totalPages) {
+ * return null; // terminate
+ * }
+ *
+ * // call the API, 100 items per page
+ * ProductApiResponse response = callApiForPage(currentPage, pageSize);
+ * currentPage++;
+ *
+ * return response.getProducts();
+ * }
+ *
+ * @return next batch of items (null terminates; empty triggers a re-fetch)
+ * @throws Exception on API failure etc.
+ */
+ protected List fetchNextBatch() throws Exception {
+ // Default: returns null, so chunk mode terminates immediately if a
+ // subclass enables chunk mode without overriding this method.
+ // NOTE(review): the original comment claimed a fetchDataFromApi()
+ // fallback happened here — it does not; the legacy fallback lives in
+ // read()/readLegacyMode() and only applies when chunk mode is OFF.
+ return null;
+ }
+
+ /**
+ * API에서 데이터를 조회하여 리스트로 반환 (Legacy 방식 - 하위 호환성)
+ *
+ * ⚠️ Deprecated: fetchNextBatch()를 사용하세요.
+ *
+ * 구현 방법:
+ * 1. WebClient 없이 Mock 데이터 생성 (sample용)
+ * 2. WebClient로 실제 API 호출 (실전용)
+ * 3. callApi() 헬퍼 메서드 사용 (권장)
+ *
+ * @return API에서 조회한 데이터 리스트 (전체)
+ */
+ protected List fetchDataFromApi() {
+ // 기본 구현: 빈 리스트 반환
+ // 하위 클래스에서 오버라이드 필요
+ return new ArrayList<>();
+ }
+
+ /**
+ * Reader 이름 반환 (로깅용)
+ *
+ * @return Reader 이름 (예: "ProductDataReader")
+ */
+ protected abstract String getReaderName();
+
+ // ========================================
+ // HTTP 요청 설정 메서드 (선택적 오버라이드)
+ // ========================================
+
+ /**
+ * HTTP Method 반환
+ *
+ * 기본값: GET
+ * POST 요청 시 오버라이드
+ *
+ * @return HTTP Method ("GET" 또는 "POST")
+ */
+ protected String getHttpMethod() {
+ return "GET";
+ }
+
+ /**
+ * API 엔드포인트 경로 반환
+ *
+ * 예제:
+ * - "/api/v1/products"
+ * - "/api/v1/orders/{orderId}" (Path Variable 포함)
+ *
+ * @return API 경로
+ */
+ protected String getApiPath() {
+ return "";
+ }
+
+ /**
+ * Query Parameter 맵 반환
+ *
+ * 예제:
+ * Map params = new HashMap<>();
+ * params.put("status", "active");
+ * params.put("page", 1);
+ * params.put("size", 100);
+ * return params;
+ *
+ * @return Query Parameter 맵 (null이면 파라미터 없음)
+ */
+ protected Map getQueryParams() {
+ return null;
+ }
+
+ /**
+ * Path Variable 맵 반환
+ *
+ * 예제:
+ * Map pathVars = new HashMap<>();
+ * pathVars.put("orderId", "ORD-001");
+ * return pathVars;
+ *
+ * @return Path Variable 맵 (null이면 Path Variable 없음)
+ */
+ protected Map getPathVariables() {
+ return null;
+ }
+
+ /**
+ * Request Body 반환 (POST 요청용)
+ *
+ * 예제:
+ * return RequestDto.builder()
+ * .startDate("2025-01-01")
+ * .endDate("2025-12-31")
+ * .build();
+ *
+ * @return Request Body 객체 (null이면 Body 없음)
+ */
+ protected Object getRequestBody() {
+ return null;
+ }
+
+ /**
+ * HTTP Header 맵 반환
+ *
+ * 예제:
+ * Map headers = new HashMap<>();
+ * headers.put("Authorization", "Bearer token123");
+ * headers.put("X-Custom-Header", "value");
+ * return headers;
+ *
+ * 기본 헤더 (자동 추가):
+ * - Content-Type: application/json
+ * - Accept: application/json
+ *
+ * @return HTTP Header 맵 (null이면 기본 헤더만 사용)
+ */
+ protected Map getHeaders() {
+ return null;
+ }
+
+ /**
+ * API 응답 타입 반환
+ *
+ * 예제:
+ * return ProductApiResponse.class;
+ *
+ * @return 응답 클래스 타입
+ */
+ protected Class> getResponseType() {
+ return Object.class;
+ }
+
+ /**
+ * API 응답에서 데이터 리스트 추출
+ *
+ * 복잡한 JSON 응답 구조 처리:
+ * - 단순: response.getData()
+ * - 중첩: response.getResult().getItems()
+ *
+ * @param response API 응답 객체
+ * @return 추출된 데이터 리스트
+ */
+ protected List extractDataFromResponse(Object response) {
+ return Collections.emptyList();
+ }
+
+ // ========================================
+ // 라이프사이클 훅 메서드 (선택적 오버라이드)
+ // ========================================
+
+ /**
+ * API 호출 전 전처리
+ *
+ * 사용 예:
+ * - 파라미터 검증
+ * - 로깅
+ * - 캐시 확인
+ */
+ protected void beforeFetch() {
+ log.debug("[{}] API 호출 준비 중...", getReaderName());
+ }
+
+ /**
+ * API 호출 후 후처리
+ *
+ * 사용 예:
+ * - 데이터 검증
+ * - 로깅
+ * - 캐시 저장
+ *
+ * @param data 조회된 데이터 리스트
+ */
+ protected void afterFetch(List data) {
+ log.debug("[{}] API 호출 완료", getReaderName());
+ }
+
+ /**
+ * API 호출 실패 시 에러 처리
+ *
+ * 기본 동작: 빈 리스트 반환 (Job 실패 방지)
+ * 오버라이드 시: 예외 던지기 또는 재시도 로직 구현
+ *
+ * @param e 발생한 예외
+ * @return 대체 데이터 리스트 (빈 리스트 또는 캐시 데이터)
+ */
+ protected List handleApiError(Exception e) {
+ log.error("[{}] API 호출 실패: {}", getReaderName(), e.getMessage(), e);
+ return new ArrayList<>();
+ }
+
+ // ========================================
+ // 헬퍼 메서드 (하위 클래스에서 사용 가능)
+ // ========================================
+
+ /**
+ * WebClient를 사용한 API 호출 (GET/POST 자동 처리)
+ *
+ * 사용 방법 (fetchDataFromApi()에서):
+ *
+ * @Override
+ * protected List fetchDataFromApi() {
+ * ProductApiResponse response = callApi();
+ * return extractDataFromResponse(response);
+ * }
+ *
+ * @param 응답 타입
+ * @return API 응답 객체
+ */
+ @SuppressWarnings("unchecked")
+ protected R callApi() {
+ if (webClient == null) {
+ throw new IllegalStateException("WebClient가 초기화되지 않았습니다. 생성자에서 WebClient를 주입하세요.");
+ }
+
+ try {
+ String method = getHttpMethod().toUpperCase();
+ String path = getApiPath();
+
+ log.info("[{}] {} 요청 시작: {}", getReaderName(), method, path);
+
+ if ("GET".equals(method)) {
+ return callGetApi();
+ } else if ("POST".equals(method)) {
+ return callPostApi();
+ } else {
+ throw new UnsupportedOperationException("지원하지 않는 HTTP Method: " + method);
+ }
+
+ } catch (Exception e) {
+ log.error("[{}] API 호출 중 오류 발생", getReaderName(), e);
+ throw new RuntimeException("API 호출 실패", e);
+ }
+ }
+
+ /**
+ * GET 요청 내부 처리
+ */
+ @SuppressWarnings("unchecked")
+ private R callGetApi() {
+ return (R) webClient
+ .get()
+ .uri(buildUri())
+ .headers(this::applyHeaders)
+ .retrieve()
+ .bodyToMono(getResponseType())
+ .block();
+ }
+
+ /**
+ * POST 요청 내부 처리
+ */
+ @SuppressWarnings("unchecked")
+ private R callPostApi() {
+ Object requestBody = getRequestBody();
+
+ if (requestBody == null) {
+ // Body 없는 POST 요청
+ return (R) webClient
+ .post()
+ .uri(buildUri())
+ .headers(this::applyHeaders)
+ .retrieve()
+ .bodyToMono(getResponseType())
+ .block();
+ } else {
+ // Body 있는 POST 요청
+ return (R) webClient
+ .post()
+ .uri(buildUri())
+ .headers(this::applyHeaders)
+ .bodyValue(requestBody)
+ .retrieve()
+ .bodyToMono(getResponseType())
+ .block();
+ }
+ }
+
+ /**
+ * URI 빌드 (Path + Query Parameters + Path Variables)
+ */
+ private Function buildUri() {
+ return uriBuilder -> {
+ // 1. Path 설정
+ String path = getApiPath();
+ uriBuilder.path(path);
+
+ // 2. Query Parameters 추가
+ Map queryParams = getQueryParams();
+ if (queryParams != null && !queryParams.isEmpty()) {
+ queryParams.forEach((key, value) -> {
+ if (value != null) {
+ uriBuilder.queryParam(key, value);
+ }
+ });
+ log.debug("[{}] Query Parameters: {}", getReaderName(), queryParams);
+ }
+
+ // 3. Path Variables 적용
+ Map pathVars = getPathVariables();
+ if (pathVars != null && !pathVars.isEmpty()) {
+ log.debug("[{}] Path Variables: {}", getReaderName(), pathVars);
+ return uriBuilder.build(pathVars);
+ } else {
+ return uriBuilder.build();
+ }
+ };
+ }
+
+ /**
+ * HTTP Header 적용
+ */
+ private void applyHeaders(HttpHeaders httpHeaders) {
+ // 1. 기본 헤더 설정
+ httpHeaders.setContentType(MediaType.APPLICATION_JSON);
+ httpHeaders.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));
+
+ // 2. 커스텀 헤더 추가
+ Map customHeaders = getHeaders();
+ if (customHeaders != null && !customHeaders.isEmpty()) {
+ customHeaders.forEach(httpHeaders::set);
+ log.debug("[{}] Custom Headers: {}", getReaderName(), customHeaders);
+ }
+ }
+
+ // ========================================
+ // 유틸리티 메서드
+ // ========================================
+
+ /**
+ * 데이터 리스트가 비어있는지 확인
+ */
+ protected boolean isEmpty(List data) {
+ return data == null || data.isEmpty();
+ }
+
+ /**
+ * 데이터 리스트 크기 반환 (null-safe)
+ */
+ protected int getDataSize(List data) {
+ return data != null ? data.size() : 0;
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java
new file mode 100644
index 0000000..c7634e6
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java
@@ -0,0 +1,353 @@
+package com.snp.batch.common.batch.repository;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.RowMapper;
+import org.springframework.jdbc.support.GeneratedKeyHolder;
+import org.springframework.jdbc.support.KeyHolder;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.sql.PreparedStatement;
+import java.sql.Statement;
+import java.time.LocalDateTime;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * JdbcTemplate 기반 Repository 추상 클래스
+ * 모든 Repository가 상속받아 일관된 CRUD 패턴 제공
+ *
+ * @param Entity 타입
+ * @param ID 타입
+ */
+@Slf4j
+@RequiredArgsConstructor
+@Transactional(readOnly = true)
+public abstract class BaseJdbcRepository {
+
+ protected final JdbcTemplate jdbcTemplate;
+
+ /**
+ * 대상 스키마 이름 반환 (하위 클래스에서 구현)
+ * application.yml의 app.batch.target-schema.name 값을 @Value로 주입받아 반환
+ */
+ protected abstract String getTargetSchema();
+
+ /**
+ * 테이블명만 반환 (스키마 제외, 하위 클래스에서 구현)
+ */
+ protected abstract String getSimpleTableName();
+
+ /**
+ * 전체 테이블명 반환 (스키마.테이블)
+ * 하위 클래스에서는 getSimpleTableName()만 구현하면 됨
+ */
+ protected String getTableName() {
+ return getTargetSchema() + "." + getSimpleTableName();
+ }
+
+ /**
+ * Returns the ID column name (default: "id").
+ * Subclasses override when the PK column is named differently.
+ */
+ protected String getIdColumnName() {
+ return "id";
+ }
+ // NOTE(review): this overload simply returns its argument unchanged — it is
+ // a no-op wrapper that adds nothing over using 'customId' directly at the
+ // call site. Consider removing it, or documenting the intended use.
+ protected String getIdColumnName(String customId) {
+ return customId;
+ }
+
+ /**
+ * RowMapper 반환 (하위 클래스에서 구현)
+ */
+ protected abstract RowMapper getRowMapper();
+
+ /**
+ * Entity에서 ID 추출 (하위 클래스에서 구현)
+ */
+ protected abstract ID extractId(T entity);
+
+ /**
+ * INSERT SQL 생성 (하위 클래스에서 구현)
+ */
+ protected abstract String getInsertSql();
+
+ /**
+ * UPDATE SQL 생성 (하위 클래스에서 구현)
+ */
+ protected abstract String getUpdateSql();
+
+ /**
+ * INSERT용 PreparedStatement 파라미터 설정 (하위 클래스에서 구현)
+ */
+ protected abstract void setInsertParameters(PreparedStatement ps, T entity) throws Exception;
+
+ /**
+ * UPDATE용 PreparedStatement 파라미터 설정 (하위 클래스에서 구현)
+ */
+ protected abstract void setUpdateParameters(PreparedStatement ps, T entity) throws Exception;
+
+ /**
+ * 엔티티명 반환 (로깅용)
+ */
+ protected abstract String getEntityName();
+
+ // ==================== CRUD 메서드 ====================
+
+ /**
+ * ID로 조회
+ */
+ public Optional findById(ID id) {
+ String sql = String.format("SELECT * FROM %s WHERE %s = ?", getTableName(), getIdColumnName());
+ log.debug("{} 조회: ID={}", getEntityName(), id);
+
+ List results = jdbcTemplate.query(sql, getRowMapper(), id);
+ return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
+ }
+
+ /**
+ * 전체 조회
+ */
+ public List findAll() {
+ String sql = String.format("SELECT * FROM %s ORDER BY %s DESC", getTableName(), getIdColumnName());
+ log.debug("{} 전체 조회", getEntityName());
+ return jdbcTemplate.query(sql, getRowMapper());
+ }
+
+ /**
+ * 개수 조회
+ */
+ public long count() {
+ String sql = String.format("SELECT COUNT(*) FROM %s", getTableName());
+ Long count = jdbcTemplate.queryForObject(sql, Long.class);
+ return count != null ? count : 0L;
+ }
+
+ /**
+ * 존재 여부 확인
+ */
+ public boolean existsById(ID id) {
+ String sql = String.format("SELECT COUNT(*) FROM %s WHERE %s = ?", getTableName(), getIdColumnName());
+ Long count = jdbcTemplate.queryForObject(sql, Long.class, id);
+ return count != null && count > 0;
+ }
+
+ /**
+ * 단건 저장 (INSERT 또는 UPDATE)
+ */
+ @Transactional
+ public T save(T entity) {
+ ID id = extractId(entity);
+
+ if (id == null || !existsById(id)) {
+ return insert(entity);
+ } else {
+ return update(entity);
+ }
+ }
+
+ /**
+ * Single-row INSERT.
+ *
+ * Attempts to read back the generated key and re-load the persisted row;
+ * falls back to returning the input entity when no key is returned.
+ *
+ * NOTE(review): keyHolder.getKeys().get(getIdColumnName()) is a
+ * case-sensitive map lookup and drivers differ in key casing (e.g.
+ * PostgreSQL lower-cases, Oracle upper-cases returned column names) —
+ * verify against the target DB. The Number→longValue cast also assumes
+ * ID is effectively Long.
+ */
+ @Transactional
+ protected T insert(T entity) {
+ log.info("{} 삽입 시작", getEntityName());
+
+ KeyHolder keyHolder = new GeneratedKeyHolder();
+
+ jdbcTemplate.update(connection -> {
+ PreparedStatement ps = connection.prepareStatement(getInsertSql(), Statement.RETURN_GENERATED_KEYS);
+ try {
+ setInsertParameters(ps, entity);
+ } catch (Exception e) {
+ log.error("{} 삽입 파라미터 설정 실패", getEntityName(), e);
+ throw new RuntimeException("Failed to set insert parameters", e);
+ }
+ return ps;
+ }, keyHolder);
+
+ // Look up the generated ID, if the driver returned one.
+ if (keyHolder.getKeys() != null && !keyHolder.getKeys().isEmpty()) {
+ Object idValue = keyHolder.getKeys().get(getIdColumnName());
+ if (idValue != null) {
+ @SuppressWarnings("unchecked")
+ ID generatedId = (ID) (idValue instanceof Number ? ((Number) idValue).longValue() : idValue);
+ log.info("{} 삽입 완료: ID={}", getEntityName(), generatedId);
+ return findById(generatedId).orElse(entity);
+ }
+ }
+
+ log.info("{} 삽입 완료 (ID 미반환)", getEntityName());
+ return entity;
+ }
+
+ /**
+ * 단건 UPDATE
+ */
+ @Transactional
+ protected T update(T entity) {
+ ID id = extractId(entity);
+ log.info("{} 수정 시작: ID={}", getEntityName(), id);
+
+ int updated = jdbcTemplate.update(connection -> {
+ PreparedStatement ps = connection.prepareStatement(getUpdateSql());
+ try {
+ setUpdateParameters(ps, entity);
+ } catch (Exception e) {
+ log.error("{} 수정 파라미터 설정 실패", getEntityName(), e);
+ throw new RuntimeException("Failed to set update parameters", e);
+ }
+ return ps;
+ });
+
+ if (updated == 0) {
+ throw new IllegalStateException(getEntityName() + " 수정 실패: ID=" + id);
+ }
+
+ log.info("{} 수정 완료: ID={}", getEntityName(), id);
+ return findById(id).orElse(entity);
+ }
+
+ /**
+ * 배치 INSERT (대량 삽입)
+ */
+ @Transactional
+ public void batchInsert(List entities) {
+ if (entities == null || entities.isEmpty()) {
+ return;
+ }
+
+ log.info("{} 배치 삽입 시작: {} 건", getEntityName(), entities.size());
+
+ jdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(),
+ (ps, entity) -> {
+ try {
+ setInsertParameters(ps, entity);
+ } catch (Exception e) {
+ log.error("배치 삽입 파라미터 설정 실패", e);
+ throw new RuntimeException(e);
+ }
+ });
+
+ log.info("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
+ }
+
+ /**
+ * 배치 UPDATE (대량 수정)
+ */
+ @Transactional
+ public void batchUpdate(List entities) {
+ if (entities == null || entities.isEmpty()) {
+ return;
+ }
+
+ log.info("{} 배치 수정 시작: {} 건", getEntityName(), entities.size());
+
+ jdbcTemplate.batchUpdate(getUpdateSql(), entities, entities.size(),
+ (ps, entity) -> {
+ try {
+ setUpdateParameters(ps, entity);
+ } catch (Exception e) {
+ log.error("배치 수정 파라미터 설정 실패", e);
+ throw new RuntimeException(e);
+ }
+ });
+
+ log.info("{} 배치 수정 완료: {} 건", getEntityName(), entities.size());
+ }
+
+ /**
+ * Saves all entities, routing each to batch INSERT or batch UPDATE.
+ *
+ * NOTE(review): existsById() is executed twice per entity (once in each
+ * stream filter) — that is 2N COUNT queries before any write. For large
+ * lists, compute the existence check once per entity (or fetch existing
+ * IDs in a single IN query) before partitioning.
+ */
+ @Transactional
+ public void saveAll(List entities) {
+ if (entities == null || entities.isEmpty()) {
+ return;
+ }
+
+ log.info("{} 전체 저장 시작: {} 건", getEntityName(), entities.size());
+
+ // Partition into rows to INSERT vs rows to UPDATE.
+ List toInsert = entities.stream()
+ .filter(e -> extractId(e) == null || !existsById(extractId(e)))
+ .toList();
+
+ List toUpdate = entities.stream()
+ .filter(e -> extractId(e) != null && existsById(extractId(e)))
+ .toList();
+
+ if (!toInsert.isEmpty()) {
+ batchInsert(toInsert);
+ }
+
+ if (!toUpdate.isEmpty()) {
+ batchUpdate(toUpdate);
+ }
+
+ log.info("{} 전체 저장 완료: 삽입={} 건, 수정={} 건", getEntityName(), toInsert.size(), toUpdate.size());
+ }
+
+ /**
+ * ID로 삭제
+ */
+ @Transactional
+ public void deleteById(ID id) {
+ String sql = String.format("DELETE FROM %s WHERE %s = ?", getTableName(), getIdColumnName());
+ log.info("{} 삭제: ID={}", getEntityName(), id);
+
+ int deleted = jdbcTemplate.update(sql, id);
+
+ if (deleted == 0) {
+ log.warn("{} 삭제 실패 (존재하지 않음): ID={}", getEntityName(), id);
+ } else {
+ log.info("{} 삭제 완료: ID={}", getEntityName(), id);
+ }
+ }
+
+ /**
+ * 전체 삭제
+ */
+ @Transactional
+ public void deleteAll() {
+ String sql = String.format("DELETE FROM %s", getTableName());
+ log.warn("{} 전체 삭제", getEntityName());
+
+ int deleted = jdbcTemplate.update(sql);
+ log.info("{} 전체 삭제 완료: {} 건", getEntityName(), deleted);
+ }
+
+ // ==================== 헬퍼 메서드 ====================
+
+ /**
+ * 현재 시각 반환 (감사 필드용)
+ */
+ protected LocalDateTime now() {
+ return LocalDateTime.now();
+ }
+
+ /**
+ * 커스텀 쿼리 실행 (단건 조회)
+ */
+ protected Optional executeQueryForObject(String sql, Object... params) {
+ log.debug("커스텀 쿼리 실행: {}", sql);
+ List results = jdbcTemplate.query(sql, getRowMapper(), params);
+ return results.isEmpty() ? Optional.empty() : Optional.of(results.get(0));
+ }
+
+ /**
+ * 커스텀 쿼리 실행 (다건 조회)
+ */
+ protected List executeQueryForList(String sql, Object... params) {
+ log.debug("커스텀 쿼리 실행: {}", sql);
+ return jdbcTemplate.query(sql, getRowMapper(), params);
+ }
+
+ /**
+ * 커스텀 업데이트 실행
+ */
+ @Transactional
+ protected int executeUpdate(String sql, Object... params) {
+ log.debug("커스텀 업데이트 실행: {}", sql);
+ return jdbcTemplate.update(sql, params);
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/tasklet/LastExecutionUpdateTasklet.java b/src/main/java/com/snp/batch/common/batch/tasklet/LastExecutionUpdateTasklet.java
new file mode 100644
index 0000000..7ef7455
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/tasklet/LastExecutionUpdateTasklet.java
@@ -0,0 +1,73 @@
+package com.snp.batch.common.batch.tasklet;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.StepContribution;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.jdbc.core.JdbcTemplate;
+
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+
+/**
+ * 배치 작업 완료 후 BATCH_LAST_EXECUTION 테이블의 LAST_SUCCESS_DATE를 업데이트하는 공통 Tasklet.
+ *
+ * RECOLLECT 모드일 경우 업데이트를 스킵하며,
+ * Job ExecutionContext에 저장된 {@code batchToDate}를 기준으로 성공 날짜를 계산합니다.
+ * {@code batchToDate}가 없을 경우 현재 시간에서 {@code bufferHours}를 차감하여 사용합니다.
+ */
+@Slf4j
+public class LastExecutionUpdateTasklet implements Tasklet {
+
+ private static final String RECOLLECT_MODE = "RECOLLECT";
+
+ private final JdbcTemplate jdbcTemplate;
+ private final String targetSchema;
+ private final String apiKey;
+ private final int bufferHours;
+
+ public LastExecutionUpdateTasklet(JdbcTemplate jdbcTemplate, String targetSchema,
+ String apiKey, int bufferHours) {
+ this.jdbcTemplate = jdbcTemplate;
+ this.targetSchema = targetSchema;
+ this.apiKey = apiKey;
+ this.bufferHours = bufferHours;
+ }
+
+ @Override
+ public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) {
+ String executionMode = chunkContext.getStepContext()
+ .getStepExecution().getJobExecution()
+ .getJobParameters().getString("executionMode", "NORMAL");
+
+ if (RECOLLECT_MODE.equals(executionMode)) {
+ log.info(">>>>> RECOLLECT 모드 - LAST_EXECUTION 업데이트 스킵");
+ return RepeatStatus.FINISHED;
+ }
+
+ String toDateStr = chunkContext.getStepContext()
+ .getStepExecution().getJobExecution()
+ .getExecutionContext().getString("batchToDate", null);
+
+ LocalDateTime successDate;
+ if (toDateStr != null) {
+ successDate = LocalDateTime.parse(toDateStr).minusHours(bufferHours);
+ log.info(">>>>> BATCH_LAST_EXECUTION 업데이트 시작 (캡처된 toDate - {}시간 버퍼: {})",
+ bufferHours, successDate);
+ } else {
+ successDate = LocalDateTime.now().minusHours(bufferHours);
+ log.warn(">>>>> batchToDate가 없어 현재 시간 - {}시간 버퍼 사용: {}", bufferHours, successDate);
+ }
+
+ jdbcTemplate.update(
+ String.format(
+ "UPDATE %s.BATCH_LAST_EXECUTION SET LAST_SUCCESS_DATE = ?, UPDATED_AT = NOW() WHERE API_KEY = ?",
+ targetSchema),
+ Timestamp.valueOf(successDate), apiKey
+ );
+
+ log.info(">>>>> BATCH_LAST_EXECUTION 업데이트 완료 (LAST_SUCCESS_DATE = {})", successDate);
+ return RepeatStatus.FINISHED;
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/batch/writer/BaseWriter.java b/src/main/java/com/snp/batch/common/batch/writer/BaseWriter.java
new file mode 100644
index 0000000..6169d5b
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/batch/writer/BaseWriter.java
@@ -0,0 +1,61 @@
+package com.snp.batch.common.batch.writer;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.item.Chunk;
+import org.springframework.batch.item.ItemWriter;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * ItemWriter 추상 클래스
+ * 데이터 저장 로직을 위한 템플릿 제공
+ *
+ * Template Method Pattern:
+ * - write(): 공통 로직 (로깅, null 체크)
+ * - writeItems(): 하위 클래스에서 저장 로직 구현
+ *
+ * @param Entity 타입
+ */
+@Slf4j
+@RequiredArgsConstructor
+public abstract class BaseWriter implements ItemWriter {
+
+ private final String entityName;
+
+ /**
+ * 실제 데이터 저장 로직 (하위 클래스에서 구현)
+ * Repository의 saveAll() 또는 batchInsert() 호출 등
+ *
+ * @param items 저장할 Entity 리스트
+ * @throws Exception 저장 중 오류 발생 시
+ */
+ protected abstract void writeItems(List items) throws Exception;
+
+ /**
+ * Spring Batch ItemWriter 인터페이스 구현
+ * Chunk 단위로 데이터를 저장
+ *
+ * @param chunk 저장할 데이터 청크
+ * @throws Exception 저장 중 오류 발생 시
+ */
+ @Override
+ public void write(Chunk extends T> chunk) throws Exception {
+ List items = new ArrayList<>(chunk.getItems());
+
+ if (items.isEmpty()) {
+ log.debug("{} 저장할 데이터가 없습니다", entityName);
+ return;
+ }
+
+ try {
+ log.info("{} 데이터 {}건 저장 시작", entityName, items.size());
+ writeItems(items);
+ log.info("{} 데이터 {}건 저장 완료", entityName, items.size());
+ } catch (Exception e) {
+ log.error("{} 데이터 저장 실패", entityName, e);
+ throw e;
+ }
+ }
+}
diff --git a/src/main/java/com/snp/batch/common/web/ApiResponse.java b/src/main/java/com/snp/batch/common/web/ApiResponse.java
new file mode 100644
index 0000000..b646cb0
--- /dev/null
+++ b/src/main/java/com/snp/batch/common/web/ApiResponse.java
@@ -0,0 +1,75 @@
+package com.snp.batch.common.web;
+
+import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+/**
+ * 통일된 API 응답 형식
+ *
+ * @param 응답 데이터 타입
+ */
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@Schema(description = "공통 API 응답 래퍼")
+public class ApiResponse {
+
+ @Schema(description = "성공 여부", example = "true")
+ private boolean success;
+
+ @Schema(description = "응답 메시지", example = "Success")
+ private String message;
+
+ @Schema(description = "응답 데이터")
+ private T data;
+
+ @Schema(description = "에러 코드 (실패 시에만 존재)", example = "NOT_FOUND", nullable = true)
+ private String errorCode;
+
+ /**
+ * 성공 응답 생성
+ */
+ public static ApiResponse success(T data) {
+ return ApiResponse.builder()
+ .success(true)
+ .message("Success")
+ .data(data)
+ .build();
+ }
+
+ /**
+ * 성공 응답 생성 (메시지 포함)
+ */
+ public static ApiResponse success(String message, T data) {
+ return ApiResponse.builder()
+ .success(true)
+ .message(message)
+ .data(data)
+ .build();
+ }
+
+ /**
+ * 실패 응답 생성
+ */
+ public static ApiResponse error(String message) {
+ return ApiResponse.builder()
+ .success(false)
+ .message(message)
+ .build();
+ }
+
+ /**
+ * 실패 응답 생성 (에러 코드 포함)
+ */
+ public static ApiResponse error(String message, String errorCode) {
+ return ApiResponse.builder()
+ .success(false)
+ .message(message)
+ .errorCode(errorCode)
+ .build();
+ }
+}
diff --git a/src/main/java/com/snp/batch/global/cleanup/LogCleanupConfig.java b/src/main/java/com/snp/batch/global/cleanup/LogCleanupConfig.java
new file mode 100644
index 0000000..31561fb
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/cleanup/LogCleanupConfig.java
@@ -0,0 +1,37 @@
+package com.snp.batch.global.cleanup;
+
+import lombok.Getter;
+import lombok.Setter;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * Retention settings for the batch-log cleanup job.
+ *
+ * Each value is a retention period in days for one log category.
+ *
+ * Example configuration:
+ * app.batch.log-cleanup:
+ *   api-log-retention-days: 30
+ *   batch-meta-retention-days: 90
+ *   failed-record-retention-days: 90
+ *   recollection-history-retention-days: 90
+ */
+@Getter
+@Setter
+@Configuration
+@ConfigurationProperties(prefix = "app.batch.log-cleanup")
+public class LogCleanupConfig {
+
+    /** Retention period (days) for batch_api_log rows. */
+    private int apiLogRetentionDays = 30;
+
+    /** Retention period (days) for Spring Batch meta tables. */
+    private int batchMetaRetentionDays = 90;
+
+    /** Retention period (days) for batch_failed_record rows in RESOLVED status. */
+    private int failedRecordRetentionDays = 90;
+
+    /** Retention period (days) for batch_recollection_history rows. */
+    private int recollectionHistoryRetentionDays = 90;
+}
diff --git a/src/main/java/com/snp/batch/global/cleanup/LogCleanupJobConfig.java b/src/main/java/com/snp/batch/global/cleanup/LogCleanupJobConfig.java
new file mode 100644
index 0000000..1081c8a
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/cleanup/LogCleanupJobConfig.java
@@ -0,0 +1,69 @@
+package com.snp.batch.global.cleanup;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobExecutionListener;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.job.builder.JobBuilder;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.step.builder.StepBuilder;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.transaction.PlatformTransactionManager;
+
+/**
+ * Job configuration for the batch-log cleanup job.
+ *
+ * Intended schedule: daily at 02:00 (cron "0 0 2 * * ?").
+ *
+ * Behaviour:
+ * - deletes batch log data older than the configured retention periods
+ * - covers batch_api_log (30d), Spring Batch meta tables (90d),
+ *   RESOLVED batch_failed_record rows (90d), batch_recollection_history (90d)
+ */
+@Slf4j
+@Configuration
+public class LogCleanupJobConfig {
+
+    private final JobRepository jobRepository;
+    private final PlatformTransactionManager transactionManager;
+    private final LogCleanupTasklet logCleanupTasklet;
+
+    public LogCleanupJobConfig(
+            JobRepository jobRepository,
+            PlatformTransactionManager transactionManager,
+            LogCleanupTasklet logCleanupTasklet) {
+        this.jobRepository = jobRepository;
+        this.transactionManager = transactionManager;
+        this.logCleanupTasklet = logCleanupTasklet;
+    }
+
+    @Bean(name = "logCleanupStep")
+    public Step logCleanupStep() {
+        return new StepBuilder("logCleanupStep", jobRepository)
+                .tasklet(logCleanupTasklet, transactionManager)
+                .build();
+    }
+
+    @Bean(name = "LogCleanupJob")
+    public Job logCleanupJob() {
+        log.info("Job 생성: LogCleanupJob");
+
+        return new JobBuilder("LogCleanupJob", jobRepository)
+                .listener(new JobExecutionListener() {
+                    @Override
+                    public void beforeJob(JobExecution jobExecution) {
+                        log.info("[LogCleanupJob] 배치 로그 정리 Job 시작");
+                    }
+
+                    @Override
+                    public void afterJob(JobExecution jobExecution) {
+                        log.info("[LogCleanupJob] 배치 로그 정리 Job 완료 - 상태: {}",
+                                jobExecution.getStatus());
+                    }
+                })
+                .start(logCleanupStep())
+                .build();
+    }
+}
diff --git a/src/main/java/com/snp/batch/global/cleanup/LogCleanupTasklet.java b/src/main/java/com/snp/batch/global/cleanup/LogCleanupTasklet.java
new file mode 100644
index 0000000..79f0da0
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/cleanup/LogCleanupTasklet.java
@@ -0,0 +1,148 @@
+package com.snp.batch.global.cleanup;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.batch.core.StepContribution;
+import org.springframework.batch.core.scope.context.ChunkContext;
+import org.springframework.batch.core.step.tasklet.Tasklet;
+import org.springframework.batch.repeat.RepeatStatus;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.stereotype.Component;
+
+@Slf4j
+@Component
+@RequiredArgsConstructor
+public class LogCleanupTasklet implements Tasklet {
+
+    private final JdbcTemplate jdbcTemplate;
+    private final LogCleanupConfig config;
+
+    @Value("${app.batch.target-schema.name}")
+    private String schema;
+
+    @Override
+    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
+        log.info("========================================");
+        log.info("배치 로그 정리 Job 시작");
+        log.info("========================================");
+
+        int totalDeleted = 0;
+
+        // 1. purge batch_api_log
+        totalDeleted += cleanupApiLog();
+
+        // 2. purge Spring Batch meta tables (FK-safe order)
+        totalDeleted += cleanupBatchMeta();
+
+        // 3. purge batch_failed_record (RESOLVED rows only)
+        totalDeleted += cleanupFailedRecord();
+
+        // 4. purge batch_recollection_history
+        totalDeleted += cleanupRecollectionHistory();
+
+        log.info("========================================");
+        log.info("배치 로그 정리 Job 완료 - 총 삭제: {} 건", totalDeleted);
+        log.info("========================================");
+
+        return RepeatStatus.FINISHED;
+    }
+
+    private int cleanupApiLog() {
+        int days = config.getApiLogRetentionDays();
+        String sql = String.format(
+                "DELETE FROM %s.batch_api_log WHERE created_at < NOW() - INTERVAL '%d days'",
+                schema, days);
+        int deleted = jdbcTemplate.update(sql);
+        log.info("[batch_api_log] 보존기간: {}일, 삭제: {}건", days, deleted);
+        return deleted;
+    }
+
+    private int cleanupBatchMeta() {
+        int days = config.getBatchMetaRetentionDays();
+        int totalDeleted = 0;
+
+        // FK dependency order: step_execution_context -> step_execution -> job_execution_context -> job_execution_params -> job_execution -> job_instance (orphans). NOTE(review): schema/days are interpolated via String.format — both come from trusted config, not user input; confirm this stays true.
+
+        // 1. batch_step_execution_context
+        String sql1 = String.format(
+                "DELETE FROM %s.batch_step_execution_context WHERE step_execution_id IN (" +
+                "SELECT se.step_execution_id FROM %s.batch_step_execution se " +
+                "JOIN %s.batch_job_execution je ON se.job_execution_id = je.job_execution_id " +
+                "WHERE je.create_time < NOW() - INTERVAL '%d days')",
+                schema, schema, schema, days);
+        int deleted = jdbcTemplate.update(sql1);
+        totalDeleted += deleted;
+        log.info("[batch_step_execution_context] 삭제: {}건", deleted);
+
+        // 2. batch_step_execution
+        String sql2 = String.format(
+                "DELETE FROM %s.batch_step_execution WHERE job_execution_id IN (" +
+                "SELECT job_execution_id FROM %s.batch_job_execution " +
+                "WHERE create_time < NOW() - INTERVAL '%d days')",
+                schema, schema, days);
+        deleted = jdbcTemplate.update(sql2);
+        totalDeleted += deleted;
+        log.info("[batch_step_execution] 삭제: {}건", deleted);
+
+        // 3. batch_job_execution_context
+        String sql3 = String.format(
+                "DELETE FROM %s.batch_job_execution_context WHERE job_execution_id IN (" +
+                "SELECT job_execution_id FROM %s.batch_job_execution " +
+                "WHERE create_time < NOW() - INTERVAL '%d days')",
+                schema, schema, days);
+        deleted = jdbcTemplate.update(sql3);
+        totalDeleted += deleted;
+        log.info("[batch_job_execution_context] 삭제: {}건", deleted);
+
+        // 4. batch_job_execution_params
+        String sql4 = String.format(
+                "DELETE FROM %s.batch_job_execution_params WHERE job_execution_id IN (" +
+                "SELECT job_execution_id FROM %s.batch_job_execution " +
+                "WHERE create_time < NOW() - INTERVAL '%d days')",
+                schema, schema, days);
+        deleted = jdbcTemplate.update(sql4);
+        totalDeleted += deleted;
+        log.info("[batch_job_execution_params] 삭제: {}건", deleted);
+
+        // 5. batch_job_execution
+        String sql5 = String.format(
+                "DELETE FROM %s.batch_job_execution WHERE create_time < NOW() - INTERVAL '%d days'",
+                schema, days);
+        deleted = jdbcTemplate.update(sql5);
+        totalDeleted += deleted;
+        log.info("[batch_job_execution] 삭제: {}건", deleted);
+
+        // 6. batch_job_instance (only instances no longer referenced by any execution)
+        String sql6 = String.format(
+                "DELETE FROM %s.batch_job_instance WHERE job_instance_id NOT IN (" +
+                "SELECT DISTINCT job_instance_id FROM %s.batch_job_execution)",
+                schema, schema);
+        deleted = jdbcTemplate.update(sql6);
+        totalDeleted += deleted;
+        log.info("[batch_job_instance] orphan 삭제: {}건", deleted);
+
+        log.info("[Spring Batch 메타] 보존기간: {}일, 총 삭제: {}건", days, totalDeleted);
+        return totalDeleted;
+    }
+
+    private int cleanupFailedRecord() {
+        int days = config.getFailedRecordRetentionDays();
+        String sql = String.format(
+                "DELETE FROM %s.batch_failed_record WHERE status = 'RESOLVED' AND resolved_at < NOW() - INTERVAL '%d days'",
+                schema, days);
+        int deleted = jdbcTemplate.update(sql);
+        log.info("[batch_failed_record] 보존기간: {}일 (RESOLVED만), 삭제: {}건", days, deleted);
+        return deleted;
+    }
+
+    private int cleanupRecollectionHistory() {
+        int days = config.getRecollectionHistoryRetentionDays();
+        String sql = String.format(
+                "DELETE FROM %s.batch_recollection_history WHERE created_at < NOW() - INTERVAL '%d days'",
+                schema, days);
+        int deleted = jdbcTemplate.update(sql);
+        log.info("[batch_recollection_history] 보존기간: {}일, 삭제: {}건", days, deleted);
+        return deleted;
+    }
+}
diff --git a/src/main/java/com/snp/batch/global/config/AsyncConfig.java b/src/main/java/com/snp/batch/global/config/AsyncConfig.java
new file mode 100644
index 0000000..e9b273e
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/config/AsyncConfig.java
@@ -0,0 +1,65 @@
+package com.snp.batch.global.config;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+
+import org.springframework.core.task.TaskExecutor;
+
+import java.util.concurrent.Executor;
+import java.util.concurrent.ThreadPoolExecutor;
+
+@Configuration
+@EnableAsync // enable Spring @Async processing
+public class AsyncConfig {
+
+    /**
+     * Executor for asynchronous API-call logging.
+     */
+    @Bean(name = "apiLogExecutor")
+    public Executor apiLogExecutor() {
+        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+        executor.setCorePoolSize(2);    // core thread count
+        executor.setMaxPoolSize(5);     // max thread count
+        executor.setQueueCapacity(500); // pending-task queue size
+        executor.setThreadNamePrefix("ApiLogThread-");
+        executor.initialize();
+        return executor;
+    }
+
+    /**
+     * Executor dedicated to automatic re-collection.
+     * Re-collection jobs run for a long time, so they get a pool
+     * separate from apiLogExecutor.
+     */
+    @Bean(name = "autoRetryExecutor")
+    public Executor autoRetryExecutor() {
+        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+        executor.setCorePoolSize(1);   // sequential processing is sufficient
+        executor.setMaxPoolSize(2);    // allow at most 2 concurrent retries
+        executor.setQueueCapacity(10); // pending queue
+        // Fix: the original comment promised CallerRunsPolicy on queue overflow,
+        // but no handler was configured, so the default AbortPolicy would throw
+        // TaskRejectedException. Degrade to running in the caller thread instead.
+        executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
+        executor.setThreadNamePrefix("AutoRetry-");
+        executor.initialize();
+        return executor;
+    }
+
+    /**
+     * Executor for parallel batch partition steps
+     * (e.g. ShipDetailUpdate partitioning).
+     */
+    @Bean(name = "batchPartitionExecutor")
+    public TaskExecutor batchPartitionExecutor() {
+        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+        executor.setCorePoolSize(4);   // default partition count
+        executor.setMaxPoolSize(8);    // max partition count
+        executor.setQueueCapacity(20); // pending queue
+        executor.setThreadNamePrefix("BatchPartition-");
+        executor.initialize();
+        return executor;
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/com/snp/batch/global/config/MaritimeApiWebClientConfig.java b/src/main/java/com/snp/batch/global/config/MaritimeApiWebClientConfig.java
new file mode 100644
index 0000000..0d599e5
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/config/MaritimeApiWebClientConfig.java
@@ -0,0 +1,161 @@
+package com.snp.batch.global.config;
+
+import io.netty.channel.ChannelOption;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.client.reactive.ReactorClientHttpConnector;
+import org.springframework.web.reactive.function.client.WebClient;
+import reactor.netty.http.client.HttpClient;
+
+import java.time.Duration;
+
+/**
+ * WebClient configuration for the Maritime API servers.
+ *
+ * Purpose:
+ * - registers WebClient beans for the Maritime API endpoints
+ * - reused by every job that calls the same API servers
+ * - keeps connection settings in a single place
+ *
+ * Used by:
+ * - all domain jobs that call the Maritime APIs
+ *
+ * Adding another API server:
+ * - create a new config class (e.g. OtherApiWebClientConfig)
+ * - register the bean under a distinct name (e.g. @Bean(name = "otherApiWebClient"))
+ */
+@Slf4j
+@Configuration
+public class MaritimeApiWebClientConfig {
+
+    @Value("${app.batch.ship-api.url}")
+    private String maritimeApiUrl;
+
+    @Value("${app.batch.ais-api.url}")
+    private String maritimeAisApiUrl;
+
+    @Value("${app.batch.webservice-api.url}")
+    private String maritimeServiceApiUrl;
+
+
+    @Value("${app.batch.api-auth.username}")
+    private String maritimeApiUsername;
+
+    @Value("${app.batch.api-auth.password}")
+    private String maritimeApiPassword;
+
+    /**
+     * WebClient bean for the Maritime (ship) API.
+     *
+     * Settings:
+     * - base URL: Maritime API server address
+     * - auth: HTTP Basic
+     * - buffer: 100MB in-memory limit for large responses (was misdocumented as 20MB)
+     *
+     * @return Maritime API WebClient
+     */
+    @Bean(name = "maritimeApiWebClient")
+    public WebClient maritimeApiWebClient() {
+        log.info("========================================");
+        log.info("Maritime API WebClient 생성");
+        log.info("Base URL: {}", maritimeApiUrl);
+        log.info("========================================");
+
+        HttpClient httpClient = HttpClient.create()
+                .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 10_000) // connect timeout: 10s
+                .responseTimeout(Duration.ofSeconds(60)); // response timeout: 60s
+
+        return WebClient.builder()
+                .clientConnector(new ReactorClientHttpConnector(httpClient))
+                .baseUrl(maritimeApiUrl)
+                .defaultHeaders(headers -> headers.setBasicAuth(maritimeApiUsername, maritimeApiPassword))
+                .codecs(configurer -> configurer
+                        .defaultCodecs()
+                        .maxInMemorySize(100 * 1024 * 1024)) // 100MB buffer
+                .build();
+    }
+
+    @Bean(name = "maritimeAisApiWebClient")
+    public WebClient maritimeAisApiWebClient(){
+        log.info("========================================");
+        log.info("Maritime AIS API WebClient 생성");
+        log.info("Base URL: {}", maritimeAisApiUrl);
+        log.info("========================================");
+
+        HttpClient httpClient = HttpClient.create()
+                .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 10_000) // connect timeout: 10s
+                .responseTimeout(Duration.ofSeconds(60)); // response timeout: 60s
+
+        return WebClient.builder()
+                .clientConnector(new ReactorClientHttpConnector(httpClient))
+                .baseUrl(maritimeAisApiUrl)
+                .defaultHeaders(headers -> headers.setBasicAuth(maritimeApiUsername, maritimeApiPassword))
+                .codecs(configurer -> configurer
+                        .defaultCodecs()
+                        .maxInMemorySize(100 * 1024 * 1024)) // 100MB buffer
+                .build();
+    }
+
+    @Bean(name = "maritimeServiceApiWebClient")
+    public WebClient maritimeServiceApiWebClient(){
+        log.info("========================================");
+        log.info("Maritime Service API WebClient 생성");
+        log.info("Base URL: {}", maritimeServiceApiUrl);
+        log.info("========================================");
+
+        HttpClient httpClient = HttpClient.create()
+                .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 10_000) // connect timeout: 10s
+                .responseTimeout(Duration.ofMinutes(5)); // long timeout: bulk web-service responses
+
+        return WebClient.builder()
+                .clientConnector(new ReactorClientHttpConnector(httpClient))
+                .baseUrl(maritimeServiceApiUrl)
+                .defaultHeaders(headers -> headers.setBasicAuth(maritimeApiUsername, maritimeApiPassword))
+                .codecs(configurer -> configurer
+                        .defaultCodecs()
+                        .maxInMemorySize(256 * 1024 * 1024)) // 256MB buffer
+                .build();
+    }
+}
+
+
+/**
+ * ========================================
+ * 다른 API 서버 추가 예시
+ * ========================================
+ *
+ * 1. 새로운 Config 클래스 생성:
+ *
+ * @Configuration
+ * public class ExternalApiWebClientConfig {
+ *
+ * @Bean(name = "externalApiWebClient")
+ * public WebClient externalApiWebClient(
+ * @Value("${app.batch.external-api.url}") String url,
+ * @Value("${app.batch.external-api.token}") String token) {
+ *
+ * return WebClient.builder()
+ * .baseUrl(url)
+ * .defaultHeader("Authorization", "Bearer " + token)
+ * .build();
+ * }
+ * }
+ *
+ * 2. JobConfig에서 사용:
+ *
+ * public ExternalJobConfig(
+ * ...,
+ * @Qualifier("externalApiWebClient") WebClient externalApiWebClient) {
+ * this.webClient = externalApiWebClient;
+ * }
+ *
+ * 3. application.yml에 설정 추가:
+ *
+ * app:
+ * batch:
+ * external-api:
+ * url: https://external-api.example.com
+ * token: ${EXTERNAL_API_TOKEN}
+ */
diff --git a/src/main/java/com/snp/batch/global/config/QuartzConfig.java b/src/main/java/com/snp/batch/global/config/QuartzConfig.java
new file mode 100644
index 0000000..6d42e24
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/config/QuartzConfig.java
@@ -0,0 +1,85 @@
+package com.snp.batch.global.config;
+
+import org.quartz.spi.TriggerFiredBundle;
+import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
+import org.springframework.boot.autoconfigure.quartz.QuartzProperties;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.quartz.SchedulerFactoryBean;
+import org.springframework.scheduling.quartz.SpringBeanJobFactory;
+
+import javax.sql.DataSource;
+import java.util.Properties;
+
+/**
+ * Quartz configuration.
+ * Defining a custom SchedulerFactoryBean disables Spring Boot's auto-configuration,
+ * so the DataSource and QuartzProperties must be injected explicitly.
+ */
+@Configuration
+public class QuartzConfig {
+
+    /**
+     * Quartz SchedulerFactoryBean.
+     * Injects DataSource and QuartzProperties explicitly to guarantee the JDBC job store is used.
+     */
+    @Bean
+    public SchedulerFactoryBean schedulerFactoryBean(
+            ApplicationContext applicationContext,
+            DataSource dataSource,
+            QuartzProperties quartzProperties) {
+
+        SchedulerFactoryBean factory = new SchedulerFactoryBean();
+        factory.setJobFactory(springBeanJobFactory(applicationContext));
+        factory.setDataSource(dataSource);
+        factory.setOverwriteExistingJobs(true);
+        // SchedulerInitializer calls start() itself, so disable auto-start;
+        // auto-start would load existing JDBC-store triggers and could double-fire jobs.
+        factory.setAutoStartup(false);
+
+        // Apply spring.quartz.properties from application.yml.
+        // jobStore.class is skipped because setDataSource() switches to LocalDataSourceJobStore;
+        // driverDelegateClass is kept since the PostgreSQL delegate is required.
+        Properties properties = new Properties();
+        quartzProperties.getProperties().forEach((key, value) -> {
+            if (!key.contains("jobStore.class")) {
+                properties.put(key, value);
+            }
+        });
+        factory.setQuartzProperties(properties);
+
+        return factory;
+    }
+
+    /**
+     * JobFactory bean that supports autowiring Spring beans into Quartz jobs.
+     */
+    @Bean
+    public SpringBeanJobFactory springBeanJobFactory(ApplicationContext applicationContext) {
+        AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
+        jobFactory.setApplicationContext(applicationContext);
+        return jobFactory;
+    }
+
+    /**
+     * JobFactory that autowires Spring beans into each Quartz job instance it creates.
+     */
+    public static class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory implements ApplicationContextAware {
+
+        private AutowireCapableBeanFactory beanFactory;
+
+        @Override
+        public void setApplicationContext(ApplicationContext applicationContext) {
+            beanFactory = applicationContext.getAutowireCapableBeanFactory();
+        }
+
+        @Override
+        protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
+            Object jobInstance = super.createJobInstance(bundle);
+            beanFactory.autowireBean(jobInstance);
+            return jobInstance;
+        }
+    }
+}
diff --git a/src/main/java/com/snp/batch/global/config/SwaggerConfig.java b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java
new file mode 100644
index 0000000..aadb66d
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java
@@ -0,0 +1,102 @@
+package com.snp.batch.global.config;
+
+import io.swagger.v3.oas.models.OpenAPI;
+import io.swagger.v3.oas.models.info.Contact;
+import io.swagger.v3.oas.models.info.Info;
+import io.swagger.v3.oas.models.info.License;
+import io.swagger.v3.oas.models.servers.Server;
+import org.springdoc.core.models.GroupedOpenApi;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.List;
+
+/**
+ * Swagger / OpenAPI 3.0 configuration.
+ *
+ * Swagger UI URLs:
+ * - Swagger UI: http://localhost:8041/snp-collector/swagger-ui/index.html
+ * - API docs (JSON): http://localhost:8041/snp-collector/v3/api-docs
+ * - API docs (YAML): http://localhost:8041/snp-collector/v3/api-docs.yaml
+ */
+@Configuration
+public class SwaggerConfig {
+
+    @Value("${server.port:8041}")
+    private int serverPort;
+
+    @Value("${server.servlet.context-path:}")
+    private String contextPath;
+
+    @Value("${app.environment:dev}")
+    private String environment;
+
+    @Bean
+    @ConditionalOnProperty(name = "app.environment", havingValue = "dev", matchIfMissing = true)
+    public GroupedOpenApi batchManagementApi() {
+        return GroupedOpenApi.builder()
+                .group("1. Batch Management")
+                .pathsToMatch("/api/batch/**")
+                .addOpenApiCustomizer(openApi -> openApi.info(new Info()
+                        .title("Batch Management API")
+                        .description("배치 Job 실행, 이력 조회, 스케줄 관리 API")
+                        .version("v1.0.0")))
+                .build();
+    }
+
+    @Bean
+    public OpenAPI openAPI() {
+        // Fix: raw List restored to List<Server> (generic argument was stripped)
+        List<Server> servers = "prod".equals(environment)
+                ? List.of(
+                        new Server()
+                                .url("https://guide.gc-si.dev" + contextPath)
+                                .description("GC 도메인"))
+                : List.of(
+                        new Server()
+                                .url("http://localhost:" + serverPort + contextPath)
+                                .description("로컬 개발 서버"),
+                        new Server()
+                                .url("http://211.208.115.83:" + serverPort + contextPath)
+                                .description("중계 서버"),
+                        new Server()
+                                .url("https://guide.gc-si.dev" + contextPath)
+                                .description("GC 도메인"));
+
+        return new OpenAPI()
+                .info(defaultApiInfo())
+                .servers(servers);
+    }
+
+    private Info defaultApiInfo() {
+        return new Info()
+                .title("SNP Collector REST API")
+                .description("""
+                        ## SNP Collector 시스템 REST API 문서
+
+                        해양 데이터 수집 배치 시스템의 REST API 문서입니다.
+
+                        ### 제공 API
+                        - **Batch Management API**: 배치 Job 실행, 이력 조회, 스케줄 관리
+
+                        ### 주요 기능
+                        - 배치 Job 실행 및 중지
+                        - Job 실행 이력 조회
+                        - 스케줄 관리 (Quartz)
+
+                        ### 버전 정보
+                        - API Version: v1.0.0
+                        - Spring Boot: 3.2.1
+                        - Spring Batch: 5.1.0
+                        """)
+                .version("v1.0.0")
+                .contact(new Contact()
+                        .name("SNP Collector Team")
+                        .email("support@snp-collector.com")
+                        .url("https://github.com/snp-collector"))
+                .license(new License()
+                        .name("Apache 2.0")
+                        .url("https://www.apache.org/licenses/LICENSE-2.0"));
+    }
+}
diff --git a/src/main/java/com/snp/batch/global/controller/BatchController.java b/src/main/java/com/snp/batch/global/controller/BatchController.java
new file mode 100644
index 0000000..859bd4c
--- /dev/null
+++ b/src/main/java/com/snp/batch/global/controller/BatchController.java
@@ -0,0 +1,791 @@
+package com.snp.batch.global.controller;
+
+import com.snp.batch.global.dto.*;
+import com.snp.batch.global.model.BatchCollectionPeriod;
+import com.snp.batch.global.model.BatchRecollectionHistory;
+import com.snp.batch.global.model.JobDisplayNameEntity;
+import com.snp.batch.global.repository.JobDisplayNameRepository;
+import com.snp.batch.service.BatchFailedRecordService;
+import com.snp.batch.service.BatchService;
+import com.snp.batch.service.RecollectionHistoryService;
+import com.snp.batch.service.ScheduleService;
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.Parameter;
+import io.swagger.v3.oas.annotations.enums.Explode;
+import io.swagger.v3.oas.annotations.enums.ParameterIn;
+import io.swagger.v3.oas.annotations.enums.ParameterStyle;
+import io.swagger.v3.oas.annotations.responses.ApiResponse;
+import io.swagger.v3.oas.annotations.responses.ApiResponses;
+import io.swagger.v3.oas.annotations.tags.Tag;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springdoc.core.annotations.ParameterObject;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageRequest;
+
+import jakarta.servlet.http.HttpServletResponse;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+@Slf4j
+@RestController
+@RequestMapping("/api/batch")
+@RequiredArgsConstructor
+@Tag(name = "Batch Management API", description = "배치 작업 실행 및 스케줄 관리 API")
+public class BatchController {
+
+ private final BatchService batchService;
+ private final ScheduleService scheduleService;
+ private final RecollectionHistoryService recollectionHistoryService;
+ private final BatchFailedRecordService batchFailedRecordService;
+ private final JobDisplayNameRepository jobDisplayNameRepository;
+
+ @Operation(summary = "배치 작업 실행", description = "지정된 배치 작업을 즉시 실행합니다. 쿼리 파라미터로 Job Parameters 전달 가능")
+ @ApiResponses(value = {
+ @ApiResponse(responseCode = "200", description = "작업 실행 성공"),
+ @ApiResponse(responseCode = "500", description = "작업 실행 실패")
+ })
+ @PostMapping("/jobs/{jobName}/execute")
+ public ResponseEntity