feat(동기화현황): 동기화 현황 메뉴 추가 및 배치 Reader 리팩토링 (#1) #5
1
.gitignore
vendored
1
.gitignore
vendored
@ -71,3 +71,4 @@ application-local.yml
|
|||||||
frontend/node/
|
frontend/node/
|
||||||
frontend/node_modules/
|
frontend/node_modules/
|
||||||
src/main/resources/static/
|
src/main/resources/static/
|
||||||
|
logs/
|
||||||
|
|||||||
24
docs/RELEASE-NOTES.md
Normal file
24
docs/RELEASE-NOTES.md
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# Release Notes
|
||||||
|
|
||||||
|
이 문서는 [Keep a Changelog](https://keepachangelog.com/ko/1.0.0/) 형식을 따릅니다.
|
||||||
|
|
||||||
|
## [Unreleased]
|
||||||
|
|
||||||
|
### 추가
|
||||||
|
- 동기화 현황 메뉴 추가: 도메인 탭 + 테이블 아코디언 + 인라인 데이터 조회 (#1)
|
||||||
|
- SyncStatusService: batch_flag 기반 테이블별 N/P/S 집계 (병렬 조회)
|
||||||
|
- P 상태 고착 레코드 조회 및 P→N 리셋 기능
|
||||||
|
|
||||||
|
### 변경
|
||||||
|
- BaseSyncReader 추출: 49개 Reader 공통 로직 통합, 1 chunk = 1 job_execution_id 보장
|
||||||
|
- chunk 경계 제어를 GroupByExecutionIdPolicy에서 Reader 자체 제어로 변경
|
||||||
|
- BatchWriteListener: SQL을 실행 시점에 생성하여 SOURCE_SCHEMA null 문제 해결
|
||||||
|
|
||||||
|
### 수정
|
||||||
|
- batch_flag P 상태 고착 버그 수정 (Reader의 N→P 전환 시점 분리)
|
||||||
|
- BatchWriteListener SQL null 참조 수정 (빈 생성 시 → 실행 시 지연 생성)
|
||||||
|
|
||||||
|
### 기타
|
||||||
|
- .gitignore에 logs/ 추가
|
||||||
|
- application-dev.yml chunk-size, sub-chunk-size 설정 추가
|
||||||
|
- Repository 배치 삽입 로그 주석처리
|
||||||
@ -12,6 +12,7 @@ const Executions = lazy(() => import('./pages/Executions'));
|
|||||||
const ExecutionDetail = lazy(() => import('./pages/ExecutionDetail'));
|
const ExecutionDetail = lazy(() => import('./pages/ExecutionDetail'));
|
||||||
const Schedules = lazy(() => import('./pages/Schedules'));
|
const Schedules = lazy(() => import('./pages/Schedules'));
|
||||||
const Timeline = lazy(() => import('./pages/Timeline'));
|
const Timeline = lazy(() => import('./pages/Timeline'));
|
||||||
|
const SyncStatus = lazy(() => import('./pages/SyncStatus'));
|
||||||
|
|
||||||
function AppLayout() {
|
function AppLayout() {
|
||||||
const { toasts, removeToast } = useToastContext();
|
const { toasts, removeToast } = useToastContext();
|
||||||
@ -28,6 +29,7 @@ function AppLayout() {
|
|||||||
<Route path="/executions/:id" element={<ExecutionDetail />} />
|
<Route path="/executions/:id" element={<ExecutionDetail />} />
|
||||||
<Route path="/schedules" element={<Schedules />} />
|
<Route path="/schedules" element={<Schedules />} />
|
||||||
<Route path="/schedule-timeline" element={<Timeline />} />
|
<Route path="/schedule-timeline" element={<Timeline />} />
|
||||||
|
<Route path="/sync-status" element={<SyncStatus />} />
|
||||||
</Routes>
|
</Routes>
|
||||||
</Suspense>
|
</Suspense>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -273,6 +273,48 @@ export interface ExecutionStatisticsDto {
|
|||||||
avgDurationMs: number;
|
avgDurationMs: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── Sync Status ─────────────────────────────────────────────
|
||||||
|
|
||||||
|
export interface SyncStatusSummary {
|
||||||
|
totalTables: number;
|
||||||
|
pendingCount: number;
|
||||||
|
processingCount: number;
|
||||||
|
completedCount: number;
|
||||||
|
stuckTables: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SyncTableStatus {
|
||||||
|
tableKey: string;
|
||||||
|
sourceTable: string;
|
||||||
|
targetTable: string;
|
||||||
|
domain: string;
|
||||||
|
pendingCount: number;
|
||||||
|
processingCount: number;
|
||||||
|
completedCount: number;
|
||||||
|
lastSyncTime: string | null;
|
||||||
|
stuck: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SyncDomainGroup {
|
||||||
|
domain: string;
|
||||||
|
domainLabel: string;
|
||||||
|
tables: SyncTableStatus[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SyncStatusResponse {
|
||||||
|
summary: SyncStatusSummary;
|
||||||
|
domains: SyncDomainGroup[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SyncDataPreviewResponse {
|
||||||
|
tableKey: string;
|
||||||
|
targetTable: string;
|
||||||
|
targetSchema: string;
|
||||||
|
columns: string[];
|
||||||
|
rows: Record<string, unknown>[];
|
||||||
|
totalCount: number;
|
||||||
|
}
|
||||||
|
|
||||||
// ── API Functions ────────────────────────────────────────────
|
// ── API Functions ────────────────────────────────────────────
|
||||||
|
|
||||||
export const batchApi = {
|
export const batchApi = {
|
||||||
@ -399,4 +441,18 @@ export const batchApi = {
|
|||||||
resetRetryCount: (ids: number[]) =>
|
resetRetryCount: (ids: number[]) =>
|
||||||
postJson<{ success: boolean; message: string; resetCount?: number }>(
|
postJson<{ success: boolean; message: string; resetCount?: number }>(
|
||||||
`${BASE}/failed-records/reset-retry`, { ids }),
|
`${BASE}/failed-records/reset-retry`, { ids }),
|
||||||
|
|
||||||
|
// Sync Status
|
||||||
|
getSyncStatus: () =>
|
||||||
|
fetchJson<SyncStatusResponse>(`${BASE}/sync-status`),
|
||||||
|
|
||||||
|
getSyncDataPreview: (tableKey: string, limit = 10) =>
|
||||||
|
fetchJson<SyncDataPreviewResponse>(`${BASE}/sync-status/${tableKey}/preview?limit=${limit}`),
|
||||||
|
|
||||||
|
getStuckRecords: (tableKey: string, limit = 50) =>
|
||||||
|
fetchJson<SyncDataPreviewResponse>(`${BASE}/sync-status/${tableKey}/stuck?limit=${limit}`),
|
||||||
|
|
||||||
|
resetStuckRecords: (tableKey: string) =>
|
||||||
|
postJson<{ success: boolean; message: string; resetCount?: number }>(
|
||||||
|
`${BASE}/sync-status/${tableKey}/reset`),
|
||||||
};
|
};
|
||||||
|
|||||||
@ -7,6 +7,7 @@ const navItems = [
|
|||||||
{ path: '/jobs', label: '작업', icon: '⚙️' },
|
{ path: '/jobs', label: '작업', icon: '⚙️' },
|
||||||
{ path: '/schedules', label: '스케줄', icon: '🕐' },
|
{ path: '/schedules', label: '스케줄', icon: '🕐' },
|
||||||
{ path: '/schedule-timeline', label: '타임라인', icon: '📅' },
|
{ path: '/schedule-timeline', label: '타임라인', icon: '📅' },
|
||||||
|
{ path: '/sync-status', label: '동기화 현황', icon: '🔄' },
|
||||||
];
|
];
|
||||||
|
|
||||||
export default function Navbar() {
|
export default function Navbar() {
|
||||||
|
|||||||
127
frontend/src/components/StuckRecordsModal.tsx
Normal file
127
frontend/src/components/StuckRecordsModal.tsx
Normal file
@ -0,0 +1,127 @@
|
|||||||
|
import { useState, useEffect } from 'react';
|
||||||
|
import { batchApi, type SyncDataPreviewResponse } from '../api/batchApi';
|
||||||
|
import LoadingSpinner from './LoadingSpinner';
|
||||||
|
|
||||||
|
interface Props {
|
||||||
|
open: boolean;
|
||||||
|
tableKey: string;
|
||||||
|
tableName: string;
|
||||||
|
onClose: () => void;
|
||||||
|
onReset: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function StuckRecordsModal({ open, tableKey, tableName, onClose, onReset }: Props) {
|
||||||
|
const [data, setData] = useState<SyncDataPreviewResponse | null>(null);
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!open || !tableKey) return;
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
batchApi.getStuckRecords(tableKey, 50)
|
||||||
|
.then(setData)
|
||||||
|
.catch((e) => setError(e.message))
|
||||||
|
.finally(() => setLoading(false));
|
||||||
|
}, [open, tableKey]);
|
||||||
|
|
||||||
|
if (!open) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="fixed inset-0 z-50 flex items-center justify-center bg-wing-overlay" onClick={onClose}>
|
||||||
|
<div
|
||||||
|
className="bg-wing-surface rounded-xl shadow-2xl max-w-5xl w-full mx-4 max-h-[80vh] flex flex-col"
|
||||||
|
onClick={(e) => e.stopPropagation()}
|
||||||
|
>
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between px-6 py-4 border-b border-wing-border">
|
||||||
|
<div>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<span className="text-red-500">⚠️</span>
|
||||||
|
<h3 className="text-lg font-semibold text-wing-text">P 상태 고착 레코드</h3>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-wing-muted mt-0.5">
|
||||||
|
{tableName}
|
||||||
|
{data ? ` | ${data.targetSchema}.${data.targetTable} | 총 ${data.totalCount.toLocaleString()}건 고착` : ''}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
{data && data.totalCount > 0 && (
|
||||||
|
<button
|
||||||
|
onClick={onReset}
|
||||||
|
className="px-3 py-1.5 text-xs font-medium text-white bg-red-500 hover:bg-red-600 rounded-lg transition-colors"
|
||||||
|
>
|
||||||
|
전체 P→N 리셋
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
<button
|
||||||
|
onClick={onClose}
|
||||||
|
className="px-3 py-1.5 text-sm text-wing-muted hover:text-wing-text transition-colors"
|
||||||
|
>
|
||||||
|
닫기
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Body */}
|
||||||
|
<div className="flex-1 overflow-auto p-4">
|
||||||
|
{loading && <LoadingSpinner />}
|
||||||
|
{error && (
|
||||||
|
<div className="text-center py-8 text-red-400">조회 실패: {error}</div>
|
||||||
|
)}
|
||||||
|
{!loading && !error && data && data.rows.length === 0 && (
|
||||||
|
<div className="text-center py-8 text-wing-muted">P 상태 레코드가 없습니다</div>
|
||||||
|
)}
|
||||||
|
{!loading && !error && data && data.rows.length > 0 && (
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="w-full text-xs">
|
||||||
|
<thead>
|
||||||
|
<tr className="border-b border-wing-border">
|
||||||
|
{data.columns.map((col) => (
|
||||||
|
<th
|
||||||
|
key={col}
|
||||||
|
className={`px-3 py-2 text-left font-medium whitespace-nowrap bg-wing-card
|
||||||
|
${col === 'batch_flag' ? 'text-red-500' : 'text-wing-muted'}`}
|
||||||
|
>
|
||||||
|
{col}
|
||||||
|
</th>
|
||||||
|
))}
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{data.rows.map((row, idx) => (
|
||||||
|
<tr key={idx} className="border-b border-wing-border/50 hover:bg-wing-hover">
|
||||||
|
{data.columns.map((col) => (
|
||||||
|
<td
|
||||||
|
key={col}
|
||||||
|
className={`px-3 py-1.5 whitespace-nowrap max-w-[200px] truncate
|
||||||
|
${col === 'batch_flag' ? 'text-red-500 font-bold' : 'text-wing-text'}`}
|
||||||
|
>
|
||||||
|
{formatCellValue(row[col])}
|
||||||
|
</td>
|
||||||
|
))}
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Footer */}
|
||||||
|
{data && data.rows.length > 0 && (
|
||||||
|
<div className="px-6 py-3 border-t border-wing-border text-xs text-wing-muted flex items-center justify-between">
|
||||||
|
<span>{data.rows.length}건 표시 (전체 {data.totalCount.toLocaleString()}건)</span>
|
||||||
|
<span className="text-red-400">리셋 시 batch_flag가 P→N으로 변경되어 다음 동기화에 재처리됩니다</span>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatCellValue(value: unknown): string {
|
||||||
|
if (value === null || value === undefined) return '-';
|
||||||
|
if (typeof value === 'object') return JSON.stringify(value);
|
||||||
|
return String(value);
|
||||||
|
}
|
||||||
108
frontend/src/components/SyncDataPreviewModal.tsx
Normal file
108
frontend/src/components/SyncDataPreviewModal.tsx
Normal file
@ -0,0 +1,108 @@
|
|||||||
|
import { useState, useEffect } from 'react';
|
||||||
|
import { batchApi, type SyncDataPreviewResponse } from '../api/batchApi';
|
||||||
|
import LoadingSpinner from './LoadingSpinner';
|
||||||
|
|
||||||
|
interface Props {
|
||||||
|
open: boolean;
|
||||||
|
tableKey: string;
|
||||||
|
tableName: string;
|
||||||
|
onClose: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function SyncDataPreviewModal({ open, tableKey, tableName, onClose }: Props) {
|
||||||
|
const [data, setData] = useState<SyncDataPreviewResponse | null>(null);
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!open || !tableKey) return;
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
batchApi.getSyncDataPreview(tableKey, 10)
|
||||||
|
.then(setData)
|
||||||
|
.catch((e) => setError(e.message))
|
||||||
|
.finally(() => setLoading(false));
|
||||||
|
}, [open, tableKey]);
|
||||||
|
|
||||||
|
if (!open) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="fixed inset-0 z-50 flex items-center justify-center bg-wing-overlay" onClick={onClose}>
|
||||||
|
<div
|
||||||
|
className="bg-wing-surface rounded-xl shadow-2xl max-w-5xl w-full mx-4 max-h-[80vh] flex flex-col"
|
||||||
|
onClick={(e) => e.stopPropagation()}
|
||||||
|
>
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between px-6 py-4 border-b border-wing-border">
|
||||||
|
<div>
|
||||||
|
<h3 className="text-lg font-semibold text-wing-text">{tableName}</h3>
|
||||||
|
<p className="text-xs text-wing-muted mt-0.5">
|
||||||
|
{data ? `${data.targetSchema}.${data.targetTable} | 총 ${data.totalCount.toLocaleString()}건` : ''}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
onClick={onClose}
|
||||||
|
className="px-3 py-1.5 text-sm text-wing-muted hover:text-wing-text transition-colors"
|
||||||
|
>
|
||||||
|
닫기
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Body */}
|
||||||
|
<div className="flex-1 overflow-auto p-4">
|
||||||
|
{loading && <LoadingSpinner />}
|
||||||
|
{error && (
|
||||||
|
<div className="text-center py-8 text-wing-muted">
|
||||||
|
<p className="text-red-400">조회 실패: {error}</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{!loading && !error && data && data.rows.length === 0 && (
|
||||||
|
<div className="text-center py-8 text-wing-muted">데이터가 없습니다</div>
|
||||||
|
)}
|
||||||
|
{!loading && !error && data && data.rows.length > 0 && (
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="w-full text-xs">
|
||||||
|
<thead>
|
||||||
|
<tr className="border-b border-wing-border">
|
||||||
|
{data.columns.map((col) => (
|
||||||
|
<th
|
||||||
|
key={col}
|
||||||
|
className="px-3 py-2 text-left font-medium text-wing-muted whitespace-nowrap bg-wing-card"
|
||||||
|
>
|
||||||
|
{col}
|
||||||
|
</th>
|
||||||
|
))}
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{data.rows.map((row, idx) => (
|
||||||
|
<tr key={idx} className="border-b border-wing-border/50 hover:bg-wing-hover">
|
||||||
|
{data.columns.map((col) => (
|
||||||
|
<td key={col} className="px-3 py-1.5 text-wing-text whitespace-nowrap max-w-[200px] truncate">
|
||||||
|
{formatCellValue(row[col])}
|
||||||
|
</td>
|
||||||
|
))}
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Footer */}
|
||||||
|
{data && data.rows.length > 0 && (
|
||||||
|
<div className="px-6 py-3 border-t border-wing-border text-xs text-wing-muted">
|
||||||
|
최근 {data.rows.length}건 표시 (전체 {data.totalCount.toLocaleString()}건)
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatCellValue(value: unknown): string {
|
||||||
|
if (value === null || value === undefined) return '-';
|
||||||
|
if (typeof value === 'object') return JSON.stringify(value);
|
||||||
|
return String(value);
|
||||||
|
}
|
||||||
393
frontend/src/pages/SyncStatus.tsx
Normal file
393
frontend/src/pages/SyncStatus.tsx
Normal file
@ -0,0 +1,393 @@
|
|||||||
|
import { useState, useCallback, useEffect } from 'react';
|
||||||
|
import {
|
||||||
|
batchApi,
|
||||||
|
type SyncStatusResponse,
|
||||||
|
type SyncTableStatus,
|
||||||
|
type SyncDataPreviewResponse,
|
||||||
|
} from '../api/batchApi';
|
||||||
|
import { usePoller } from '../hooks/usePoller';
|
||||||
|
import { useToastContext } from '../contexts/ToastContext';
|
||||||
|
import LoadingSpinner from '../components/LoadingSpinner';
|
||||||
|
import EmptyState from '../components/EmptyState';
|
||||||
|
import ConfirmModal from '../components/ConfirmModal';
|
||||||
|
import GuideModal, { HelpButton } from '../components/GuideModal';
|
||||||
|
|
||||||
|
const POLLING_INTERVAL = 30000;
|
||||||
|
|
||||||
|
const DOMAIN_ICONS: Record<string, string> = {
|
||||||
|
ship: '🚢',
|
||||||
|
company: '🏢',
|
||||||
|
event: '⚠️',
|
||||||
|
facility: '🏭',
|
||||||
|
psc: '🔍',
|
||||||
|
movements: '📍',
|
||||||
|
code: '🏷️',
|
||||||
|
'risk-compliance': '🛡️',
|
||||||
|
};
|
||||||
|
|
||||||
|
const GUIDE_ITEMS = [
|
||||||
|
{
|
||||||
|
title: '도메인 탭',
|
||||||
|
content: 'Ship, PSC 등 도메인별로 테이블을 그룹핑하여 조회합니다.\nP 고착 테이블이 있는 도메인에는 경고 뱃지가 표시됩니다.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: '테이블 아코디언',
|
||||||
|
content: '각 테이블을 펼치면 대기(N)/진행(P)/완료(S) 건수와 상세 데이터를 확인할 수 있습니다.\n⚠️ 표시는 P 상태에 고착된 레코드가 있음을 의미합니다.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: '동기화 데이터 / P 상태 레코드',
|
||||||
|
content: '동기화 데이터 탭: 타겟 스키마(std_snp_svc)의 최근 동기화 데이터를 보여줍니다.\nP 상태 레코드 탭: Writer 실패로 P 상태에 멈춘 레코드를 확인하고 리셋할 수 있습니다.',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
export default function SyncStatus() {
|
||||||
|
const { showToast } = useToastContext();
|
||||||
|
const [data, setData] = useState<SyncStatusResponse | null>(null);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
const [guideOpen, setGuideOpen] = useState(false);
|
||||||
|
|
||||||
|
// Tab & accordion state
|
||||||
|
const [activeDomain, setActiveDomain] = useState<string>('ship');
|
||||||
|
const [expandedTable, setExpandedTable] = useState<string>('ship-001');
|
||||||
|
const [detailTabs, setDetailTabs] = useState<Record<string, 'preview' | 'stuck'>>({});
|
||||||
|
|
||||||
|
// Reset confirm
|
||||||
|
const [resetTableKey, setResetTableKey] = useState('');
|
||||||
|
const [resetConfirmOpen, setResetConfirmOpen] = useState(false);
|
||||||
|
const [resetting, setResetting] = useState(false);
|
||||||
|
|
||||||
|
const loadData = useCallback(async () => {
|
||||||
|
try {
|
||||||
|
const result = await batchApi.getSyncStatus();
|
||||||
|
setData(result);
|
||||||
|
} catch {
|
||||||
|
if (loading) showToast('동기화 현황 조회 실패', 'error');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
usePoller(loadData, POLLING_INTERVAL);
|
||||||
|
|
||||||
|
const toggleAccordion = (tableKey: string) => {
|
||||||
|
setExpandedTable((prev) => (prev === tableKey ? '' : tableKey));
|
||||||
|
};
|
||||||
|
|
||||||
|
const getDetailTab = (tableKey: string) => detailTabs[tableKey] || 'preview';
|
||||||
|
const setDetailTab = (tableKey: string, tab: 'preview' | 'stuck') => {
|
||||||
|
setDetailTabs((prev) => ({ ...prev, [tableKey]: tab }));
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleReset = async () => {
|
||||||
|
setResetting(true);
|
||||||
|
try {
|
||||||
|
const result = await batchApi.resetStuckRecords(resetTableKey);
|
||||||
|
const allTables = data?.domains.flatMap((d) => d.tables) ?? [];
|
||||||
|
const table = allTables.find((t) => t.tableKey === resetTableKey);
|
||||||
|
showToast(`${table?.sourceTable ?? resetTableKey}: ${result.resetCount}건 리셋 완료`, 'success');
|
||||||
|
setResetConfirmOpen(false);
|
||||||
|
loadData();
|
||||||
|
} catch {
|
||||||
|
showToast('리셋 실패', 'error');
|
||||||
|
} finally {
|
||||||
|
setResetting(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const activeDomainGroup = data?.domains.find((d) => d.domain === activeDomain);
|
||||||
|
const resetTable = data?.domains.flatMap((d) => d.tables).find((t) => t.tableKey === resetTableKey);
|
||||||
|
|
||||||
|
if (loading) return <LoadingSpinner />;
|
||||||
|
if (!data) return <EmptyState message="데이터를 불러올 수 없습니다" />;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between mb-6">
|
||||||
|
<h1 className="text-2xl font-bold text-wing-text">동기화 현황</h1>
|
||||||
|
<HelpButton onClick={() => setGuideOpen(true)} />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* ── Domain Tabs ── */}
|
||||||
|
<div className="flex gap-0 overflow-x-auto border-b border-wing-border mb-4">
|
||||||
|
{data.domains.map((d) => {
|
||||||
|
const stuckCount = d.tables.filter((t) => t.stuck).length;
|
||||||
|
return (
|
||||||
|
<button
|
||||||
|
key={d.domain}
|
||||||
|
onClick={() => setActiveDomain(d.domain)}
|
||||||
|
className={`px-4 py-2.5 text-sm font-medium whitespace-nowrap border-b-2 transition-colors
|
||||||
|
${activeDomain === d.domain
|
||||||
|
? 'border-wing-accent text-wing-accent'
|
||||||
|
: 'border-transparent text-wing-muted hover:text-wing-text hover:border-wing-border'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<span className="mr-1">{DOMAIN_ICONS[d.domain] || ''}</span>
|
||||||
|
{d.domainLabel}
|
||||||
|
<span className="ml-1 text-xs opacity-60">({d.tables.length})</span>
|
||||||
|
{stuckCount > 0 && (
|
||||||
|
<span className="ml-1.5 inline-flex items-center justify-center w-5 h-5 text-[10px] font-bold text-white bg-red-500 rounded-full">
|
||||||
|
{stuckCount}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* ── Table Accordions ── */}
|
||||||
|
{activeDomainGroup && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
{activeDomainGroup.tables.map((table) => (
|
||||||
|
<TableAccordion
|
||||||
|
key={table.tableKey}
|
||||||
|
table={table}
|
||||||
|
expanded={expandedTable === table.tableKey}
|
||||||
|
detailTab={getDetailTab(table.tableKey)}
|
||||||
|
onToggle={() => toggleAccordion(table.tableKey)}
|
||||||
|
onDetailTabChange={(tab) => setDetailTab(table.tableKey, tab)}
|
||||||
|
onReset={() => { setResetTableKey(table.tableKey); setResetConfirmOpen(true); }}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Reset Confirm Modal */}
|
||||||
|
<ConfirmModal
|
||||||
|
open={resetConfirmOpen}
|
||||||
|
title="P→N 리셋 확인"
|
||||||
|
message={`${resetTable?.sourceTable ?? ''}의 P 상태 레코드를 모두 N(대기)으로 리셋하시겠습니까?\n리셋된 레코드는 다음 동기화 실행 시 재처리됩니다.`}
|
||||||
|
confirmLabel={resetting ? '리셋 중...' : '리셋'}
|
||||||
|
onConfirm={handleReset}
|
||||||
|
onCancel={() => setResetConfirmOpen(false)}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<GuideModal
|
||||||
|
open={guideOpen}
|
||||||
|
pageTitle="동기화 현황"
|
||||||
|
sections={GUIDE_ITEMS}
|
||||||
|
onClose={() => setGuideOpen(false)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Sub Components ────────────────────────────────────────────
|
||||||
|
|
||||||
|
interface TableAccordionProps {
|
||||||
|
table: SyncTableStatus;
|
||||||
|
expanded: boolean;
|
||||||
|
detailTab: 'preview' | 'stuck';
|
||||||
|
onToggle: () => void;
|
||||||
|
onDetailTabChange: (tab: 'preview' | 'stuck') => void;
|
||||||
|
onReset: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
function TableAccordion({ table, expanded, detailTab, onToggle, onDetailTabChange, onReset }: TableAccordionProps) {
|
||||||
|
return (
|
||||||
|
<div className={`bg-wing-card rounded-xl border overflow-hidden
|
||||||
|
${table.stuck ? 'border-amber-400 ring-1 ring-amber-100' : 'border-wing-border'}`}>
|
||||||
|
{/* Accordion header */}
|
||||||
|
<button
|
||||||
|
onClick={onToggle}
|
||||||
|
className="w-full flex items-center justify-between px-5 py-3 hover:bg-wing-hover transition-colors text-left"
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
{table.stuck && <span className="text-red-500">⚠️</span>}
|
||||||
|
<div>
|
||||||
|
<span className="text-wing-text font-semibold text-sm">{table.targetTable}</span>
|
||||||
|
<span className="text-xs text-wing-muted ml-2">{table.tableKey}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-2 text-xs">
|
||||||
|
<span className="inline-flex items-center gap-1 px-2.5 py-0.5 rounded-full font-semibold tabular-nums bg-amber-100 text-amber-700">
|
||||||
|
{table.pendingCount.toLocaleString()}
|
||||||
|
</span>
|
||||||
|
<span className={`inline-flex items-center gap-1 px-2.5 py-0.5 rounded-full font-semibold tabular-nums
|
||||||
|
${table.stuck ? 'bg-red-100 text-red-700' : 'bg-blue-100 text-blue-700'}`}>
|
||||||
|
{table.processingCount.toLocaleString()}
|
||||||
|
</span>
|
||||||
|
<span className="inline-flex items-center gap-1 px-2.5 py-0.5 rounded-full font-semibold tabular-nums bg-emerald-100 text-emerald-700">
|
||||||
|
{table.completedCount.toLocaleString()}
|
||||||
|
</span>
|
||||||
|
<span className="text-wing-muted w-16 text-right ml-1">{formatRelativeTime(table.lastSyncTime)}</span>
|
||||||
|
<span className="text-wing-muted">{expanded ? '▲' : '▼'}</span>
|
||||||
|
</div>
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{/* Accordion body */}
|
||||||
|
{expanded && (
|
||||||
|
<div className="border-t border-wing-border p-5">
|
||||||
|
{/* Stats row */}
|
||||||
|
<div className="grid grid-cols-2 md:grid-cols-4 gap-3 mb-4">
|
||||||
|
<MiniStat label="대기 (N)" value={table.pendingCount} color="text-amber-600 dark:text-amber-400" />
|
||||||
|
<MiniStat label="진행 (P)" value={table.processingCount} color="text-blue-600 dark:text-blue-400"
|
||||||
|
warn={table.stuck} />
|
||||||
|
<MiniStat label="완료 (S)" value={table.completedCount} color="text-emerald-600 dark:text-emerald-400" />
|
||||||
|
<div className="bg-wing-bg rounded-lg p-3">
|
||||||
|
<p className="text-xs text-wing-muted">최근 동기화</p>
|
||||||
|
<p className="text-sm font-medium text-wing-text mt-0.5">{formatRelativeTime(table.lastSyncTime)}</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Detail sub-tabs */}
|
||||||
|
<div className="flex items-center justify-between mb-3">
|
||||||
|
<div className="flex gap-1">
|
||||||
|
<button
|
||||||
|
onClick={() => onDetailTabChange('preview')}
|
||||||
|
className={`px-3 py-1.5 rounded-lg text-xs font-medium transition-colors
|
||||||
|
${detailTab === 'preview'
|
||||||
|
? 'bg-wing-accent text-white'
|
||||||
|
: 'bg-wing-bg text-wing-muted hover:bg-wing-hover'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
동기화 데이터
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => onDetailTabChange('stuck')}
|
||||||
|
className={`px-3 py-1.5 rounded-lg text-xs font-medium transition-colors
|
||||||
|
${detailTab === 'stuck'
|
||||||
|
? 'bg-wing-accent text-white'
|
||||||
|
: 'bg-wing-bg text-wing-muted hover:bg-wing-hover'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
P 상태 레코드
|
||||||
|
{table.processingCount > 0 && (
|
||||||
|
<span className="ml-1 text-red-300">({table.processingCount.toLocaleString()})</span>
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{detailTab === 'stuck' && table.stuck && (
|
||||||
|
<button
|
||||||
|
onClick={onReset}
|
||||||
|
className="px-3 py-1.5 text-xs font-medium text-white bg-red-500 hover:bg-red-600 rounded-lg transition-colors"
|
||||||
|
>
|
||||||
|
전체 P→N 리셋
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Tab content */}
|
||||||
|
{detailTab === 'preview' && (
|
||||||
|
<InlineDataTable tableKey={table.tableKey} fetchFn={batchApi.getSyncDataPreview} />
|
||||||
|
)}
|
||||||
|
{detailTab === 'stuck' && (
|
||||||
|
<InlineDataTable tableKey={table.tableKey} fetchFn={batchApi.getStuckRecords} />
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
interface MiniStatProps {
|
||||||
|
label: string;
|
||||||
|
value: number;
|
||||||
|
color: string;
|
||||||
|
warn?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
function MiniStat({ label, value, color, warn }: MiniStatProps) {
|
||||||
|
return (
|
||||||
|
<div className={`bg-wing-bg rounded-lg p-3 ${warn ? 'ring-1 ring-red-400' : ''}`}>
|
||||||
|
<p className="text-xs text-wing-muted">{label}</p>
|
||||||
|
<p className={`text-lg font-bold mt-0.5 tabular-nums ${color}`}>
|
||||||
|
{value.toLocaleString()}
|
||||||
|
{warn && <span className="ml-1 text-xs text-red-500">고착</span>}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InlineDataTableProps {
|
||||||
|
tableKey: string;
|
||||||
|
fetchFn: (tableKey: string, limit: number) => Promise<SyncDataPreviewResponse>;
|
||||||
|
}
|
||||||
|
|
||||||
|
function InlineDataTable({ tableKey, fetchFn }: InlineDataTableProps) {
|
||||||
|
const [data, setData] = useState<SyncDataPreviewResponse | null>(null);
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
setData(null);
|
||||||
|
fetchFn(tableKey, 20)
|
||||||
|
.then(setData)
|
||||||
|
.catch((e) => setError(e.message))
|
||||||
|
.finally(() => setLoading(false));
|
||||||
|
}, [tableKey, fetchFn]);
|
||||||
|
|
||||||
|
if (loading) return <div className="py-8"><LoadingSpinner /></div>;
|
||||||
|
if (error) return <div className="text-center py-8 text-red-400 text-sm">조회 실패: {error}</div>;
|
||||||
|
if (!data || data.rows.length === 0) {
|
||||||
|
return <div className="text-center py-8 text-wing-muted text-sm">데이터가 없습니다</div>;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<div className="overflow-x-auto rounded-lg border border-wing-border">
|
||||||
|
<table className="w-full text-xs">
|
||||||
|
<thead>
|
||||||
|
<tr className="border-b border-wing-border">
|
||||||
|
{data.columns.map((col) => (
|
||||||
|
<th
|
||||||
|
key={col}
|
||||||
|
className={`px-3 py-2 text-left font-medium whitespace-nowrap bg-wing-bg
|
||||||
|
${col === 'batch_flag' ? 'text-blue-500' : 'text-wing-muted'}`}
|
||||||
|
>
|
||||||
|
{col}
|
||||||
|
</th>
|
||||||
|
))}
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{data.rows.map((row, idx) => (
|
||||||
|
<tr key={idx} className="border-b border-wing-border/50 hover:bg-wing-hover">
|
||||||
|
{data.columns.map((col) => (
|
||||||
|
<td
|
||||||
|
key={col}
|
||||||
|
className={`px-3 py-1.5 whitespace-nowrap max-w-[200px] truncate
|
||||||
|
${col === 'batch_flag' ? 'font-bold text-blue-600 dark:text-blue-400' : 'text-wing-text'}`}
|
||||||
|
>
|
||||||
|
{formatCellValue(row[col])}
|
||||||
|
</td>
|
||||||
|
))}
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-wing-muted mt-2">
|
||||||
|
{data.rows.length}건 표시 (전체 {data.totalCount.toLocaleString()}건) · {data.targetSchema}.{data.targetTable}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatCellValue(value: unknown): string {
|
||||||
|
if (value === null || value === undefined) return '-';
|
||||||
|
if (typeof value === 'object') return JSON.stringify(value);
|
||||||
|
return String(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatRelativeTime(dateStr: string | null): string {
|
||||||
|
if (!dateStr) return '-';
|
||||||
|
try {
|
||||||
|
const date = new Date(dateStr);
|
||||||
|
if (isNaN(date.getTime())) return '-';
|
||||||
|
const now = new Date();
|
||||||
|
const diffMs = now.getTime() - date.getTime();
|
||||||
|
const diffMin = Math.floor(diffMs / 60000);
|
||||||
|
if (diffMin < 1) return '방금 전';
|
||||||
|
if (diffMin < 60) return `${diffMin}분 전`;
|
||||||
|
const diffHour = Math.floor(diffMin / 60);
|
||||||
|
if (diffHour < 24) return `${diffHour}시간 전`;
|
||||||
|
const diffDay = Math.floor(diffHour / 24);
|
||||||
|
return `${diffDay}일 전`;
|
||||||
|
} catch {
|
||||||
|
return '-';
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -94,7 +94,7 @@ public abstract class BaseJobConfig<I, O> {
|
|||||||
|
|
||||||
if (processor != null) {
|
if (processor != null) {
|
||||||
var chunkBuilder = stepBuilder
|
var chunkBuilder = stepBuilder
|
||||||
.<I, O>chunk(getChunkSize(), transactionManager)
|
.<I, O>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(processor)
|
.processor(processor)
|
||||||
.writer(createWriter());
|
.writer(createWriter());
|
||||||
@ -104,7 +104,7 @@ public abstract class BaseJobConfig<I, O> {
|
|||||||
} else {
|
} else {
|
||||||
@SuppressWarnings("unchecked")
|
@SuppressWarnings("unchecked")
|
||||||
var chunkBuilder = stepBuilder
|
var chunkBuilder = stepBuilder
|
||||||
.<I, I>chunk(getChunkSize(), transactionManager)
|
.<I, I>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.writer((ItemWriter<? super I>) createWriter());
|
.writer((ItemWriter<? super I>) createWriter());
|
||||||
|
|
||||||
|
|||||||
@ -55,7 +55,7 @@ public abstract class BaseProcessor<I, O> implements ItemProcessor<I, O> {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
log.debug("데이터 처리 중: {}", item);
|
// log.debug("데이터 처리 중: {}", item);
|
||||||
return processItem(item);
|
return processItem(item);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -0,0 +1,98 @@
|
|||||||
|
package com.snp.batch.common.batch.reader;
|
||||||
|
|
||||||
|
import com.snp.batch.common.util.CommonSql;
|
||||||
|
import com.snp.batch.common.util.JobExecutionGroupable;
|
||||||
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.batch.item.ItemReader;
|
||||||
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 동기화 Reader 추상 클래스
|
||||||
|
*
|
||||||
|
* 1 chunk = 1 job_execution_id 보장:
|
||||||
|
* - 한 그룹의 데이터를 모두 반환한 후 null을 반환하여 청크 종료
|
||||||
|
* - chunk(Integer.MAX_VALUE)와 함께 사용하여 Reader가 청크 경계를 제어
|
||||||
|
* - 다음 그룹의 N→P 전환은 이전 그룹의 청크 처리(Write + P→S)가 완료된 후에만 발생
|
||||||
|
*
|
||||||
|
* @param <T> DTO 타입 (JobExecutionGroupable 구현 필요)
|
||||||
|
*/
|
||||||
|
@Slf4j
|
||||||
|
public abstract class BaseSyncReader<T extends JobExecutionGroupable> implements ItemReader<T> {
|
||||||
|
|
||||||
|
protected final TableMetaInfo tableMetaInfo;
|
||||||
|
protected final JdbcTemplate businessJdbcTemplate;
|
||||||
|
|
||||||
|
private List<T> allDataBuffer = new ArrayList<>();
|
||||||
|
private Long currentGroupId = null;
|
||||||
|
|
||||||
|
protected BaseSyncReader(DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
|
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
||||||
|
this.tableMetaInfo = tableMetaInfo;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 소스 테이블명 반환 (예: tableMetaInfo.sourceIceClass)
|
||||||
|
*/
|
||||||
|
protected abstract String getSourceTable();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ResultSet → DTO 매핑
|
||||||
|
*/
|
||||||
|
protected abstract T mapRow(ResultSet rs, Long targetId) throws SQLException;
|
||||||
|
|
||||||
|
protected String getLogPrefix() {
|
||||||
|
return getClass().getSimpleName();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public T read() throws Exception {
|
||||||
|
if (allDataBuffer.isEmpty()) {
|
||||||
|
// 이전 그룹 처리 완료 → null 반환하여 청크 종료
|
||||||
|
// (Writer + afterWrite(P→S)가 실행된 후 다음 청크에서 다음 그룹 로드)
|
||||||
|
if (currentGroupId != null) {
|
||||||
|
currentGroupId = null;
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 다음 그룹 로드
|
||||||
|
fetchNextGroup();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (allDataBuffer.isEmpty()) {
|
||||||
|
return null; // 더 이상 처리할 데이터 없음 → Step 종료
|
||||||
|
}
|
||||||
|
|
||||||
|
return allDataBuffer.remove(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void fetchNextGroup() {
|
||||||
|
Long nextTargetId;
|
||||||
|
try {
|
||||||
|
nextTargetId = businessJdbcTemplate.queryForObject(
|
||||||
|
CommonSql.getNextTargetQuery(getSourceTable()), Long.class);
|
||||||
|
} catch (Exception e) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nextTargetId == null) return;
|
||||||
|
|
||||||
|
log.info("[{}] 다음 처리 대상 ID 발견: {}", getLogPrefix(), nextTargetId);
|
||||||
|
|
||||||
|
String sql = CommonSql.getTargetDataQuery(getSourceTable());
|
||||||
|
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) ->
|
||||||
|
mapRow(rs, nextTargetId), nextTargetId);
|
||||||
|
|
||||||
|
// N→P 전환
|
||||||
|
String updateSql = CommonSql.getProcessBatchQuery(getSourceTable());
|
||||||
|
businessJdbcTemplate.update(updateSql, nextTargetId);
|
||||||
|
|
||||||
|
currentGroupId = nextTargetId;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -203,7 +203,7 @@ public abstract class BaseJdbcRepository<T, ID> {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", getEntityName(), entities.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", getEntityName(), entities.size());
|
||||||
|
|
||||||
jdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(),
|
jdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -215,7 +215,7 @@ public abstract class BaseJdbcRepository<T, ID> {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@ -78,7 +78,7 @@ public abstract class MultiDataSourceJdbcRepository<T, ID> {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 시작: {} 건 (Business DB)", getEntityName(), entities.size());
|
// log.debug("{} 배치 삽입 시작: {} 건 (Business DB)", getEntityName(), entities.size());
|
||||||
|
|
||||||
// ⭐ businessJdbcTemplate 사용
|
// ⭐ businessJdbcTemplate 사용
|
||||||
businessJdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(),
|
businessJdbcTemplate.batchUpdate(getInsertSql(), entities, entities.size(),
|
||||||
@ -91,7 +91,7 @@ public abstract class MultiDataSourceJdbcRepository<T, ID> {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", getEntityName(), entities.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
// ... (나머지 find, save, update, delete 메서드도 businessJdbcTemplate을 사용하여 구현합니다.)
|
// ... (나머지 find, save, update, delete 메서드도 businessJdbcTemplate을 사용하여 구현합니다.)
|
||||||
|
|||||||
@ -1,51 +1,55 @@
|
|||||||
package com.snp.batch.common.util;
|
package com.snp.batch.common.util;
|
||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.core.ItemWriteListener;
|
import org.springframework.batch.core.ItemWriteListener;
|
||||||
import org.springframework.batch.item.Chunk;
|
import org.springframework.batch.item.Chunk;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
import org.springframework.jdbc.core.JdbcTemplate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writer 성공 후 batch_flag P→S 업데이트 리스너
|
||||||
|
*
|
||||||
|
* SQL은 실행 시점에 생성 (CommonSql.SOURCE_SCHEMA 초기화 보장)
|
||||||
|
*/
|
||||||
@Slf4j
|
@Slf4j
|
||||||
@RequiredArgsConstructor
|
|
||||||
public class BatchWriteListener<S extends JobExecutionGroupable> implements ItemWriteListener<S> {
|
public class BatchWriteListener<S extends JobExecutionGroupable> implements ItemWriteListener<S> {
|
||||||
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
private final JdbcTemplate businessJdbcTemplate;
|
||||||
private final String updateSql; // 실행할 쿼리 (예: "UPDATE ... SET batch_flag = 'S' ...")
|
private final String sourceTable;
|
||||||
|
|
||||||
|
public BatchWriteListener(JdbcTemplate businessJdbcTemplate, String sourceTable) {
|
||||||
|
this.businessJdbcTemplate = businessJdbcTemplate;
|
||||||
|
this.sourceTable = sourceTable;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void afterWrite(Chunk<? extends S> items) {
|
public void afterWrite(Chunk<? extends S> items) {
|
||||||
// afterWrite는 Writer가 예외 없이 성공했을 때만 실행되는 것이 보장되어야 함
|
|
||||||
if (items.isEmpty()) return;
|
if (items.isEmpty()) return;
|
||||||
|
|
||||||
Long jobExecutionId = items.getItems().get(0).getJobExecutionId();
|
Long jobExecutionId = items.getItems().get(0).getJobExecutionId();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
int updatedRows = businessJdbcTemplate.update(updateSql, jobExecutionId);
|
// SQL을 실행 시점에 생성하여 SOURCE_SCHEMA null 문제 방지
|
||||||
|
String sql = CommonSql.getCompleteBatchQuery(sourceTable);
|
||||||
|
int updatedRows = businessJdbcTemplate.update(sql, jobExecutionId);
|
||||||
log.info("[BatchWriteListener] Success update 'S'. jobExecutionId: {}, rows: {}", jobExecutionId, updatedRows);
|
log.info("[BatchWriteListener] Success update 'S'. jobExecutionId: {}, rows: {}", jobExecutionId, updatedRows);
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.error("[BatchWriteListener] Update 'S' failed. jobExecutionId: {}", jobExecutionId, e);
|
log.error("[BatchWriteListener] Update 'S' failed. jobExecutionId: {}", jobExecutionId, e);
|
||||||
// ❗중요: 리스너의 업데이트가 실패해도 배치를 중단시키려면 예외를 던져야 함
|
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void onWriteError(Exception exception, Chunk<? extends S> items) {
|
public void onWriteError(Exception exception, Chunk<? extends S> items) {
|
||||||
// ⭐ Writer에서 에러가 발생하면 이 메서드가 호출됨
|
|
||||||
if (!items.isEmpty()) {
|
if (!items.isEmpty()) {
|
||||||
Long jobExecutionId = items.getItems().get(0).getJobExecutionId();
|
Long jobExecutionId = items.getItems().get(0).getJobExecutionId();
|
||||||
log.error("[BatchWriteListener] Write Error Detected! jobExecutionId: {}. Status will NOT be updated to 'S'. Error: {}",
|
log.error("[BatchWriteListener] Write Error Detected! jobExecutionId: {}. Status will NOT be updated to 'S'. Error: {}",
|
||||||
jobExecutionId, exception.getMessage());
|
jobExecutionId, exception.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
// ❗중요: 여기서 예외를 다시 던져야 배치가 중단(FAILED)됨
|
|
||||||
// 만약 여기서 예외를 던지지 않으면 배치는 다음 청크를 계속 시도할 수 있음
|
|
||||||
if (exception instanceof RuntimeException) {
|
if (exception instanceof RuntimeException) {
|
||||||
throw (RuntimeException) exception;
|
throw (RuntimeException) exception;
|
||||||
} else {
|
} else {
|
||||||
throw new RuntimeException("Force stop batch due to write error", exception);
|
throw new RuntimeException("Force stop batch due to write error", exception);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
@ -0,0 +1,26 @@
|
|||||||
|
package com.snp.batch.global.config;
|
||||||
|
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.Setter;
|
||||||
|
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||||
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.util.LinkedHashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@Getter
|
||||||
|
@Setter
|
||||||
|
@Component
|
||||||
|
@ConfigurationProperties(prefix = "app.batch")
|
||||||
|
public class BatchTableProperties {
|
||||||
|
|
||||||
|
private SchemaConfig sourceSchema = new SchemaConfig();
|
||||||
|
private SchemaConfig targetSchema = new SchemaConfig();
|
||||||
|
|
||||||
|
@Getter
|
||||||
|
@Setter
|
||||||
|
public static class SchemaConfig {
|
||||||
|
private String name;
|
||||||
|
private Map<String, String> tables = new LinkedHashMap<>();
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -3,8 +3,11 @@ package com.snp.batch.global.controller;
|
|||||||
import com.snp.batch.global.dto.JobExecutionDto;
|
import com.snp.batch.global.dto.JobExecutionDto;
|
||||||
import com.snp.batch.global.dto.ScheduleRequest;
|
import com.snp.batch.global.dto.ScheduleRequest;
|
||||||
import com.snp.batch.global.dto.ScheduleResponse;
|
import com.snp.batch.global.dto.ScheduleResponse;
|
||||||
|
import com.snp.batch.global.dto.SyncDataPreviewResponse;
|
||||||
|
import com.snp.batch.global.dto.SyncStatusResponse;
|
||||||
import com.snp.batch.service.BatchService;
|
import com.snp.batch.service.BatchService;
|
||||||
import com.snp.batch.service.ScheduleService;
|
import com.snp.batch.service.ScheduleService;
|
||||||
|
import com.snp.batch.service.SyncStatusService;
|
||||||
import io.swagger.v3.oas.annotations.Operation;
|
import io.swagger.v3.oas.annotations.Operation;
|
||||||
import io.swagger.v3.oas.annotations.Parameter;
|
import io.swagger.v3.oas.annotations.Parameter;
|
||||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
@ -27,6 +30,7 @@ public class BatchController {
|
|||||||
|
|
||||||
private final BatchService batchService;
|
private final BatchService batchService;
|
||||||
private final ScheduleService scheduleService;
|
private final ScheduleService scheduleService;
|
||||||
|
private final SyncStatusService syncStatusService;
|
||||||
|
|
||||||
@Operation(summary = "배치 작업 실행", description = "지정된 배치 작업을 즉시 실행합니다. 쿼리 파라미터로 Job Parameters 전달 가능")
|
@Operation(summary = "배치 작업 실행", description = "지정된 배치 작업을 즉시 실행합니다. 쿼리 파라미터로 Job Parameters 전달 가능")
|
||||||
@ApiResponses(value = {
|
@ApiResponses(value = {
|
||||||
@ -324,4 +328,84 @@ public class BatchController {
|
|||||||
com.snp.batch.global.dto.ExecutionStatisticsDto stats = batchService.getJobStatistics(jobName, days);
|
com.snp.batch.global.dto.ExecutionStatisticsDto stats = batchService.getJobStatistics(jobName, days);
|
||||||
return ResponseEntity.ok(stats);
|
return ResponseEntity.ok(stats);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── 동기화 현황 API ──────────────────────────────────────────
|
||||||
|
|
||||||
|
@Operation(summary = "동기화 현황 조회", description = "전체 테이블의 batch_flag 기반 동기화 현황을 조회합니다")
|
||||||
|
@GetMapping("/sync-status")
|
||||||
|
public ResponseEntity<SyncStatusResponse> getSyncStatus() {
|
||||||
|
log.info("Received request to get sync status");
|
||||||
|
try {
|
||||||
|
SyncStatusResponse status = syncStatusService.getSyncStatus();
|
||||||
|
return ResponseEntity.ok(status);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error getting sync status", e);
|
||||||
|
return ResponseEntity.internalServerError().build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(summary = "동기화 데이터 미리보기", description = "특정 테이블의 최근 동기화 성공 데이터를 조회합니다")
|
||||||
|
@GetMapping("/sync-status/{tableKey}/preview")
|
||||||
|
public ResponseEntity<SyncDataPreviewResponse> getSyncDataPreview(
|
||||||
|
@Parameter(description = "테이블 키 (예: ship-001)", required = true)
|
||||||
|
@PathVariable String tableKey,
|
||||||
|
@Parameter(description = "조회 건수", example = "10")
|
||||||
|
@RequestParam(defaultValue = "10") int limit) {
|
||||||
|
log.info("Received request to preview sync data for: {}", tableKey);
|
||||||
|
try {
|
||||||
|
SyncDataPreviewResponse preview = syncStatusService.getDataPreview(tableKey, limit);
|
||||||
|
return ResponseEntity.ok(preview);
|
||||||
|
} catch (IllegalArgumentException e) {
|
||||||
|
return ResponseEntity.badRequest().build();
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error getting sync data preview for: {}", tableKey, e);
|
||||||
|
return ResponseEntity.internalServerError().build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(summary = "P 상태 고착 레코드 조회", description = "특정 테이블의 batch_flag='P' 고착 레코드를 조회합니다")
|
||||||
|
@GetMapping("/sync-status/{tableKey}/stuck")
|
||||||
|
public ResponseEntity<SyncDataPreviewResponse> getStuckRecords(
|
||||||
|
@Parameter(description = "테이블 키 (예: ship-001)", required = true)
|
||||||
|
@PathVariable String tableKey,
|
||||||
|
@Parameter(description = "조회 건수", example = "50")
|
||||||
|
@RequestParam(defaultValue = "50") int limit) {
|
||||||
|
log.info("Received request to get stuck records for: {}", tableKey);
|
||||||
|
try {
|
||||||
|
SyncDataPreviewResponse stuck = syncStatusService.getStuckRecords(tableKey, limit);
|
||||||
|
return ResponseEntity.ok(stuck);
|
||||||
|
} catch (IllegalArgumentException e) {
|
||||||
|
return ResponseEntity.badRequest().build();
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error getting stuck records for: {}", tableKey, e);
|
||||||
|
return ResponseEntity.internalServerError().build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Operation(summary = "P 상태 고착 레코드 리셋", description = "특정 테이블의 batch_flag='P' 레코드를 'N'으로 리셋합니다")
|
||||||
|
@PostMapping("/sync-status/{tableKey}/reset")
|
||||||
|
public ResponseEntity<Map<String, Object>> resetStuckRecords(
|
||||||
|
@Parameter(description = "테이블 키 (예: ship-001)", required = true)
|
||||||
|
@PathVariable String tableKey) {
|
||||||
|
log.info("Received request to reset stuck records for: {}", tableKey);
|
||||||
|
try {
|
||||||
|
int resetCount = syncStatusService.resetStuckRecords(tableKey);
|
||||||
|
return ResponseEntity.ok(Map.of(
|
||||||
|
"success", true,
|
||||||
|
"message", "P→N 리셋 완료",
|
||||||
|
"resetCount", resetCount
|
||||||
|
));
|
||||||
|
} catch (IllegalArgumentException e) {
|
||||||
|
return ResponseEntity.badRequest().body(Map.of(
|
||||||
|
"success", false,
|
||||||
|
"message", e.getMessage()
|
||||||
|
));
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Error resetting stuck records for: {}", tableKey, e);
|
||||||
|
return ResponseEntity.internalServerError().body(Map.of(
|
||||||
|
"success", false,
|
||||||
|
"message", "리셋 실패: " + e.getMessage()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -13,9 +13,9 @@ import org.springframework.web.bind.annotation.GetMapping;
|
|||||||
public class WebViewController {
|
public class WebViewController {
|
||||||
|
|
||||||
@GetMapping({"/", "/jobs", "/executions", "/executions/{id:\\d+}",
|
@GetMapping({"/", "/jobs", "/executions", "/executions/{id:\\d+}",
|
||||||
"/schedules", "/schedule-timeline",
|
"/schedules", "/schedule-timeline", "/sync-status",
|
||||||
"/jobs/**", "/executions/**",
|
"/jobs/**", "/executions/**",
|
||||||
"/schedules/**", "/schedule-timeline/**"})
|
"/schedules/**", "/schedule-timeline/**", "/sync-status/**"})
|
||||||
public String forward() {
|
public String forward() {
|
||||||
return "forward:/index.html";
|
return "forward:/index.html";
|
||||||
}
|
}
|
||||||
|
|||||||
@ -0,0 +1,26 @@
|
|||||||
|
package com.snp.batch.global.dto;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 동기화 성공 데이터 미리보기 응답
|
||||||
|
*/
|
||||||
|
@Getter
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class SyncDataPreviewResponse {
|
||||||
|
|
||||||
|
private String tableKey;
|
||||||
|
private String targetTable;
|
||||||
|
private String targetSchema;
|
||||||
|
private List<String> columns;
|
||||||
|
private List<Map<String, Object>> rows;
|
||||||
|
private long totalCount;
|
||||||
|
}
|
||||||
@ -0,0 +1,59 @@
|
|||||||
|
package com.snp.batch.global.dto;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 동기화 현황 전체 응답
|
||||||
|
*/
|
||||||
|
@Getter
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class SyncStatusResponse {
|
||||||
|
|
||||||
|
private SyncStatusSummary summary;
|
||||||
|
private List<SyncDomainGroup> domains;
|
||||||
|
|
||||||
|
@Getter
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class SyncStatusSummary {
|
||||||
|
private int totalTables;
|
||||||
|
private long pendingCount;
|
||||||
|
private long processingCount;
|
||||||
|
private long completedCount;
|
||||||
|
private int stuckTables;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Getter
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class SyncDomainGroup {
|
||||||
|
private String domain;
|
||||||
|
private String domainLabel;
|
||||||
|
private List<SyncTableStatus> tables;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Getter
|
||||||
|
@Builder
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class SyncTableStatus {
|
||||||
|
private String tableKey;
|
||||||
|
private String sourceTable;
|
||||||
|
private String targetTable;
|
||||||
|
private String domain;
|
||||||
|
private long pendingCount;
|
||||||
|
private long processingCount;
|
||||||
|
private long completedCount;
|
||||||
|
private String lastSyncTime;
|
||||||
|
private boolean stuck;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.code.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
|
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
|
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
|
||||||
@ -113,14 +110,12 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<FlagCodeEntity> flagCodeWriteListener() {
|
public BatchWriteListener<FlagCodeEntity> flagCodeWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFlagCode);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceFlagCode);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<Stat5CodeEntity> stat5CodeWriteListener() {
|
public BatchWriteListener<Stat5CodeEntity> stat5CodeWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStat5Code);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceStat5Code);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -129,12 +124,10 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
|
|||||||
public Step flagCodeSyncStep() {
|
public Step flagCodeSyncStep() {
|
||||||
log.info("Step 생성: flagCodeSyncStep");
|
log.info("Step 생성: flagCodeSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<FlagCodeDto, FlagCodeEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<FlagCodeDto, FlagCodeEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<FlagCodeDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(flagCodeWriteListener())
|
.listener(flagCodeWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -143,12 +136,10 @@ public class CodeSyncJobConfig extends BaseJobConfig<FlagCodeDto, FlagCodeEntity
|
|||||||
public Step stat5CodeSyncStep() {
|
public Step stat5CodeSyncStep() {
|
||||||
log.info("Step 생성: stat5CodeSyncStep");
|
log.info("Step 생성: stat5CodeSyncStep");
|
||||||
return new StepBuilder("stat5CodeSyncStep", jobRepository)
|
return new StepBuilder("stat5CodeSyncStep", jobRepository)
|
||||||
.<Stat5CodeDto, Stat5CodeEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<Stat5CodeDto, Stat5CodeEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(stat5CodeReader(businessDataSource, tableMetaInfo))
|
.reader(stat5CodeReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new Stat5CodeProcessor())
|
.processor(new Stat5CodeProcessor())
|
||||||
.writer(new Stat5CodeWriter(codeRepository))
|
.writer(new Stat5CodeWriter(codeRepository))
|
||||||
.listener(new GroupByExecutionIdReadListener<Stat5CodeDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(stat5CodeWriteListener())
|
.listener(stat5CodeWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.code.reader;
|
package com.snp.batch.jobs.datasync.batch.code.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
|
import com.snp.batch.jobs.datasync.batch.code.dto.FlagCodeDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class FlagCodeReader implements ItemReader<FlagCodeDto> {
|
public class FlagCodeReader extends BaseSyncReader<FlagCodeDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<FlagCodeDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public FlagCodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public FlagCodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public FlagCodeDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceFlagCode;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected FlagCodeDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFlagCode), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[FlagCodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFlagCode);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return FlagCodeDto.builder()
|
return FlagCodeDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -56,13 +32,5 @@ public class FlagCodeReader implements ItemReader<FlagCodeDto> {
|
|||||||
.isoTwoCd(rs.getString("iso_two_cd"))
|
.isoTwoCd(rs.getString("iso_two_cd"))
|
||||||
.isoThrCd(rs.getString("iso_thr_cd"))
|
.isoThrCd(rs.getString("iso_thr_cd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFlagCode);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.code.reader;
|
package com.snp.batch.jobs.datasync.batch.code.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
|
import com.snp.batch.jobs.datasync.batch.code.dto.Stat5CodeDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class Stat5CodeReader implements ItemReader<Stat5CodeDto> {
|
public class Stat5CodeReader extends BaseSyncReader<Stat5CodeDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<Stat5CodeDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public Stat5CodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public Stat5CodeReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Stat5CodeDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceStat5Code;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected Stat5CodeDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStat5Code), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[Stat5CodeReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStat5Code);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return Stat5CodeDto.builder()
|
return Stat5CodeDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.lvOne(rs.getString("lv_one"))
|
.lvOne(rs.getString("lv_one"))
|
||||||
@ -63,13 +39,5 @@ public class Stat5CodeReader implements ItemReader<Stat5CodeDto> {
|
|||||||
.dtlDesc(rs.getString("dtl_desc"))
|
.dtlDesc(rs.getString("dtl_desc"))
|
||||||
.rlsIem(rs.getString("rls_iem"))
|
.rlsIem(rs.getString("rls_iem"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStat5Code);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -80,7 +80,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
|
|||||||
if (flagCodeEntityList == null || flagCodeEntityList.isEmpty()) {
|
if (flagCodeEntityList == null || flagCodeEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, flagCodeEntityList, flagCodeEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, flagCodeEntityList, flagCodeEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -92,7 +92,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "FlagCodeEntity", flagCodeEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindFlagCode(PreparedStatement pstmt, FlagCodeEntity entity) throws Exception {
|
public void bindFlagCode(PreparedStatement pstmt, FlagCodeEntity entity) throws Exception {
|
||||||
@ -111,7 +111,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
|
|||||||
if (stat5CodeEntityList == null || stat5CodeEntityList.isEmpty()) {
|
if (stat5CodeEntityList == null || stat5CodeEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, stat5CodeEntityList, stat5CodeEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, stat5CodeEntityList, stat5CodeEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -123,7 +123,7 @@ public class CodeRepositoryImpl extends MultiDataSourceJdbcRepository<FlagCodeEn
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "Stat5CodeEntity", stat5CodeEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindStat5Code(PreparedStatement pstmt, Stat5CodeEntity entity) throws Exception {
|
public void bindStat5Code(PreparedStatement pstmt, Stat5CodeEntity entity) throws Exception {
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
|
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity;
|
import com.snp.batch.jobs.datasync.batch.compliance.entity.CompanyComplianceEntity;
|
||||||
@ -99,8 +96,7 @@ public class CompanyComplianceSyncJobConfig extends BaseJobConfig<CompanyComplia
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<CompanyComplianceEntity> companyComplianceWriteListener() {
|
public BatchWriteListener<CompanyComplianceEntity> companyComplianceWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTbCompanyComplianceInfo);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -109,12 +105,10 @@ public class CompanyComplianceSyncJobConfig extends BaseJobConfig<CompanyComplia
|
|||||||
public Step companyComplianceSyncStep() {
|
public Step companyComplianceSyncStep() {
|
||||||
log.info("Step 생성: companyComplianceSyncStep");
|
log.info("Step 생성: companyComplianceSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<CompanyComplianceDto, CompanyComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<CompanyComplianceDto, CompanyComplianceEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<CompanyComplianceDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(companyComplianceWriteListener())
|
.listener(companyComplianceWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.compliance.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
|
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity;
|
import com.snp.batch.jobs.datasync.batch.compliance.entity.ShipComplianceEntity;
|
||||||
@ -99,8 +96,7 @@ public class ShipComplianceSyncJobConfig extends BaseJobConfig<ShipComplianceDto
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<ShipComplianceEntity> shipComplianceWriteListener() {
|
public BatchWriteListener<ShipComplianceEntity> shipComplianceWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCompliance);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCompliance);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -109,12 +105,10 @@ public class ShipComplianceSyncJobConfig extends BaseJobConfig<ShipComplianceDto
|
|||||||
public Step shipComplianceSyncStep() {
|
public Step shipComplianceSyncStep() {
|
||||||
log.info("Step 생성: shipComplianceSyncStep");
|
log.info("Step 생성: shipComplianceSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<ShipComplianceDto, ShipComplianceEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<ShipComplianceDto, ShipComplianceEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<ShipComplianceDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(shipComplianceWriteListener())
|
.listener(shipComplianceWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.compliance.reader;
|
package com.snp.batch.jobs.datasync.batch.compliance.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
|
import com.snp.batch.jobs.datasync.batch.compliance.dto.CompanyComplianceDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class CompanyComplianceReader implements ItemReader<CompanyComplianceDto> {
|
public class CompanyComplianceReader extends BaseSyncReader<CompanyComplianceDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<CompanyComplianceDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public CompanyComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public CompanyComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public CompanyComplianceDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTbCompanyComplianceInfo;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected CompanyComplianceDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyComplianceInfo), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[CompanyComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp lstMdfcnDtTs = rs.getTimestamp("lst_mdfcn_dt");
|
Timestamp lstMdfcnDtTs = rs.getTimestamp("lst_mdfcn_dt");
|
||||||
|
|
||||||
return CompanyComplianceDto.builder()
|
return CompanyComplianceDto.builder()
|
||||||
@ -70,13 +46,5 @@ public class CompanyComplianceReader implements ItemReader<CompanyComplianceDto>
|
|||||||
.companyUnSanctionList(rs.getObject("company_un_sanction_list") != null ? rs.getLong("company_un_sanction_list") : null)
|
.companyUnSanctionList(rs.getObject("company_un_sanction_list") != null ? rs.getLong("company_un_sanction_list") : null)
|
||||||
.prntCompanyComplianceRisk(rs.getObject("prnt_company_compliance_risk") != null ? rs.getLong("prnt_company_compliance_risk") : null)
|
.prntCompanyComplianceRisk(rs.getObject("prnt_company_compliance_risk") != null ? rs.getLong("prnt_company_compliance_risk") : null)
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyComplianceInfo);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.compliance.reader;
|
package com.snp.batch.jobs.datasync.batch.compliance.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
|
import com.snp.batch.jobs.datasync.batch.compliance.dto.ShipComplianceDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class ShipComplianceReader implements ItemReader<ShipComplianceDto> {
|
public class ShipComplianceReader extends BaseSyncReader<ShipComplianceDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<ShipComplianceDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public ShipComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public ShipComplianceReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ShipComplianceDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceCompliance;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected ShipComplianceDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCompliance), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[ShipComplianceReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCompliance);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
||||||
|
|
||||||
return ShipComplianceDto.builder()
|
return ShipComplianceDto.builder()
|
||||||
@ -89,13 +65,5 @@ public class ShipComplianceReader implements ItemReader<ShipComplianceDto> {
|
|||||||
.shipSwiSanctionList(rs.getString("ship_swi_sanction_list"))
|
.shipSwiSanctionList(rs.getString("ship_swi_sanction_list"))
|
||||||
.shipUnSanctionList(rs.getString("ship_un_sanction_list"))
|
.shipUnSanctionList(rs.getString("ship_un_sanction_list"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCompliance);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -80,7 +80,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
if (shipComplianceEntityList == null || shipComplianceEntityList.isEmpty()) {
|
if (shipComplianceEntityList == null || shipComplianceEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, shipComplianceEntityList, shipComplianceEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, shipComplianceEntityList, shipComplianceEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -92,7 +92,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceEntity", shipComplianceEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -122,7 +122,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
if (companyComplianceEntityList == null || companyComplianceEntityList.isEmpty()) {
|
if (companyComplianceEntityList == null || companyComplianceEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, companyComplianceEntityList, companyComplianceEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, companyComplianceEntityList, companyComplianceEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -134,7 +134,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceEntity", companyComplianceEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -281,7 +281,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
if (companyComplianceChangeEntityList == null || companyComplianceChangeEntityList.isEmpty()) {
|
if (companyComplianceChangeEntityList == null || companyComplianceChangeEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, companyComplianceChangeEntityList, companyComplianceChangeEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, companyComplianceChangeEntityList, companyComplianceChangeEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -293,7 +293,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "CompanyComplianceChangeEntity", companyComplianceChangeEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindCompanyComplianceChange(PreparedStatement pstmt, CompanyComplianceChangeEntity entity) throws Exception {
|
public void bindCompanyComplianceChange(PreparedStatement pstmt, CompanyComplianceChangeEntity entity) throws Exception {
|
||||||
@ -312,7 +312,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
if (shipComplianceChangeEntityList == null || shipComplianceChangeEntityList.isEmpty()) {
|
if (shipComplianceChangeEntityList == null || shipComplianceChangeEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, shipComplianceChangeEntityList, shipComplianceChangeEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, shipComplianceChangeEntityList, shipComplianceChangeEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -324,7 +324,7 @@ public class ComplianceRepositoryImpl extends MultiDataSourceJdbcRepository<Ship
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "ShipComplianceChangeEntity", shipComplianceChangeEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindShipComplianceChange(PreparedStatement pstmt, ShipComplianceChangeEntity entity) throws Exception {
|
public void bindShipComplianceChange(PreparedStatement pstmt, ShipComplianceChangeEntity entity) throws Exception {
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.event.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
|
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
|
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
|
||||||
@ -147,26 +144,22 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<EventEntity> eventWriteListener() {
|
public BatchWriteListener<EventEntity> eventWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEvent);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEvent);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<EventCargoEntity> eventCargoWriteListener() {
|
public BatchWriteListener<EventCargoEntity> eventCargoWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventCargo);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEventCargo);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<EventHumanCasualtyEntity> eventHumanCasualtyWriteListener() {
|
public BatchWriteListener<EventHumanCasualtyEntity> eventHumanCasualtyWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventHumanCasualty);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEventHumanCasualty);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<EventRelationshipEntity> eventRelationshipWriteListener() {
|
public BatchWriteListener<EventRelationshipEntity> eventRelationshipWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceEventRelationship);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceEventRelationship);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -175,12 +168,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
|
|||||||
public Step eventSyncStep() {
|
public Step eventSyncStep() {
|
||||||
log.info("Step 생성: eventSyncStep");
|
log.info("Step 생성: eventSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<EventDto, EventEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<EventDto, EventEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<EventDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(eventWriteListener())
|
.listener(eventWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -189,12 +180,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
|
|||||||
public Step eventCargoSyncStep() {
|
public Step eventCargoSyncStep() {
|
||||||
log.info("Step 생성: eventCargoSyncStep");
|
log.info("Step 생성: eventCargoSyncStep");
|
||||||
return new StepBuilder("eventCargoSyncStep", jobRepository)
|
return new StepBuilder("eventCargoSyncStep", jobRepository)
|
||||||
.<EventCargoDto, EventCargoEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<EventCargoDto, EventCargoEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(eventCargoReader(businessDataSource, tableMetaInfo))
|
.reader(eventCargoReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new EventCargoProcessor())
|
.processor(new EventCargoProcessor())
|
||||||
.writer(new EventCargoWriter(eventRepository, transactionManager, subChunkSize))
|
.writer(new EventCargoWriter(eventRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<EventCargoDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(eventCargoWriteListener())
|
.listener(eventCargoWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -203,12 +192,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
|
|||||||
public Step eventHumanCasualtySyncStep() {
|
public Step eventHumanCasualtySyncStep() {
|
||||||
log.info("Step 생성: eventHumanCasualtySyncStep");
|
log.info("Step 생성: eventHumanCasualtySyncStep");
|
||||||
return new StepBuilder("eventHumanCasualtySyncStep", jobRepository)
|
return new StepBuilder("eventHumanCasualtySyncStep", jobRepository)
|
||||||
.<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<EventHumanCasualtyDto, EventHumanCasualtyEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(eventHumanCasualtyReader(businessDataSource, tableMetaInfo))
|
.reader(eventHumanCasualtyReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new EventHumanCasualtyProcessor())
|
.processor(new EventHumanCasualtyProcessor())
|
||||||
.writer(new EventHumanCasualtyWriter(eventRepository, transactionManager, subChunkSize))
|
.writer(new EventHumanCasualtyWriter(eventRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<EventHumanCasualtyDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(eventHumanCasualtyWriteListener())
|
.listener(eventHumanCasualtyWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -217,12 +204,10 @@ public class EventSyncJobConfig extends BaseJobConfig<EventDto, EventEntity> {
|
|||||||
public Step eventRelationshipSyncStep() {
|
public Step eventRelationshipSyncStep() {
|
||||||
log.info("Step 생성: eventRelationshipSyncStep");
|
log.info("Step 생성: eventRelationshipSyncStep");
|
||||||
return new StepBuilder("eventRelationshipSyncStep", jobRepository)
|
return new StepBuilder("eventRelationshipSyncStep", jobRepository)
|
||||||
.<EventRelationshipDto, EventRelationshipEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<EventRelationshipDto, EventRelationshipEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(eventRelationshipReader(businessDataSource, tableMetaInfo))
|
.reader(eventRelationshipReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new EventRelationshipProcessor())
|
.processor(new EventRelationshipProcessor())
|
||||||
.writer(new EventRelationshipWriter(eventRepository, transactionManager, subChunkSize))
|
.writer(new EventRelationshipWriter(eventRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<EventRelationshipDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(eventRelationshipWriteListener())
|
.listener(eventRelationshipWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.event.reader;
|
package com.snp.batch.jobs.datasync.batch.event.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
|
import com.snp.batch.jobs.datasync.batch.event.dto.EventCargoDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class EventCargoReader implements ItemReader<EventCargoDto> {
|
public class EventCargoReader extends BaseSyncReader<EventCargoDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<EventCargoDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public EventCargoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public EventCargoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public EventCargoDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceEventCargo;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected EventCargoDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventCargo), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[EventCargoReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventCargo);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return EventCargoDto.builder()
|
return EventCargoDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
|
.eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
|
||||||
@ -61,13 +37,5 @@ public class EventCargoReader implements ItemReader<EventCargoDto> {
|
|||||||
.riskYn(rs.getString("risk_yn"))
|
.riskYn(rs.getString("risk_yn"))
|
||||||
.text(rs.getString("text"))
|
.text(rs.getString("text"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventCargo);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.event.reader;
|
package com.snp.batch.jobs.datasync.batch.event.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.event.dto.EventHumanCasualtyDto;
|
import com.snp.batch.jobs.datasync.batch.event.dto.EventHumanCasualtyDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class EventHumanCasualtyReader implements ItemReader<EventHumanCasualtyDto> {
|
public class EventHumanCasualtyReader extends BaseSyncReader<EventHumanCasualtyDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<EventHumanCasualtyDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public EventHumanCasualtyReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public EventHumanCasualtyReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public EventHumanCasualtyDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceEventHumanCasualty;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected EventHumanCasualtyDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventHumanCasualty), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[EventHumanCasualtyReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventHumanCasualty);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return EventHumanCasualtyDto.builder()
|
return EventHumanCasualtyDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.eventId(rs.getObject("event_id") != null ? rs.getLong("event_id") : null)
|
.eventId(rs.getObject("event_id") != null ? rs.getLong("event_id") : null)
|
||||||
@ -56,13 +32,5 @@ public class EventHumanCasualtyReader implements ItemReader<EventHumanCasualtyDt
|
|||||||
.qualfr(rs.getString("qualfr"))
|
.qualfr(rs.getString("qualfr"))
|
||||||
.cnt(rs.getObject("cnt") != null ? rs.getLong("cnt") : null)
|
.cnt(rs.getObject("cnt") != null ? rs.getLong("cnt") : null)
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventHumanCasualty);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,31 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.event.reader;
|
package com.snp.batch.jobs.datasync.batch.event.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
|
import com.snp.batch.jobs.datasync.batch.event.dto.EventDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
import java.time.ZoneId;
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class EventReader implements ItemReader<EventDto> {
|
public class EventReader extends BaseSyncReader<EventDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<EventDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public EventReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public EventReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public EventDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceEvent;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected EventDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEvent), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[EventReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEvent);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp pstgYmdTs = rs.getTimestamp("pstg_ymd");
|
Timestamp pstgYmdTs = rs.getTimestamp("pstg_ymd");
|
||||||
Timestamp eventStartDayTs = rs.getTimestamp("event_start_day");
|
Timestamp eventStartDayTs = rs.getTimestamp("event_start_day");
|
||||||
Timestamp eventEndDayTs = rs.getTimestamp("event_end_day");
|
Timestamp eventEndDayTs = rs.getTimestamp("event_end_day");
|
||||||
@ -58,9 +35,9 @@ public class EventReader implements ItemReader<EventDto> {
|
|||||||
.eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
|
.eventId(rs.getObject("event_id") != null ? rs.getInt("event_id") : null)
|
||||||
.acdntId(rs.getString("acdnt_id"))
|
.acdntId(rs.getString("acdnt_id"))
|
||||||
.imoNo(rs.getString("imo_no"))
|
.imoNo(rs.getString("imo_no"))
|
||||||
.pstgYmd(pstgYmdTs != null ? pstgYmdTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
|
.pstgYmd(pstgYmdTs != null ? pstgYmdTs.toInstant().atZone(ZoneId.systemDefault()) : null)
|
||||||
.eventStartDay(eventStartDayTs != null ? eventStartDayTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
|
.eventStartDay(eventStartDayTs != null ? eventStartDayTs.toInstant().atZone(ZoneId.systemDefault()) : null)
|
||||||
.eventEndDay(eventEndDayTs != null ? eventEndDayTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
|
.eventEndDay(eventEndDayTs != null ? eventEndDayTs.toInstant().atZone(ZoneId.systemDefault()) : null)
|
||||||
.embrkTryYn(rs.getString("embrk_try_yn"))
|
.embrkTryYn(rs.getString("embrk_try_yn"))
|
||||||
.cargoCapacityStatusCd(rs.getString("cargo_capacity_status_cd"))
|
.cargoCapacityStatusCd(rs.getString("cargo_capacity_status_cd"))
|
||||||
.acdntActn(rs.getString("acdnt_actn"))
|
.acdntActn(rs.getString("acdnt_actn"))
|
||||||
@ -98,13 +75,5 @@ public class EventReader implements ItemReader<EventDto> {
|
|||||||
.shipType(rs.getString("ship_type"))
|
.shipType(rs.getString("ship_type"))
|
||||||
.shipTypeNm(rs.getString("ship_type_nm"))
|
.shipTypeNm(rs.getString("ship_type_nm"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEvent);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.event.reader;
|
package com.snp.batch.jobs.datasync.batch.event.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.event.dto.EventRelationshipDto;
|
import com.snp.batch.jobs.datasync.batch.event.dto.EventRelationshipDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class EventRelationshipReader implements ItemReader<EventRelationshipDto> {
|
public class EventRelationshipReader extends BaseSyncReader<EventRelationshipDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<EventRelationshipDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public EventRelationshipReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public EventRelationshipReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public EventRelationshipDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceEventRelationship;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected EventRelationshipDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceEventRelationship), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[EventRelationshipReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceEventRelationship);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return EventRelationshipDto.builder()
|
return EventRelationshipDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.acdntId(rs.getString("acdnt_id"))
|
.acdntId(rs.getString("acdnt_id"))
|
||||||
@ -58,13 +34,5 @@ public class EventRelationshipReader implements ItemReader<EventRelationshipDto>
|
|||||||
.relTypeCd(rs.getString("rel_type_cd"))
|
.relTypeCd(rs.getString("rel_type_cd"))
|
||||||
.relType(rs.getString("rel_type"))
|
.relType(rs.getString("rel_type"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceEventRelationship);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -84,7 +84,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
if (eventEntityList == null || eventEntityList.isEmpty()) {
|
if (eventEntityList == null || eventEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "EventEntity", eventEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "EventEntity", eventEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, eventEntityList, eventEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, eventEntityList, eventEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -96,7 +96,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "EventEntity", eventEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "EventEntity", eventEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindEvent(PreparedStatement pstmt, EventEntity entity) throws Exception {
|
public void bindEvent(PreparedStatement pstmt, EventEntity entity) throws Exception {
|
||||||
@ -152,7 +152,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
if (eventCargoEntityList == null || eventCargoEntityList.isEmpty()) {
|
if (eventCargoEntityList == null || eventCargoEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "EventCargoEntity", eventCargoEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "EventCargoEntity", eventCargoEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, eventCargoEntityList, eventCargoEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, eventCargoEntityList, eventCargoEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -164,7 +164,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "EventCargoEntity", eventCargoEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "EventCargoEntity", eventCargoEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindEventCargo(PreparedStatement pstmt, EventCargoEntity entity) throws Exception {
|
public void bindEventCargo(PreparedStatement pstmt, EventCargoEntity entity) throws Exception {
|
||||||
@ -188,7 +188,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
if (eventHumanCasualtyEntityList == null || eventHumanCasualtyEntityList.isEmpty()) {
|
if (eventHumanCasualtyEntityList == null || eventHumanCasualtyEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, eventHumanCasualtyEntityList, eventHumanCasualtyEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, eventHumanCasualtyEntityList, eventHumanCasualtyEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -200,7 +200,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "EventHumanCasualtyEntity", eventHumanCasualtyEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindEventHumanCasualty(PreparedStatement pstmt, EventHumanCasualtyEntity entity) throws Exception {
|
public void bindEventHumanCasualty(PreparedStatement pstmt, EventHumanCasualtyEntity entity) throws Exception {
|
||||||
@ -219,7 +219,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
if (eventRelationshipEntityList == null || eventRelationshipEntityList.isEmpty()) {
|
if (eventRelationshipEntityList == null || eventRelationshipEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, eventRelationshipEntityList, eventRelationshipEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, eventRelationshipEntityList, eventRelationshipEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -231,7 +231,7 @@ public class EventRepositoryImpl extends MultiDataSourceJdbcRepository<EventEnti
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "EventRelationshipEntity", eventRelationshipEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindEventRelationship(PreparedStatement pstmt, EventRelationshipEntity entity) throws Exception {
|
public void bindEventRelationship(PreparedStatement pstmt, EventRelationshipEntity entity) throws Exception {
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.facility.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
|
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
|
import com.snp.batch.jobs.datasync.batch.facility.entity.FacilityPortEntity;
|
||||||
@ -102,8 +99,7 @@ public class FacilitySyncJobConfig extends BaseJobConfig<FacilityPortDto, Facili
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<FacilityPortEntity> facilityPortWriteListener() {
|
public BatchWriteListener<FacilityPortEntity> facilityPortWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFacilityPort);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceFacilityPort);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -112,12 +108,10 @@ public class FacilitySyncJobConfig extends BaseJobConfig<FacilityPortDto, Facili
|
|||||||
public Step facilityPortSyncStep() {
|
public Step facilityPortSyncStep() {
|
||||||
log.info("Step 생성: facilityPortSyncStep");
|
log.info("Step 생성: facilityPortSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<FacilityPortDto, FacilityPortEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<FacilityPortDto, FacilityPortEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<FacilityPortDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(facilityPortWriteListener())
|
.listener(facilityPortWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,31 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.facility.reader;
|
package com.snp.batch.jobs.datasync.batch.facility.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
|
import com.snp.batch.jobs.datasync.batch.facility.dto.FacilityPortDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
import java.time.ZoneId;
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class FacilityPortReader implements ItemReader<FacilityPortDto> {
|
public class FacilityPortReader extends BaseSyncReader<FacilityPortDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<FacilityPortDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public FacilityPortReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public FacilityPortReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public FacilityPortDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceFacilityPort;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected FacilityPortDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFacilityPort), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[FacilityPortReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFacilityPort);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
||||||
Timestamp regYmdTs = rs.getTimestamp("reg_ymd");
|
Timestamp regYmdTs = rs.getTimestamp("reg_ymd");
|
||||||
|
|
||||||
@ -102,16 +79,8 @@ public class FacilityPortReader implements ItemReader<FacilityPortDto> {
|
|||||||
.ecfrdPort(rs.getObject("ecfrd_port") != null ? rs.getBoolean("ecfrd_port") : null)
|
.ecfrdPort(rs.getObject("ecfrd_port") != null ? rs.getBoolean("ecfrd_port") : null)
|
||||||
.emsnCtrlArea(rs.getObject("emsn_ctrl_area") != null ? rs.getBoolean("emsn_ctrl_area") : null)
|
.emsnCtrlArea(rs.getObject("emsn_ctrl_area") != null ? rs.getBoolean("emsn_ctrl_area") : null)
|
||||||
.wsPort(rs.getObject("ws_port") != null ? rs.getLong("ws_port") : null)
|
.wsPort(rs.getObject("ws_port") != null ? rs.getLong("ws_port") : null)
|
||||||
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
|
.lastMdfcnDt(lastMdfcnDtTs != null ? lastMdfcnDtTs.toInstant().atZone(ZoneId.systemDefault()) : null)
|
||||||
.regYmd(regYmdTs != null ? regYmdTs.toInstant().atZone(java.time.ZoneId.systemDefault()) : null)
|
.regYmd(regYmdTs != null ? regYmdTs.toInstant().atZone(ZoneId.systemDefault()) : null)
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFacilityPort);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -81,7 +81,7 @@ public class FacilityRepositoryImpl extends MultiDataSourceJdbcRepository<Facili
|
|||||||
if (facilityPortEntityList == null || facilityPortEntityList.isEmpty()) {
|
if (facilityPortEntityList == null || facilityPortEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, facilityPortEntityList, facilityPortEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, facilityPortEntityList, facilityPortEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -93,7 +93,7 @@ public class FacilityRepositoryImpl extends MultiDataSourceJdbcRepository<Facili
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "FacilityPortEntity", facilityPortEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindFacilityPort(PreparedStatement pstmt, FacilityPortEntity entity) throws Exception {
|
public void bindFacilityPort(PreparedStatement pstmt, FacilityPortEntity entity) throws Exception {
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.AnchorageCallEntity;
|
||||||
@ -95,20 +92,17 @@ public class AnchorageCallSyncJobConfig extends BaseJobConfig<AnchorageCallDto,
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<AnchorageCallEntity> anchorageCallWriteListener() {
|
public BatchWriteListener<AnchorageCallEntity> anchorageCallWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTAnchorageCall);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTAnchorageCall);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "anchorageCallSyncStep")
|
@Bean(name = "anchorageCallSyncStep")
|
||||||
public Step anchorageCallSyncStep() {
|
public Step anchorageCallSyncStep() {
|
||||||
log.info("Step 생성: anchorageCallSyncStep");
|
log.info("Step 생성: anchorageCallSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<AnchorageCallDto, AnchorageCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<AnchorageCallDto, AnchorageCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<AnchorageCallDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(anchorageCallWriteListener())
|
.listener(anchorageCallWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.BerthCallEntity;
|
||||||
@ -95,20 +92,17 @@ public class BerthCallSyncJobConfig extends BaseJobConfig<BerthCallDto, BerthCal
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<BerthCallEntity> berthCallWriteListener() {
|
public BatchWriteListener<BerthCallEntity> berthCallWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTBerthCall);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTBerthCall);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "berthCallSyncStep")
|
@Bean(name = "berthCallSyncStep")
|
||||||
public Step berthCallSyncStep() {
|
public Step berthCallSyncStep() {
|
||||||
log.info("Step 생성: berthCallSyncStep");
|
log.info("Step 생성: berthCallSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<BerthCallDto, BerthCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<BerthCallDto, BerthCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<BerthCallDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(berthCallWriteListener())
|
.listener(berthCallWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.CurrentlyAtEntity;
|
||||||
@ -95,20 +92,17 @@ public class CurrentlyAtSyncJobConfig extends BaseJobConfig<CurrentlyAtDto, Curr
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<CurrentlyAtEntity> currentlyAtWriteListener() {
|
public BatchWriteListener<CurrentlyAtEntity> currentlyAtWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTCurrentlyAt);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTCurrentlyAt);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "currentlyAtSyncStep")
|
@Bean(name = "currentlyAtSyncStep")
|
||||||
public Step currentlyAtSyncStep() {
|
public Step currentlyAtSyncStep() {
|
||||||
log.info("Step 생성: currentlyAtSyncStep");
|
log.info("Step 생성: currentlyAtSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<CurrentlyAtDto, CurrentlyAtEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<CurrentlyAtDto, CurrentlyAtEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<CurrentlyAtDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(currentlyAtWriteListener())
|
.listener(currentlyAtWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.DestinationEntity;
|
||||||
@ -95,20 +92,17 @@ public class DestinationSyncJobConfig extends BaseJobConfig<DestinationDto, Dest
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<DestinationEntity> destinationWriteListener() {
|
public BatchWriteListener<DestinationEntity> destinationWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTDestination);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTDestination);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "destinationSyncStep")
|
@Bean(name = "destinationSyncStep")
|
||||||
public Step destinationSyncStep() {
|
public Step destinationSyncStep() {
|
||||||
log.info("Step 생성: destinationSyncStep");
|
log.info("Step 생성: destinationSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<DestinationDto, DestinationEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<DestinationDto, DestinationEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<DestinationDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(destinationWriteListener())
|
.listener(destinationWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.PortCallEntity;
|
||||||
@ -95,20 +92,17 @@ public class PortCallSyncJobConfig extends BaseJobConfig<PortCallDto, PortCallEn
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<PortCallEntity> portCallWriteListener() {
|
public BatchWriteListener<PortCallEntity> portCallWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTShipStpovInfo);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTShipStpovInfo);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "portCallSyncStep")
|
@Bean(name = "portCallSyncStep")
|
||||||
public Step portCallSyncStep() {
|
public Step portCallSyncStep() {
|
||||||
log.info("Step 생성: portCallSyncStep");
|
log.info("Step 생성: portCallSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<PortCallDto, PortCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<PortCallDto, PortCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<PortCallDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(portCallWriteListener())
|
.listener(portCallWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.StsOperationEntity;
|
||||||
@ -95,20 +92,17 @@ public class StsOperationSyncJobConfig extends BaseJobConfig<StsOperationDto, St
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<StsOperationEntity> stsOperationWriteListener() {
|
public BatchWriteListener<StsOperationEntity> stsOperationWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTStsOperation);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTStsOperation);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "stsOperationSyncStep")
|
@Bean(name = "stsOperationSyncStep")
|
||||||
public Step stsOperationSyncStep() {
|
public Step stsOperationSyncStep() {
|
||||||
log.info("Step 생성: stsOperationSyncStep");
|
log.info("Step 생성: stsOperationSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<StsOperationDto, StsOperationEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<StsOperationDto, StsOperationEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<StsOperationDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(stsOperationWriteListener())
|
.listener(stsOperationWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.TerminalCallEntity;
|
||||||
@ -95,20 +92,17 @@ public class TerminalCallSyncJobConfig extends BaseJobConfig<TerminalCallDto, Te
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<TerminalCallEntity> terminalCallWriteListener() {
|
public BatchWriteListener<TerminalCallEntity> terminalCallWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTTerminalCall);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTTerminalCall);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "terminalCallSyncStep")
|
@Bean(name = "terminalCallSyncStep")
|
||||||
public Step terminalCallSyncStep() {
|
public Step terminalCallSyncStep() {
|
||||||
log.info("Step 생성: terminalCallSyncStep");
|
log.info("Step 생성: terminalCallSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<TerminalCallDto, TerminalCallEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<TerminalCallDto, TerminalCallEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<TerminalCallDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(terminalCallWriteListener())
|
.listener(terminalCallWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.movement.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity;
|
import com.snp.batch.jobs.datasync.batch.movement.entity.TransitEntity;
|
||||||
@ -95,20 +92,17 @@ public class TransitSyncJobConfig extends BaseJobConfig<TransitDto, TransitEntit
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<TransitEntity> transitWriteListener() {
|
public BatchWriteListener<TransitEntity> transitWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTTransit);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTTransit);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean(name = "transitSyncStep")
|
@Bean(name = "transitSyncStep")
|
||||||
public Step transitSyncStep() {
|
public Step transitSyncStep() {
|
||||||
log.info("Step 생성: transitSyncStep");
|
log.info("Step 생성: transitSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<TransitDto, TransitEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<TransitDto, TransitEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<TransitDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(transitWriteListener())
|
.listener(transitWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,55 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.AnchorageCallDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.math.BigDecimal;
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class AnchorageCallReader implements ItemReader<AnchorageCallDto> {
|
public class AnchorageCallReader extends BaseSyncReader<AnchorageCallDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<AnchorageCallDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public AnchorageCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public AnchorageCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public AnchorageCallDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTAnchorageCall;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected AnchorageCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTAnchorageCall), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[AnchorageCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTAnchorageCall);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
|
|
||||||
return AnchorageCallDto.builder()
|
return AnchorageCallDto.builder()
|
||||||
@ -73,13 +48,5 @@ public class AnchorageCallReader implements ItemReader<AnchorageCallDto> {
|
|||||||
.dest(rs.getString("dest"))
|
.dest(rs.getString("dest"))
|
||||||
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
|
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTAnchorageCall);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.BerthCallDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class BerthCallReader implements ItemReader<BerthCallDto> {
|
public class BerthCallReader extends BaseSyncReader<BerthCallDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<BerthCallDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public BerthCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public BerthCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BerthCallDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTBerthCall;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected BerthCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTBerthCall), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[BerthCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTBerthCall);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
|
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
|
||||||
|
|
||||||
@ -73,13 +49,5 @@ public class BerthCallReader implements ItemReader<BerthCallDto> {
|
|||||||
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
|
.isoTwoCountryCd(rs.getString("iso_two_country_cd"))
|
||||||
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
|
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTBerthCall);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.CurrentlyAtDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class CurrentlyAtReader implements ItemReader<CurrentlyAtDto> {
|
public class CurrentlyAtReader extends BaseSyncReader<CurrentlyAtDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<CurrentlyAtDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public CurrentlyAtReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public CurrentlyAtReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public CurrentlyAtDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTCurrentlyAt;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected CurrentlyAtDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTCurrentlyAt), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[CurrentlyAtReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTCurrentlyAt);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
|
|
||||||
return CurrentlyAtDto.builder()
|
return CurrentlyAtDto.builder()
|
||||||
@ -75,13 +51,5 @@ public class CurrentlyAtReader implements ItemReader<CurrentlyAtDto> {
|
|||||||
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
|
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
|
||||||
.positionInfo(rs.getString("position_info"))
|
.positionInfo(rs.getString("position_info"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTCurrentlyAt);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.DestinationDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class DestinationReader implements ItemReader<DestinationDto> {
|
public class DestinationReader extends BaseSyncReader<DestinationDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<DestinationDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public DestinationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public DestinationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public DestinationDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTDestination;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected DestinationDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTDestination), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[DestinationReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTDestination);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
|
|
||||||
return DestinationDto.builder()
|
return DestinationDto.builder()
|
||||||
@ -66,13 +42,5 @@ public class DestinationReader implements ItemReader<DestinationDto> {
|
|||||||
.positionInfo(rs.getString("position_info"))
|
.positionInfo(rs.getString("position_info"))
|
||||||
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
|
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTDestination);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.PortCallDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class PortCallReader implements ItemReader<PortCallDto> {
|
public class PortCallReader extends BaseSyncReader<PortCallDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<PortCallDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public PortCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public PortCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public PortCallDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTShipStpovInfo;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected PortCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTShipStpovInfo), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[PortCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTShipStpovInfo);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
|
|
||||||
return PortCallDto.builder()
|
return PortCallDto.builder()
|
||||||
@ -75,13 +51,5 @@ public class PortCallReader implements ItemReader<PortCallDto> {
|
|||||||
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
|
.countryIsoTwoCd(rs.getString("country_iso_two_cd"))
|
||||||
.positionInfo(rs.getString("position_info"))
|
.positionInfo(rs.getString("position_info"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTShipStpovInfo);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.StsOperationDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class StsOperationReader implements ItemReader<StsOperationDto> {
|
public class StsOperationReader extends BaseSyncReader<StsOperationDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<StsOperationDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public StsOperationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public StsOperationReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public StsOperationDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTStsOperation;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected StsOperationDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTStsOperation), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[StsOperationReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTStsOperation);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
|
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
|
||||||
|
|
||||||
@ -74,13 +50,5 @@ public class StsOperationReader implements ItemReader<StsOperationDto> {
|
|||||||
.stsType(rs.getString("sts_type"))
|
.stsType(rs.getString("sts_type"))
|
||||||
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
|
.eventStaDt(eventStaDtTs != null ? eventStaDtTs.toLocalDateTime() : null)
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTStsOperation);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.TerminalCallDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class TerminalCallReader implements ItemReader<TerminalCallDto> {
|
public class TerminalCallReader extends BaseSyncReader<TerminalCallDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<TerminalCallDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public TerminalCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public TerminalCallReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public TerminalCallDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTTerminalCall;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected TerminalCallDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTerminalCall), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[TerminalCallReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTerminalCall);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
|
Timestamp eventStaDtTs = rs.getTimestamp("event_sta_dt");
|
||||||
|
|
||||||
@ -76,13 +52,5 @@ public class TerminalCallReader implements ItemReader<TerminalCallDto> {
|
|||||||
.lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
|
.lwrnkFacilityDesc(rs.getString("lwrnk_facility_desc"))
|
||||||
.lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
|
.lwrnkFacilityType(rs.getString("lwrnk_facility_type"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTerminalCall);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
package com.snp.batch.jobs.datasync.batch.movement.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
|
import com.snp.batch.jobs.datasync.batch.movement.dto.TransitDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class TransitReader implements ItemReader<TransitDto> {
|
public class TransitReader extends BaseSyncReader<TransitDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<TransitDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public TransitReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public TransitReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public TransitDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTTransit;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected TransitDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTTransit), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[TransitReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTTransit);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
Timestamp mvmnDtTs = rs.getTimestamp("mvmn_dt");
|
||||||
|
|
||||||
return TransitDto.builder()
|
return TransitDto.builder()
|
||||||
@ -60,13 +36,5 @@ public class TransitReader implements ItemReader<TransitDto> {
|
|||||||
.facilityType(rs.getString("facility_type"))
|
.facilityType(rs.getString("facility_type"))
|
||||||
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
|
.draft(rs.getObject("draft") != null ? rs.getBigDecimal("draft") : null)
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTTransit);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -80,7 +80,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (anchorageCallEntityList == null || anchorageCallEntityList.isEmpty()) {
|
if (anchorageCallEntityList == null || anchorageCallEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, anchorageCallEntityList, anchorageCallEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, anchorageCallEntityList, anchorageCallEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -92,7 +92,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "AnchorageCallEntity", anchorageCallEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindAnchorageCall(PreparedStatement pstmt, AnchorageCallEntity entity) throws Exception {
|
public void bindAnchorageCall(PreparedStatement pstmt, AnchorageCallEntity entity) throws Exception {
|
||||||
@ -125,7 +125,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (berthCallEntityList == null || berthCallEntityList.isEmpty()) {
|
if (berthCallEntityList == null || berthCallEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "BerthCallEntity", berthCallEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "BerthCallEntity", berthCallEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, berthCallEntityList, berthCallEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, berthCallEntityList, berthCallEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -137,7 +137,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "BerthCallEntity", berthCallEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "BerthCallEntity", berthCallEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindBerthCall(PreparedStatement pstmt, BerthCallEntity entity) throws Exception {
|
public void bindBerthCall(PreparedStatement pstmt, BerthCallEntity entity) throws Exception {
|
||||||
@ -170,7 +170,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (currentlyAtEntityList == null || currentlyAtEntityList.isEmpty()) {
|
if (currentlyAtEntityList == null || currentlyAtEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, currentlyAtEntityList, currentlyAtEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, currentlyAtEntityList, currentlyAtEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -182,7 +182,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "CurrentlyAtEntity", currentlyAtEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindCurrentlyAt(PreparedStatement pstmt, CurrentlyAtEntity entity) throws Exception {
|
public void bindCurrentlyAt(PreparedStatement pstmt, CurrentlyAtEntity entity) throws Exception {
|
||||||
@ -218,7 +218,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (destinationEntityList == null || destinationEntityList.isEmpty()) {
|
if (destinationEntityList == null || destinationEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "DestinationEntity", destinationEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "DestinationEntity", destinationEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, destinationEntityList, destinationEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, destinationEntityList, destinationEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -230,7 +230,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "DestinationEntity", destinationEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "DestinationEntity", destinationEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindDestination(PreparedStatement pstmt, DestinationEntity entity) throws Exception {
|
public void bindDestination(PreparedStatement pstmt, DestinationEntity entity) throws Exception {
|
||||||
@ -257,7 +257,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (portCallEntityList == null || portCallEntityList.isEmpty()) {
|
if (portCallEntityList == null || portCallEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "PortCallEntity", portCallEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "PortCallEntity", portCallEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, portCallEntityList, portCallEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, portCallEntityList, portCallEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -269,7 +269,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "PortCallEntity", portCallEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "PortCallEntity", portCallEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindPortCall(PreparedStatement pstmt, PortCallEntity entity) throws Exception {
|
public void bindPortCall(PreparedStatement pstmt, PortCallEntity entity) throws Exception {
|
||||||
@ -305,7 +305,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (stsOperationEntityList == null || stsOperationEntityList.isEmpty()) {
|
if (stsOperationEntityList == null || stsOperationEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "StsOperationEntity", stsOperationEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "StsOperationEntity", stsOperationEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, stsOperationEntityList, stsOperationEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, stsOperationEntityList, stsOperationEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -317,7 +317,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "StsOperationEntity", stsOperationEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "StsOperationEntity", stsOperationEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindStsOperation(PreparedStatement pstmt, StsOperationEntity entity) throws Exception {
|
public void bindStsOperation(PreparedStatement pstmt, StsOperationEntity entity) throws Exception {
|
||||||
@ -351,7 +351,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (terminalCallEntityList == null || terminalCallEntityList.isEmpty()) {
|
if (terminalCallEntityList == null || terminalCallEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, terminalCallEntityList, terminalCallEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, terminalCallEntityList, terminalCallEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -363,7 +363,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "TerminalCallEntity", terminalCallEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindTerminalCall(PreparedStatement pstmt, TerminalCallEntity entity) throws Exception {
|
public void bindTerminalCall(PreparedStatement pstmt, TerminalCallEntity entity) throws Exception {
|
||||||
@ -399,7 +399,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
if (transitEntityList == null || transitEntityList.isEmpty()) {
|
if (transitEntityList == null || transitEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "TransitEntity", transitEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "TransitEntity", transitEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, transitEntityList, transitEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, transitEntityList, transitEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -411,7 +411,7 @@ public class MovementRepositoryImpl extends MultiDataSourceJdbcRepository<Anchor
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "TransitEntity", transitEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "TransitEntity", transitEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindTransit(PreparedStatement pstmt, TransitEntity entity) throws Exception {
|
public void bindTransit(PreparedStatement pstmt, TransitEntity entity) throws Exception {
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.psc.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
|
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
|
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
|
||||||
@ -132,20 +129,17 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<PscDetailEntity> pscDetailWriteListener() {
|
public BatchWriteListener<PscDetailEntity> pscDetailWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscDetail);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePscDetail);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<PscDefectEntity> pscDefectWriteListener() {
|
public BatchWriteListener<PscDefectEntity> pscDefectWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscDefect);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePscDefect);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<PscAllCertificateEntity> pscAllCertificateWriteListener() {
|
public BatchWriteListener<PscAllCertificateEntity> pscAllCertificateWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePscAllCertificate);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePscAllCertificate);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -154,12 +148,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
|
|||||||
public Step pscDetailSyncStep() {
|
public Step pscDetailSyncStep() {
|
||||||
log.info("Step 생성: pscDetailSyncStep");
|
log.info("Step 생성: pscDetailSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<PscDetailDto, PscDetailEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<PscDetailDto, PscDetailEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<PscDetailDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(pscDetailWriteListener())
|
.listener(pscDetailWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -168,12 +160,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
|
|||||||
public Step pscDefectSyncStep() {
|
public Step pscDefectSyncStep() {
|
||||||
log.info("Step 생성: pscDefectSyncStep");
|
log.info("Step 생성: pscDefectSyncStep");
|
||||||
return new StepBuilder("pscDefectSyncStep", jobRepository)
|
return new StepBuilder("pscDefectSyncStep", jobRepository)
|
||||||
.<PscDefectDto, PscDefectEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<PscDefectDto, PscDefectEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(pscDefectReader(businessDataSource, tableMetaInfo))
|
.reader(pscDefectReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new PscDefectProcessor())
|
.processor(new PscDefectProcessor())
|
||||||
.writer(new PscDefectWriter(pscRepository, transactionManager, subChunkSize))
|
.writer(new PscDefectWriter(pscRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<PscDefectDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(pscDefectWriteListener())
|
.listener(pscDefectWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -182,12 +172,10 @@ public class PscSyncJobConfig extends BaseJobConfig<PscDetailDto, PscDetailEntit
|
|||||||
public Step pscAllCertificateSyncStep() {
|
public Step pscAllCertificateSyncStep() {
|
||||||
log.info("Step 생성: pscAllCertificateSyncStep");
|
log.info("Step 생성: pscAllCertificateSyncStep");
|
||||||
return new StepBuilder("pscAllCertificateSyncStep", jobRepository)
|
return new StepBuilder("pscAllCertificateSyncStep", jobRepository)
|
||||||
.<PscAllCertificateDto, PscAllCertificateEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<PscAllCertificateDto, PscAllCertificateEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(pscAllCertificateReader(businessDataSource, tableMetaInfo))
|
.reader(pscAllCertificateReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new PscAllCertificateProcessor())
|
.processor(new PscAllCertificateProcessor())
|
||||||
.writer(new PscAllCertificateWriter(pscRepository, transactionManager, subChunkSize))
|
.writer(new PscAllCertificateWriter(pscRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<PscAllCertificateDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(pscAllCertificateWriteListener())
|
.listener(pscAllCertificateWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.psc.reader;
|
package com.snp.batch.jobs.datasync.batch.psc.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
|
import com.snp.batch.jobs.datasync.batch.psc.dto.PscAllCertificateDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class PscAllCertificateReader implements ItemReader<PscAllCertificateDto> {
|
public class PscAllCertificateReader extends BaseSyncReader<PscAllCertificateDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<PscAllCertificateDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public PscAllCertificateReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public PscAllCertificateReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public PscAllCertificateDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourcePscAllCertificate;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected PscAllCertificateDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscAllCertificate), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[PscAllCertificateReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscAllCertificate);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp expryYmdTs = rs.getTimestamp("expry_ymd");
|
Timestamp expryYmdTs = rs.getTimestamp("expry_ymd");
|
||||||
Timestamp lastInspectionYmdTs = rs.getTimestamp("last_inspection_ymd");
|
Timestamp lastInspectionYmdTs = rs.getTimestamp("last_inspection_ymd");
|
||||||
|
|
||||||
@ -75,13 +51,5 @@ public class PscAllCertificateReader implements ItemReader<PscAllCertificateDto>
|
|||||||
.checkYmd(rs.getString("check_ymd"))
|
.checkYmd(rs.getString("check_ymd"))
|
||||||
.insptr(rs.getString("insptr"))
|
.insptr(rs.getString("insptr"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscAllCertificate);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.psc.reader;
|
package com.snp.batch.jobs.datasync.batch.psc.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
|
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDefectDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class PscDefectReader implements ItemReader<PscDefectDto> {
|
public class PscDefectReader extends BaseSyncReader<PscDefectDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<PscDefectDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public PscDefectReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public PscDefectReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public PscDefectDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourcePscDefect;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected PscDefectDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDefect), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[PscDefectReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDefect);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return PscDefectDto.builder()
|
return PscDefectDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -75,13 +51,5 @@ public class PscDefectReader implements ItemReader<PscDefectDto> {
|
|||||||
.pubcEnginesRespsbYn(rs.getString("pubc_engines_respsb_yn"))
|
.pubcEnginesRespsbYn(rs.getString("pubc_engines_respsb_yn"))
|
||||||
.acdntDamgYn(rs.getString("acdnt_damg_yn"))
|
.acdntDamgYn(rs.getString("acdnt_damg_yn"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDefect);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.psc.reader;
|
package com.snp.batch.jobs.datasync.batch.psc.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto;
|
import com.snp.batch.jobs.datasync.batch.psc.dto.PscDetailDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class PscDetailReader implements ItemReader<PscDetailDto> {
|
public class PscDetailReader extends BaseSyncReader<PscDetailDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<PscDetailDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public PscDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public PscDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public PscDetailDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourcePscDetail;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected PscDetailDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePscDetail), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[PscDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePscDetail);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp inspectionYmdTs = rs.getTimestamp("inspection_ymd");
|
Timestamp inspectionYmdTs = rs.getTimestamp("inspection_ymd");
|
||||||
Timestamp tkoffPrmtYmdTs = rs.getTimestamp("tkoff_prmt_ymd");
|
Timestamp tkoffPrmtYmdTs = rs.getTimestamp("tkoff_prmt_ymd");
|
||||||
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
||||||
@ -86,13 +62,5 @@ public class PscDetailReader implements ItemReader<PscDetailDto> {
|
|||||||
.unPortCd(rs.getString("un_port_cd"))
|
.unPortCd(rs.getString("un_port_cd"))
|
||||||
.buildYy(rs.getString("build_yy"))
|
.buildYy(rs.getString("build_yy"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePscDetail);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -83,7 +83,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
|
|||||||
if (pscDetailEntityList == null || pscDetailEntityList.isEmpty()) {
|
if (pscDetailEntityList == null || pscDetailEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "PscDetailEntity", pscDetailEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "PscDetailEntity", pscDetailEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, pscDetailEntityList, pscDetailEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, pscDetailEntityList, pscDetailEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -95,7 +95,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "PscDetailEntity", pscDetailEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "PscDetailEntity", pscDetailEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindPscDetail(PreparedStatement pstmt, PscDetailEntity entity) throws Exception {
|
public void bindPscDetail(PreparedStatement pstmt, PscDetailEntity entity) throws Exception {
|
||||||
@ -139,7 +139,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
|
|||||||
if (pscDefectEntityList == null || pscDefectEntityList.isEmpty()) {
|
if (pscDefectEntityList == null || pscDefectEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "PscDefectEntity", pscDefectEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "PscDefectEntity", pscDefectEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, pscDefectEntityList, pscDefectEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, pscDefectEntityList, pscDefectEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -151,7 +151,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "PscDefectEntity", pscDefectEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "PscDefectEntity", pscDefectEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindPscDefect(PreparedStatement pstmt, PscDefectEntity entity) throws Exception {
|
public void bindPscDefect(PreparedStatement pstmt, PscDefectEntity entity) throws Exception {
|
||||||
@ -189,7 +189,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
|
|||||||
if (pscAllCertificateEntityList == null || pscAllCertificateEntityList.isEmpty()) {
|
if (pscAllCertificateEntityList == null || pscAllCertificateEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, pscAllCertificateEntityList, pscAllCertificateEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, pscAllCertificateEntityList, pscAllCertificateEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -201,7 +201,7 @@ public class PscRepositoryImpl extends MultiDataSourceJdbcRepository<PscDetailEn
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "PscAllCertificateEntity", pscAllCertificateEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindPscAllCertificate(PreparedStatement pstmt, PscAllCertificateEntity entity) throws Exception {
|
public void bindPscAllCertificate(PreparedStatement pstmt, PscAllCertificateEntity entity) throws Exception {
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.risk.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
|
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity;
|
import com.snp.batch.jobs.datasync.batch.risk.entity.RiskEntity;
|
||||||
@ -102,8 +99,7 @@ public class RiskSyncJobConfig extends BaseJobConfig<RiskDto, RiskEntity> {
|
|||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<RiskEntity> riskWriteListener() {
|
public BatchWriteListener<RiskEntity> riskWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceRisk);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceRisk);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -112,12 +108,10 @@ public class RiskSyncJobConfig extends BaseJobConfig<RiskDto, RiskEntity> {
|
|||||||
public Step riskSyncStep() {
|
public Step riskSyncStep() {
|
||||||
log.info("Step 생성: riskSyncStep");
|
log.info("Step 생성: riskSyncStep");
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<RiskDto, RiskEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<RiskDto, RiskEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<RiskDto>())
|
|
||||||
.listener(new GroupByExecutionIdChunkListener())
|
|
||||||
.listener(riskWriteListener())
|
.listener(riskWriteListener())
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,54 +1,30 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.risk.reader;
|
package com.snp.batch.jobs.datasync.batch.risk.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
|
import com.snp.batch.jobs.datasync.batch.risk.dto.RiskDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.SQLException;
|
||||||
import java.sql.Timestamp;
|
import java.sql.Timestamp;
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class RiskReader implements ItemReader<RiskDto> {
|
public class RiskReader extends BaseSyncReader<RiskDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<RiskDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public RiskReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public RiskReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public RiskDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceRisk;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected RiskDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceRisk), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[RiskReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceRisk);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
Timestamp lastMdfcnDtTs = rs.getTimestamp("last_mdfcn_dt");
|
||||||
|
|
||||||
return RiskDto.builder()
|
return RiskDto.builder()
|
||||||
@ -96,13 +72,5 @@ public class RiskReader implements ItemReader<RiskDto> {
|
|||||||
.rssOwnrReg(rs.getString("rss_ownr_reg"))
|
.rssOwnrReg(rs.getString("rss_ownr_reg"))
|
||||||
.rssSts(rs.getString("rss_sts"))
|
.rssSts(rs.getString("rss_sts"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceRisk);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -80,7 +80,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
|
|||||||
if (riskEntityList == null || riskEntityList.isEmpty()) {
|
if (riskEntityList == null || riskEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, riskEntityList, riskEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, riskEntityList, riskEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -92,7 +92,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -101,7 +101,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
|
|||||||
if (riskEntityList == null || riskEntityList.isEmpty()) {
|
if (riskEntityList == null || riskEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "RiskEntity", riskEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, riskEntityList, riskEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, riskEntityList, riskEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -113,7 +113,7 @@ public class RiskRepositoryImpl extends MultiDataSourceJdbcRepository<RiskEntity
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "RiskEntity", riskEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindRisk(PreparedStatement pstmt, RiskEntity entity) throws Exception {
|
public void bindRisk(PreparedStatement pstmt, RiskEntity entity) throws Exception {
|
||||||
|
|||||||
@ -2,10 +2,7 @@ package com.snp.batch.jobs.datasync.batch.ship.config;
|
|||||||
|
|
||||||
import com.snp.batch.common.batch.config.BaseJobConfig;
|
import com.snp.batch.common.batch.config.BaseJobConfig;
|
||||||
import com.snp.batch.common.util.BatchWriteListener;
|
import com.snp.batch.common.util.BatchWriteListener;
|
||||||
import com.snp.batch.common.util.CommonSql;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdChunkListener;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdPolicy;
|
|
||||||
import com.snp.batch.common.util.GroupByExecutionIdReadListener;
|
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
|
||||||
@ -475,158 +472,132 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
// --- Listeners ---
|
// --- Listeners ---
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<ShipInfoMstEntity> shipWriteListener() {
|
public BatchWriteListener<ShipInfoMstEntity> shipWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceShipDetailData);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceShipDetailData);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<OwnerHistoryEntity> ownerHistoryWriteListener() {
|
public BatchWriteListener<OwnerHistoryEntity> ownerHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceOwnerHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceOwnerHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<ShipAddInfoEntity> shipAddInfoWriteListener() {
|
public BatchWriteListener<ShipAddInfoEntity> shipAddInfoWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceAdditionalShipsData);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceAdditionalShipsData);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<BareboatCharterHistoryEntity> bareboatCharterHistoryWriteListener() {
|
public BatchWriteListener<BareboatCharterHistoryEntity> bareboatCharterHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceBareboatCharterHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceBareboatCharterHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<CallsignAndMmsiHistoryEntity> callsignAndMmsiHistoryWriteListener() {
|
public BatchWriteListener<CallsignAndMmsiHistoryEntity> callsignAndMmsiHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCallsignAndMmsiHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCallsignAndMmsiHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<ClassHistoryEntity> classHistoryWriteListener() {
|
public BatchWriteListener<ClassHistoryEntity> classHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceClassHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceClassHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<CompanyVesselRelationshipsEntity> companyVesselRelationshipsWriteListener() {
|
public BatchWriteListener<CompanyVesselRelationshipsEntity> companyVesselRelationshipsWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCompanyVesselRelationships);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCompanyVesselRelationships);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<CrewListEntity> crewListWriteListener() {
|
public BatchWriteListener<CrewListEntity> crewListWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceCrewList);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceCrewList);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<DarkActivityConfirmedEntity> darkActivityConfirmedWriteListener() {
|
public BatchWriteListener<DarkActivityConfirmedEntity> darkActivityConfirmedWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceDarkActivityConfirmed);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceDarkActivityConfirmed);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<FlagHistoryEntity> flagHistoryWriteListener() {
|
public BatchWriteListener<FlagHistoryEntity> flagHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceFlagHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceFlagHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<GroupBeneficialOwnerHistoryEntity> groupBeneficialOwnerHistoryWriteListener() {
|
public BatchWriteListener<GroupBeneficialOwnerHistoryEntity> groupBeneficialOwnerHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceGroupBeneficialOwnerHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<IceClassEntity> iceClassWriteListener() {
|
public BatchWriteListener<IceClassEntity> iceClassWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceIceClass);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceIceClass);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<NameHistoryEntity> nameHistoryWriteListener() {
|
public BatchWriteListener<NameHistoryEntity> nameHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceNameHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceNameHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<OperatorHistoryEntity> operatorHistoryWriteListener() {
|
public BatchWriteListener<OperatorHistoryEntity> operatorHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceOperatorHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceOperatorHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<PandIHistoryEntity> pandIHistoryWriteListener() {
|
public BatchWriteListener<PandIHistoryEntity> pandIHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourcePandiHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourcePandiHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<SafetyManagementCertificateHistEntity> safetyManagementCertificateHistWriteListener() {
|
public BatchWriteListener<SafetyManagementCertificateHistEntity> safetyManagementCertificateHistWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSafetyManagementCertificateHist);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSafetyManagementCertificateHist);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<ShipManagerHistoryEntity> shipManagerHistoryWriteListener() {
|
public BatchWriteListener<ShipManagerHistoryEntity> shipManagerHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceShipManagerHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceShipManagerHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<SisterShipLinksEntity> sisterShipLinksWriteListener() {
|
public BatchWriteListener<SisterShipLinksEntity> sisterShipLinksWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSisterShipLinks);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSisterShipLinks);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<SpecialFeatureEntity> specialFeatureWriteListener() {
|
public BatchWriteListener<SpecialFeatureEntity> specialFeatureWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSpecialFeature);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSpecialFeature);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<StatusHistoryEntity> statusHistoryWriteListener() {
|
public BatchWriteListener<StatusHistoryEntity> statusHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStatusHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceStatusHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<StowageCommodityEntity> stowageCommodityWriteListener() {
|
public BatchWriteListener<StowageCommodityEntity> stowageCommodityWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceStowageCommodity);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceStowageCommodity);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<SurveyDatesEntity> surveyDatesWriteListener() {
|
public BatchWriteListener<SurveyDatesEntity> surveyDatesWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSurveyDates);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSurveyDates);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<SurveyDatesHistoryUniqueEntity> surveyDatesHistoryUniqueWriteListener() {
|
public BatchWriteListener<SurveyDatesHistoryUniqueEntity> surveyDatesHistoryUniqueWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceSurveyDatesHistoryUnique);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<TechnicalManagerHistoryEntity> technicalManagerHistoryWriteListener() {
|
public BatchWriteListener<TechnicalManagerHistoryEntity> technicalManagerHistoryWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTechnicalManagerHistory);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTechnicalManagerHistory);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<ThrustersEntity> thrustersWriteListener() {
|
public BatchWriteListener<ThrustersEntity> thrustersWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceThrusters);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceThrusters);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Bean
|
@Bean
|
||||||
public BatchWriteListener<TbCompanyDetailEntity> tbCompanyDetailWriteListener() {
|
public BatchWriteListener<TbCompanyDetailEntity> tbCompanyDetailWriteListener() {
|
||||||
String sql = CommonSql.getCompleteBatchQuery(tableMetaInfo.sourceTbCompanyDetail);
|
return new BatchWriteListener<>(businessJdbcTemplate, tableMetaInfo.sourceTbCompanyDetail);
|
||||||
return new BatchWriteListener<>(businessJdbcTemplate, sql);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Steps ---
|
// --- Steps ---
|
||||||
@ -634,12 +605,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
@Bean(name = "snpShipDetailSyncStep")
|
@Bean(name = "snpShipDetailSyncStep")
|
||||||
public Step snpShipDetailSyncStep() {
|
public Step snpShipDetailSyncStep() {
|
||||||
return new StepBuilder(getStepName(), jobRepository)
|
return new StepBuilder(getStepName(), jobRepository)
|
||||||
.<ShipInfoMstDto, ShipInfoMstEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<ShipInfoMstDto, ShipInfoMstEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(createReader())
|
.reader(createReader())
|
||||||
.processor(createProcessor())
|
.processor(createProcessor())
|
||||||
.writer(createWriter())
|
.writer(createWriter())
|
||||||
.listener(new GroupByExecutionIdReadListener<ShipInfoMstDto>()) // Reader 리스너 (ThreadLocal 설정)
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너 (ThreadLocal 정리)
|
|
||||||
.listener(shipWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(shipWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -648,12 +617,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step ownerHistorySyncStep() {
|
public Step ownerHistorySyncStep() {
|
||||||
log.info("Step 생성: ownerHistorySyncStep");
|
log.info("Step 생성: ownerHistorySyncStep");
|
||||||
return new StepBuilder("ownerHistorySyncStep", jobRepository)
|
return new StepBuilder("ownerHistorySyncStep", jobRepository)
|
||||||
.<OwnerHistoryDto, OwnerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<OwnerHistoryDto, OwnerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(ownerHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(ownerHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new OwnerHistoryProcessor())
|
.processor(new OwnerHistoryProcessor())
|
||||||
.writer(new OwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new OwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<OwnerHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(ownerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(ownerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -662,12 +629,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step shipAddInfoSyncStep() {
|
public Step shipAddInfoSyncStep() {
|
||||||
log.info("Step 생성: shipAddInfoSyncStep");
|
log.info("Step 생성: shipAddInfoSyncStep");
|
||||||
return new StepBuilder("shipAddInfoSyncStep", jobRepository)
|
return new StepBuilder("shipAddInfoSyncStep", jobRepository)
|
||||||
.<ShipAddInfoDto, ShipAddInfoEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<ShipAddInfoDto, ShipAddInfoEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(shipAddInfoReader(businessDataSource, tableMetaInfo))
|
.reader(shipAddInfoReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new ShipAddInfoProcessor())
|
.processor(new ShipAddInfoProcessor())
|
||||||
.writer(new ShipAddInfoWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new ShipAddInfoWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<ShipAddInfoDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(shipAddInfoWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(shipAddInfoWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -676,12 +641,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step bareboatCharterHistorySyncStep() {
|
public Step bareboatCharterHistorySyncStep() {
|
||||||
log.info("Step 생성: bareboatCharterHistorySyncStep");
|
log.info("Step 생성: bareboatCharterHistorySyncStep");
|
||||||
return new StepBuilder("bareboatCharterHistorySyncStep", jobRepository)
|
return new StepBuilder("bareboatCharterHistorySyncStep", jobRepository)
|
||||||
.<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<BareboatCharterHistoryDto, BareboatCharterHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(bareboatCharterHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(bareboatCharterHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new BareboatCharterHistoryProcessor())
|
.processor(new BareboatCharterHistoryProcessor())
|
||||||
.writer(new BareboatCharterHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new BareboatCharterHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<BareboatCharterHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(bareboatCharterHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(bareboatCharterHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -690,12 +653,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step callsignAndMmsiHistorySyncStep() {
|
public Step callsignAndMmsiHistorySyncStep() {
|
||||||
log.info("Step 생성: callsignAndMmsiHistorySyncStep");
|
log.info("Step 생성: callsignAndMmsiHistorySyncStep");
|
||||||
return new StepBuilder("callsignAndMmsiHistorySyncStep", jobRepository)
|
return new StepBuilder("callsignAndMmsiHistorySyncStep", jobRepository)
|
||||||
.<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<CallsignAndMmsiHistoryDto, CallsignAndMmsiHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(callsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(callsignAndMmsiHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new CallsignAndMmsiHistoryProcessor())
|
.processor(new CallsignAndMmsiHistoryProcessor())
|
||||||
.writer(new CallsignAndMmsiHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new CallsignAndMmsiHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<CallsignAndMmsiHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(callsignAndMmsiHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(callsignAndMmsiHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -704,12 +665,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step classHistorySyncStep() {
|
public Step classHistorySyncStep() {
|
||||||
log.info("Step 생성: classHistorySyncStep");
|
log.info("Step 생성: classHistorySyncStep");
|
||||||
return new StepBuilder("classHistorySyncStep", jobRepository)
|
return new StepBuilder("classHistorySyncStep", jobRepository)
|
||||||
.<ClassHistoryDto, ClassHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<ClassHistoryDto, ClassHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(classHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(classHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new ClassHistoryProcessor())
|
.processor(new ClassHistoryProcessor())
|
||||||
.writer(new ClassHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new ClassHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<ClassHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(classHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(classHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -718,12 +677,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step companyVesselRelationshipsSyncStep() {
|
public Step companyVesselRelationshipsSyncStep() {
|
||||||
log.info("Step 생성: companyVesselRelationshipsSyncStep");
|
log.info("Step 생성: companyVesselRelationshipsSyncStep");
|
||||||
return new StepBuilder("companyVesselRelationshipsSyncStep", jobRepository)
|
return new StepBuilder("companyVesselRelationshipsSyncStep", jobRepository)
|
||||||
.<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<CompanyVesselRelationshipsDto, CompanyVesselRelationshipsEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(companyVesselRelationshipsReader(businessDataSource, tableMetaInfo))
|
.reader(companyVesselRelationshipsReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new CompanyVesselRelationshipsProcessor())
|
.processor(new CompanyVesselRelationshipsProcessor())
|
||||||
.writer(new CompanyVesselRelationshipsWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new CompanyVesselRelationshipsWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<CompanyVesselRelationshipsDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(companyVesselRelationshipsWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(companyVesselRelationshipsWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -732,12 +689,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step crewListSyncStep() {
|
public Step crewListSyncStep() {
|
||||||
log.info("Step 생성: crewListSyncStep");
|
log.info("Step 생성: crewListSyncStep");
|
||||||
return new StepBuilder("crewListSyncStep", jobRepository)
|
return new StepBuilder("crewListSyncStep", jobRepository)
|
||||||
.<CrewListDto, CrewListEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<CrewListDto, CrewListEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(crewListReader(businessDataSource, tableMetaInfo))
|
.reader(crewListReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new CrewListProcessor())
|
.processor(new CrewListProcessor())
|
||||||
.writer(new CrewListWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new CrewListWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<CrewListDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(crewListWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(crewListWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -746,12 +701,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step darkActivityConfirmedSyncStep() {
|
public Step darkActivityConfirmedSyncStep() {
|
||||||
log.info("Step 생성: darkActivityConfirmedSyncStep");
|
log.info("Step 생성: darkActivityConfirmedSyncStep");
|
||||||
return new StepBuilder("darkActivityConfirmedSyncStep", jobRepository)
|
return new StepBuilder("darkActivityConfirmedSyncStep", jobRepository)
|
||||||
.<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<DarkActivityConfirmedDto, DarkActivityConfirmedEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(darkActivityConfirmedReader(businessDataSource, tableMetaInfo))
|
.reader(darkActivityConfirmedReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new DarkActivityConfirmedProcessor())
|
.processor(new DarkActivityConfirmedProcessor())
|
||||||
.writer(new DarkActivityConfirmedWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new DarkActivityConfirmedWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<DarkActivityConfirmedDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(darkActivityConfirmedWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(darkActivityConfirmedWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -760,12 +713,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step flagHistorySyncStep() {
|
public Step flagHistorySyncStep() {
|
||||||
log.info("Step 생성: flagHistorySyncStep");
|
log.info("Step 생성: flagHistorySyncStep");
|
||||||
return new StepBuilder("flagHistorySyncStep", jobRepository)
|
return new StepBuilder("flagHistorySyncStep", jobRepository)
|
||||||
.<FlagHistoryDto, FlagHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<FlagHistoryDto, FlagHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(flagHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(flagHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new FlagHistoryProcessor())
|
.processor(new FlagHistoryProcessor())
|
||||||
.writer(new FlagHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new FlagHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<FlagHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(flagHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(flagHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -774,12 +725,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step groupBeneficialOwnerHistorySyncStep() {
|
public Step groupBeneficialOwnerHistorySyncStep() {
|
||||||
log.info("Step 생성: groupBeneficialOwnerHistorySyncStep");
|
log.info("Step 생성: groupBeneficialOwnerHistorySyncStep");
|
||||||
return new StepBuilder("groupBeneficialOwnerHistorySyncStep", jobRepository)
|
return new StepBuilder("groupBeneficialOwnerHistorySyncStep", jobRepository)
|
||||||
.<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<GroupBeneficialOwnerHistoryDto, GroupBeneficialOwnerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(groupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(groupBeneficialOwnerHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new GroupBeneficialOwnerHistoryProcessor())
|
.processor(new GroupBeneficialOwnerHistoryProcessor())
|
||||||
.writer(new GroupBeneficialOwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new GroupBeneficialOwnerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<GroupBeneficialOwnerHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(groupBeneficialOwnerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(groupBeneficialOwnerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -788,12 +737,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step iceClassSyncStep() {
|
public Step iceClassSyncStep() {
|
||||||
log.info("Step 생성: iceClassSyncStep");
|
log.info("Step 생성: iceClassSyncStep");
|
||||||
return new StepBuilder("iceClassSyncStep", jobRepository)
|
return new StepBuilder("iceClassSyncStep", jobRepository)
|
||||||
.<IceClassDto, IceClassEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<IceClassDto, IceClassEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(iceClassReader(businessDataSource, tableMetaInfo))
|
.reader(iceClassReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new IceClassProcessor())
|
.processor(new IceClassProcessor())
|
||||||
.writer(new IceClassWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new IceClassWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<IceClassDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(iceClassWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(iceClassWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -802,12 +749,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step nameHistorySyncStep() {
|
public Step nameHistorySyncStep() {
|
||||||
log.info("Step 생성: nameHistorySyncStep");
|
log.info("Step 생성: nameHistorySyncStep");
|
||||||
return new StepBuilder("nameHistorySyncStep", jobRepository)
|
return new StepBuilder("nameHistorySyncStep", jobRepository)
|
||||||
.<NameHistoryDto, NameHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<NameHistoryDto, NameHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(nameHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(nameHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new NameHistoryProcessor())
|
.processor(new NameHistoryProcessor())
|
||||||
.writer(new NameHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new NameHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<NameHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(nameHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(nameHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -816,12 +761,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step operatorHistorySyncStep() {
|
public Step operatorHistorySyncStep() {
|
||||||
log.info("Step 생성: operatorHistorySyncStep");
|
log.info("Step 생성: operatorHistorySyncStep");
|
||||||
return new StepBuilder("operatorHistorySyncStep", jobRepository)
|
return new StepBuilder("operatorHistorySyncStep", jobRepository)
|
||||||
.<OperatorHistoryDto, OperatorHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<OperatorHistoryDto, OperatorHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(operatorHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(operatorHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new OperatorHistoryProcessor())
|
.processor(new OperatorHistoryProcessor())
|
||||||
.writer(new OperatorHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new OperatorHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<OperatorHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(operatorHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(operatorHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -830,12 +773,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step pandIHistorySyncStep() {
|
public Step pandIHistorySyncStep() {
|
||||||
log.info("Step 생성: pandIHistorySyncStep");
|
log.info("Step 생성: pandIHistorySyncStep");
|
||||||
return new StepBuilder("pandIHistorySyncStep", jobRepository)
|
return new StepBuilder("pandIHistorySyncStep", jobRepository)
|
||||||
.<PandIHistoryDto, PandIHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<PandIHistoryDto, PandIHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(pandIHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(pandIHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new PandIHistoryProcessor())
|
.processor(new PandIHistoryProcessor())
|
||||||
.writer(new PandIHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new PandIHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<PandIHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(pandIHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(pandIHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -844,12 +785,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step safetyManagementCertificateHistSyncStep() {
|
public Step safetyManagementCertificateHistSyncStep() {
|
||||||
log.info("Step 생성: safetyManagementCertificateHistSyncStep");
|
log.info("Step 생성: safetyManagementCertificateHistSyncStep");
|
||||||
return new StepBuilder("safetyManagementCertificateHistSyncStep", jobRepository)
|
return new StepBuilder("safetyManagementCertificateHistSyncStep", jobRepository)
|
||||||
.<SafetyManagementCertificateHistDto, SafetyManagementCertificateHistEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<SafetyManagementCertificateHistDto, SafetyManagementCertificateHistEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(safetyManagementCertificateHistReader(businessDataSource, tableMetaInfo))
|
.reader(safetyManagementCertificateHistReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new SafetyManagementCertificateHistProcessor())
|
.processor(new SafetyManagementCertificateHistProcessor())
|
||||||
.writer(new SafetyManagementCertificateHistWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new SafetyManagementCertificateHistWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<SafetyManagementCertificateHistDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(safetyManagementCertificateHistWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(safetyManagementCertificateHistWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -858,12 +797,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step shipManagerHistorySyncStep() {
|
public Step shipManagerHistorySyncStep() {
|
||||||
log.info("Step 생성: shipManagerHistorySyncStep");
|
log.info("Step 생성: shipManagerHistorySyncStep");
|
||||||
return new StepBuilder("shipManagerHistorySyncStep", jobRepository)
|
return new StepBuilder("shipManagerHistorySyncStep", jobRepository)
|
||||||
.<ShipManagerHistoryDto, ShipManagerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<ShipManagerHistoryDto, ShipManagerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(shipManagerHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(shipManagerHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new ShipManagerHistoryProcessor())
|
.processor(new ShipManagerHistoryProcessor())
|
||||||
.writer(new ShipManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new ShipManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<ShipManagerHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(shipManagerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(shipManagerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -872,12 +809,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step sisterShipLinksSyncStep() {
|
public Step sisterShipLinksSyncStep() {
|
||||||
log.info("Step 생성: sisterShipLinksSyncStep");
|
log.info("Step 생성: sisterShipLinksSyncStep");
|
||||||
return new StepBuilder("sisterShipLinksSyncStep", jobRepository)
|
return new StepBuilder("sisterShipLinksSyncStep", jobRepository)
|
||||||
.<SisterShipLinksDto, SisterShipLinksEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<SisterShipLinksDto, SisterShipLinksEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(sisterShipLinksReader(businessDataSource, tableMetaInfo))
|
.reader(sisterShipLinksReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new SisterShipLinksProcessor())
|
.processor(new SisterShipLinksProcessor())
|
||||||
.writer(new SisterShipLinksWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new SisterShipLinksWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<SisterShipLinksDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(sisterShipLinksWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(sisterShipLinksWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -886,12 +821,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step specialFeatureSyncStep() {
|
public Step specialFeatureSyncStep() {
|
||||||
log.info("Step 생성: specialFeatureSyncStep");
|
log.info("Step 생성: specialFeatureSyncStep");
|
||||||
return new StepBuilder("specialFeatureSyncStep", jobRepository)
|
return new StepBuilder("specialFeatureSyncStep", jobRepository)
|
||||||
.<SpecialFeatureDto, SpecialFeatureEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<SpecialFeatureDto, SpecialFeatureEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(specialFeatureReader(businessDataSource, tableMetaInfo))
|
.reader(specialFeatureReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new SpecialFeatureProcessor())
|
.processor(new SpecialFeatureProcessor())
|
||||||
.writer(new SpecialFeatureWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new SpecialFeatureWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<SpecialFeatureDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(specialFeatureWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(specialFeatureWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -900,12 +833,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step statusHistorySyncStep() {
|
public Step statusHistorySyncStep() {
|
||||||
log.info("Step 생성: statusHistorySyncStep");
|
log.info("Step 생성: statusHistorySyncStep");
|
||||||
return new StepBuilder("statusHistorySyncStep", jobRepository)
|
return new StepBuilder("statusHistorySyncStep", jobRepository)
|
||||||
.<StatusHistoryDto, StatusHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<StatusHistoryDto, StatusHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(statusHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(statusHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new StatusHistoryProcessor())
|
.processor(new StatusHistoryProcessor())
|
||||||
.writer(new StatusHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new StatusHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<StatusHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(statusHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(statusHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -914,12 +845,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step stowageCommoditySyncStep() {
|
public Step stowageCommoditySyncStep() {
|
||||||
log.info("Step 생성: stowageCommoditySyncStep");
|
log.info("Step 생성: stowageCommoditySyncStep");
|
||||||
return new StepBuilder("stowageCommoditySyncStep", jobRepository)
|
return new StepBuilder("stowageCommoditySyncStep", jobRepository)
|
||||||
.<StowageCommodityDto, StowageCommodityEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<StowageCommodityDto, StowageCommodityEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(stowageCommodityReader(businessDataSource, tableMetaInfo))
|
.reader(stowageCommodityReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new StowageCommodityProcessor())
|
.processor(new StowageCommodityProcessor())
|
||||||
.writer(new StowageCommodityWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new StowageCommodityWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<StowageCommodityDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(stowageCommodityWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(stowageCommodityWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -928,12 +857,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step surveyDatesSyncStep() {
|
public Step surveyDatesSyncStep() {
|
||||||
log.info("Step 생성: surveyDatesSyncStep");
|
log.info("Step 생성: surveyDatesSyncStep");
|
||||||
return new StepBuilder("surveyDatesSyncStep", jobRepository)
|
return new StepBuilder("surveyDatesSyncStep", jobRepository)
|
||||||
.<SurveyDatesDto, SurveyDatesEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<SurveyDatesDto, SurveyDatesEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(surveyDatesReader(businessDataSource, tableMetaInfo))
|
.reader(surveyDatesReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new SurveyDatesProcessor())
|
.processor(new SurveyDatesProcessor())
|
||||||
.writer(new SurveyDatesWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new SurveyDatesWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<SurveyDatesDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(surveyDatesWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(surveyDatesWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -942,12 +869,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step surveyDatesHistoryUniqueSyncStep() {
|
public Step surveyDatesHistoryUniqueSyncStep() {
|
||||||
log.info("Step 생성: surveyDatesHistoryUniqueSyncStep");
|
log.info("Step 생성: surveyDatesHistoryUniqueSyncStep");
|
||||||
return new StepBuilder("surveyDatesHistoryUniqueSyncStep", jobRepository)
|
return new StepBuilder("surveyDatesHistoryUniqueSyncStep", jobRepository)
|
||||||
.<SurveyDatesHistoryUniqueDto, SurveyDatesHistoryUniqueEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<SurveyDatesHistoryUniqueDto, SurveyDatesHistoryUniqueEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(surveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo))
|
.reader(surveyDatesHistoryUniqueReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new SurveyDatesHistoryUniqueProcessor())
|
.processor(new SurveyDatesHistoryUniqueProcessor())
|
||||||
.writer(new SurveyDatesHistoryUniqueWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new SurveyDatesHistoryUniqueWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<SurveyDatesHistoryUniqueDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(surveyDatesHistoryUniqueWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(surveyDatesHistoryUniqueWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -956,12 +881,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step technicalManagerHistorySyncStep() {
|
public Step technicalManagerHistorySyncStep() {
|
||||||
log.info("Step 생성: technicalManagerHistorySyncStep");
|
log.info("Step 생성: technicalManagerHistorySyncStep");
|
||||||
return new StepBuilder("technicalManagerHistorySyncStep", jobRepository)
|
return new StepBuilder("technicalManagerHistorySyncStep", jobRepository)
|
||||||
.<TechnicalManagerHistoryDto, TechnicalManagerHistoryEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<TechnicalManagerHistoryDto, TechnicalManagerHistoryEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(technicalManagerHistoryReader(businessDataSource, tableMetaInfo))
|
.reader(technicalManagerHistoryReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new TechnicalManagerHistoryProcessor())
|
.processor(new TechnicalManagerHistoryProcessor())
|
||||||
.writer(new TechnicalManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new TechnicalManagerHistoryWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<TechnicalManagerHistoryDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(technicalManagerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(technicalManagerHistoryWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -970,12 +893,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step thrustersSyncStep() {
|
public Step thrustersSyncStep() {
|
||||||
log.info("Step 생성: thrustersSyncStep");
|
log.info("Step 생성: thrustersSyncStep");
|
||||||
return new StepBuilder("thrustersSyncStep", jobRepository)
|
return new StepBuilder("thrustersSyncStep", jobRepository)
|
||||||
.<ThrustersDto, ThrustersEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<ThrustersDto, ThrustersEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(thrustersReader(businessDataSource, tableMetaInfo))
|
.reader(thrustersReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new ThrustersProcessor())
|
.processor(new ThrustersProcessor())
|
||||||
.writer(new ThrustersWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new ThrustersWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<ThrustersDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(thrustersWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(thrustersWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
@ -984,12 +905,10 @@ public class ShipDetailSyncJobConfig extends BaseJobConfig<ShipInfoMstDto, ShipI
|
|||||||
public Step tbCompanyDetailSyncStep() {
|
public Step tbCompanyDetailSyncStep() {
|
||||||
log.info("Step 생성: tbCompanyDetailSyncStep");
|
log.info("Step 생성: tbCompanyDetailSyncStep");
|
||||||
return new StepBuilder("tbCompanyDetailSyncStep", jobRepository)
|
return new StepBuilder("tbCompanyDetailSyncStep", jobRepository)
|
||||||
.<TbCompanyDetailDto, TbCompanyDetailEntity>chunk(new GroupByExecutionIdPolicy(), transactionManager)
|
.<TbCompanyDetailDto, TbCompanyDetailEntity>chunk(Integer.MAX_VALUE, transactionManager)
|
||||||
.reader(tbCompanyDetailReader(businessDataSource, tableMetaInfo))
|
.reader(tbCompanyDetailReader(businessDataSource, tableMetaInfo))
|
||||||
.processor(new TbCompanyDetailProcessor())
|
.processor(new TbCompanyDetailProcessor())
|
||||||
.writer(new TbCompanyDetailWriter(shipRepository, transactionManager, subChunkSize))
|
.writer(new TbCompanyDetailWriter(shipRepository, transactionManager, subChunkSize))
|
||||||
.listener(new GroupByExecutionIdReadListener<TbCompanyDetailDto>()) // Reader 리스너
|
|
||||||
.listener(new GroupByExecutionIdChunkListener()) // Chunk 리스너
|
|
||||||
.listener(tbCompanyDetailWriteListener()) // Write 완료 후 batch_flag 업데이트
|
.listener(tbCompanyDetailWriteListener()) // Write 완료 후 batch_flag 업데이트
|
||||||
.build();
|
.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.BareboatCharterHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class BareboatCharterHistoryReader implements ItemReader<BareboatCharterHistoryDto> {
|
public class BareboatCharterHistoryReader extends BaseSyncReader<BareboatCharterHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<BareboatCharterHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public BareboatCharterHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public BareboatCharterHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BareboatCharterHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceBareboatCharterHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected BareboatCharterHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceBareboatCharterHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[BareboatCharterHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceBareboatCharterHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return BareboatCharterHistoryDto.builder()
|
return BareboatCharterHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -60,14 +33,5 @@ public class BareboatCharterHistoryReader implements ItemReader<BareboatCharterH
|
|||||||
.bbctrCompanyCd(rs.getString("bbctr_company_cd"))
|
.bbctrCompanyCd(rs.getString("bbctr_company_cd"))
|
||||||
.bbctrCompany(rs.getString("bbctr_company"))
|
.bbctrCompany(rs.getString("bbctr_company"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceBareboatCharterHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.CallsignAndMmsiHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class CallsignAndMmsiHistoryReader implements ItemReader<CallsignAndMmsiHistoryDto> {
|
public class CallsignAndMmsiHistoryReader extends BaseSyncReader<CallsignAndMmsiHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<CallsignAndMmsiHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public CallsignAndMmsiHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public CallsignAndMmsiHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public CallsignAndMmsiHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceCallsignAndMmsiHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected CallsignAndMmsiHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCallsignAndMmsiHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[CallsignAndMmsiHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCallsignAndMmsiHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return CallsignAndMmsiHistoryDto.builder()
|
return CallsignAndMmsiHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -60,14 +33,5 @@ public class CallsignAndMmsiHistoryReader implements ItemReader<CallsignAndMmsiH
|
|||||||
.clsgnNo(rs.getString("clsgn_no"))
|
.clsgnNo(rs.getString("clsgn_no"))
|
||||||
.mmsiNo(rs.getString("mmsi_no"))
|
.mmsiNo(rs.getString("mmsi_no"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCallsignAndMmsiHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.ClassHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.ClassHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class ClassHistoryReader implements ItemReader<ClassHistoryDto> {
|
public class ClassHistoryReader extends BaseSyncReader<ClassHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<ClassHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public ClassHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public ClassHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ClassHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceClassHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected ClassHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceClassHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[ClassHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceClassHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return ClassHistoryDto.builder()
|
return ClassHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -63,14 +36,5 @@ public class ClassHistoryReader implements ItemReader<ClassHistoryDto> {
|
|||||||
.clficHasYn(rs.getString("clfic_has_yn"))
|
.clficHasYn(rs.getString("clfic_has_yn"))
|
||||||
.nowYn(rs.getString("now_yn"))
|
.nowYn(rs.getString("now_yn"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceClassHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.CompanyVesselRelationshipsDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.CompanyVesselRelationshipsDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class CompanyVesselRelationshipsReader implements ItemReader<CompanyVesselRelationshipsDto> {
|
public class CompanyVesselRelationshipsReader extends BaseSyncReader<CompanyVesselRelationshipsDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<CompanyVesselRelationshipsDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public CompanyVesselRelationshipsReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public CompanyVesselRelationshipsReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public CompanyVesselRelationshipsDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceCompanyVesselRelationships;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected CompanyVesselRelationshipsDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCompanyVesselRelationships), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[CompanyVesselRelationshipsReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCompanyVesselRelationships);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return CompanyVesselRelationshipsDto.builder()
|
return CompanyVesselRelationshipsDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -76,14 +49,5 @@ public class CompanyVesselRelationshipsReader implements ItemReader<CompanyVesse
|
|||||||
.techMngCompanyGroup(rs.getString("tech_mng_company_group"))
|
.techMngCompanyGroup(rs.getString("tech_mng_company_group"))
|
||||||
.techMngCompanyGroupCd(rs.getString("tech_mng_company_group_cd"))
|
.techMngCompanyGroupCd(rs.getString("tech_mng_company_group_cd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCompanyVesselRelationships);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.CrewListDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.CrewListDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class CrewListReader implements ItemReader<CrewListDto> {
|
public class CrewListReader extends BaseSyncReader<CrewListDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<CrewListDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public CrewListReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public CrewListReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public CrewListDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceCrewList;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected CrewListDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceCrewList), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[CrewListReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceCrewList);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return CrewListDto.builder()
|
return CrewListDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -67,14 +40,5 @@ public class CrewListReader implements ItemReader<CrewListDto> {
|
|||||||
.embrkMntncCrewCnt(rs.getBigDecimal("embrk_mntnc_crew_cnt"))
|
.embrkMntncCrewCnt(rs.getBigDecimal("embrk_mntnc_crew_cnt"))
|
||||||
.unrprtCnt(rs.getBigDecimal("unrprt_cnt"))
|
.unrprtCnt(rs.getBigDecimal("unrprt_cnt"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceCrewList);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.DarkActivityConfirmedDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.DarkActivityConfirmedDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class DarkActivityConfirmedReader implements ItemReader<DarkActivityConfirmedDto> {
|
public class DarkActivityConfirmedReader extends BaseSyncReader<DarkActivityConfirmedDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<DarkActivityConfirmedDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public DarkActivityConfirmedReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public DarkActivityConfirmedReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public DarkActivityConfirmedDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceDarkActivityConfirmed;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected DarkActivityConfirmedDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceDarkActivityConfirmed), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[DarkActivityConfirmedReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceDarkActivityConfirmed);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return DarkActivityConfirmedDto.builder()
|
return DarkActivityConfirmedDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -81,14 +54,5 @@ public class DarkActivityConfirmedReader implements ItemReader<DarkActivityConfi
|
|||||||
.nxtCptrLon(rs.getObject("nxt_cptr_lon", Double.class))
|
.nxtCptrLon(rs.getObject("nxt_cptr_lon", Double.class))
|
||||||
.nxtCptrRptDestAis(rs.getString("nxt_cptr_rpt_dest_ais"))
|
.nxtCptrRptDestAis(rs.getString("nxt_cptr_rpt_dest_ais"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceDarkActivityConfirmed);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.FlagHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.FlagHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class FlagHistoryReader implements ItemReader<FlagHistoryDto> {
|
public class FlagHistoryReader extends BaseSyncReader<FlagHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<FlagHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public FlagHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public FlagHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public FlagHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceFlagHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected FlagHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceFlagHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[FlagHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceFlagHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return FlagHistoryDto.builder()
|
return FlagHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -60,14 +33,5 @@ public class FlagHistoryReader implements ItemReader<FlagHistoryDto> {
|
|||||||
.countryCd(rs.getString("country_cd"))
|
.countryCd(rs.getString("country_cd"))
|
||||||
.country(rs.getString("country"))
|
.country(rs.getString("country"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceFlagHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.GroupBeneficialOwnerHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.GroupBeneficialOwnerHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class GroupBeneficialOwnerHistoryReader implements ItemReader<GroupBeneficialOwnerHistoryDto> {
|
public class GroupBeneficialOwnerHistoryReader extends BaseSyncReader<GroupBeneficialOwnerHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<GroupBeneficialOwnerHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public GroupBeneficialOwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public GroupBeneficialOwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public GroupBeneficialOwnerHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceGroupBeneficialOwnerHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected GroupBeneficialOwnerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[GroupBeneficialOwnerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return GroupBeneficialOwnerHistoryDto.builder()
|
return GroupBeneficialOwnerHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -61,14 +34,5 @@ public class GroupBeneficialOwnerHistoryReader implements ItemReader<GroupBenefi
|
|||||||
.groupActlOwnr(rs.getString("group_actl_ownr"))
|
.groupActlOwnr(rs.getString("group_actl_ownr"))
|
||||||
.companyStatus(rs.getString("company_status"))
|
.companyStatus(rs.getString("company_status"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceGroupBeneficialOwnerHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.IceClassDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.IceClassDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class IceClassReader implements ItemReader<IceClassDto> {
|
public class IceClassReader extends BaseSyncReader<IceClassDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<IceClassDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public IceClassReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public IceClassReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public IceClassDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceIceClass;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected IceClassDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceIceClass), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[IceClassReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceIceClass);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return IceClassDto.builder()
|
return IceClassDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -58,14 +31,5 @@ public class IceClassReader implements ItemReader<IceClassDto> {
|
|||||||
.iceGrdCd(rs.getString("ice_grd_cd"))
|
.iceGrdCd(rs.getString("ice_grd_cd"))
|
||||||
.iceGrd(rs.getString("ice_grd"))
|
.iceGrd(rs.getString("ice_grd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceIceClass);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.NameHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class NameHistoryReader implements ItemReader<NameHistoryDto> {
|
public class NameHistoryReader extends BaseSyncReader<NameHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<NameHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public NameHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public NameHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public NameHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceNameHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected NameHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceNameHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[NameHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceNameHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return NameHistoryDto.builder()
|
return NameHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -59,14 +32,5 @@ public class NameHistoryReader implements ItemReader<NameHistoryDto> {
|
|||||||
.efectStaDay(rs.getString("efect_sta_day"))
|
.efectStaDay(rs.getString("efect_sta_day"))
|
||||||
.shipNm(rs.getString("ship_nm"))
|
.shipNm(rs.getString("ship_nm"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceNameHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.OperatorHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class OperatorHistoryReader implements ItemReader<OperatorHistoryDto> {
|
public class OperatorHistoryReader extends BaseSyncReader<OperatorHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<OperatorHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public OperatorHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public OperatorHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public OperatorHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceOperatorHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected OperatorHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceOperatorHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[OperatorHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOperatorHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return OperatorHistoryDto.builder()
|
return OperatorHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -61,14 +34,5 @@ public class OperatorHistoryReader implements ItemReader<OperatorHistoryDto> {
|
|||||||
.shipOperator(rs.getString("ship_operator"))
|
.shipOperator(rs.getString("ship_operator"))
|
||||||
.companyStatus(rs.getString("company_status"))
|
.companyStatus(rs.getString("company_status"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOperatorHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.OwnerHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class OwnerHistoryReader implements ItemReader<OwnerHistoryDto> {
|
public class OwnerHistoryReader extends BaseSyncReader<OwnerHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<OwnerHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public OwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public OwnerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public OwnerHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceOwnerHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected OwnerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceOwnerHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[OwnerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceOwnerHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return OwnerHistoryDto.builder()
|
return OwnerHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -61,14 +34,5 @@ public class OwnerHistoryReader implements ItemReader<OwnerHistoryDto> {
|
|||||||
.ownr(rs.getString("ownr"))
|
.ownr(rs.getString("ownr"))
|
||||||
.companyStatus(rs.getString("company_status"))
|
.companyStatus(rs.getString("company_status"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceOwnerHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.PandIHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class PandIHistoryReader implements ItemReader<PandIHistoryDto> {
|
public class PandIHistoryReader extends BaseSyncReader<PandIHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<PandIHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public PandIHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public PandIHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public PandIHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourcePandiHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected PandIHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourcePandiHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[PandIHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourcePandiHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return PandIHistoryDto.builder()
|
return PandIHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -61,14 +34,5 @@ public class PandIHistoryReader implements ItemReader<PandIHistoryDto> {
|
|||||||
.pniClubNm(rs.getString("pni_club_nm"))
|
.pniClubNm(rs.getString("pni_club_nm"))
|
||||||
.src(rs.getString("src"))
|
.src(rs.getString("src"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourcePandiHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.SafetyManagementCertificateHistDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class SafetyManagementCertificateHistReader implements ItemReader<SafetyManagementCertificateHistDto> {
|
public class SafetyManagementCertificateHistReader extends BaseSyncReader<SafetyManagementCertificateHistDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<SafetyManagementCertificateHistDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public SafetyManagementCertificateHistReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public SafetyManagementCertificateHistReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public SafetyManagementCertificateHistDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceSafetyManagementCertificateHist;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected SafetyManagementCertificateHistDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSafetyManagementCertificateHist), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[SafetyManagementCertificateHistReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSafetyManagementCertificateHist);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return SafetyManagementCertificateHistDto.builder()
|
return SafetyManagementCertificateHistDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -69,14 +42,5 @@ public class SafetyManagementCertificateHistReader implements ItemReader<SafetyM
|
|||||||
.smgrcSrc(rs.getString("smgrc_src"))
|
.smgrcSrc(rs.getString("smgrc_src"))
|
||||||
.smgrcCompanyCd(rs.getString("smgrc_company_cd"))
|
.smgrcCompanyCd(rs.getString("smgrc_company_cd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSafetyManagementCertificateHist);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,61 +1,31 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipAddInfoDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class ShipAddInfoReader implements ItemReader<ShipAddInfoDto> {
|
public class ShipAddInfoReader extends BaseSyncReader<ShipAddInfoDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<ShipAddInfoDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public ShipAddInfoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public ShipAddInfoReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ShipAddInfoDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
// 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나"의 데이터를 긁어옵니다.
|
return tableMetaInfo.sourceAdditionalShipsData;
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null; // 진짜 데이터가 없으면 종료
|
protected ShipAddInfoDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
// 1. 아직 'N'인 최소 ID 하나를 찾음
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceAdditionalShipsData), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return; // 대상 없음
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[ShipAddInfoReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
// 2. 해당 ID의 데이터만 버퍼에 로드
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceAdditionalShipsData);
|
|
||||||
final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return ShipAddInfoDto.builder()
|
return ShipAddInfoDto.builder()
|
||||||
.jobExecutionId(targetId) // job_execution_id 설정
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
.imoNo(rs.getString("imo_no"))
|
.imoNo(rs.getString("imo_no"))
|
||||||
.shipEml(rs.getString("ship_eml"))
|
.shipEml(rs.getString("ship_eml"))
|
||||||
@ -70,15 +40,5 @@ public class ShipAddInfoReader implements ItemReader<ShipAddInfoDto> {
|
|||||||
.shipSatlitCommId(rs.getString("ship_satlit_comm_id"))
|
.shipSatlitCommId(rs.getString("ship_satlit_comm_id"))
|
||||||
.shipSatlitCmrspCd(rs.getString("ship_satlit_cmrsp_cd"))
|
.shipSatlitCmrspCd(rs.getString("ship_satlit_cmrsp_cd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
// 3. 해당 ID 'P'로 변경
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceAdditionalShipsData);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,61 +1,31 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipInfoMstDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class ShipDataReader implements ItemReader<ShipInfoMstDto> {
|
public class ShipDataReader extends BaseSyncReader<ShipInfoMstDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<ShipInfoMstDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public ShipDataReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public ShipDataReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ShipInfoMstDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
// 1. 버퍼가 비어있을 때만 DB에서 "다음 처리 대상 ID 하나"의 데이터를 긁어옵니다.
|
return tableMetaInfo.sourceShipDetailData;
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null; // 진짜 데이터가 없으면 종료
|
protected ShipInfoMstDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
// 1. 아직 'N'인 최소 ID 하나를 찾음
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipDetailData), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return; // 대상 없음
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[ShipDataReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
// 2. 해당 ID의 데이터만 버퍼에 로드
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipDetailData);
|
|
||||||
final Long targetId = nextTargetId; // lambda 내부에서 사용하기 위해 final 변수로
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return ShipInfoMstDto.builder()
|
return ShipInfoMstDto.builder()
|
||||||
.jobExecutionId(targetId) // job_execution_id 설정
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
.imoNo(rs.getString("imo_no"))
|
.imoNo(rs.getString("imo_no"))
|
||||||
.mmsiNo(rs.getString("mmsi_no"))
|
.mmsiNo(rs.getString("mmsi_no"))
|
||||||
@ -141,16 +111,5 @@ public class ShipDataReader implements ItemReader<ShipInfoMstDto> {
|
|||||||
.regShponrCd(rs.getString("reg_shponr_cd"))
|
.regShponrCd(rs.getString("reg_shponr_cd"))
|
||||||
.lastMdfcnDt(rs.getString("last_mdfcn_dt"))
|
.lastMdfcnDt(rs.getString("last_mdfcn_dt"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
// 3. 해당 ID 'P'로 변경
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipDetailData);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,56 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.ShipManagerHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class ShipManagerHistoryReader implements ItemReader<ShipManagerHistoryDto> {
|
public class ShipManagerHistoryReader extends BaseSyncReader<ShipManagerHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<ShipManagerHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public ShipManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public ShipManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ShipManagerHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceShipManagerHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
@Override
|
||||||
return null;
|
protected ShipManagerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
}
|
|
||||||
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
|
||||||
Long nextTargetId = null;
|
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceShipManagerHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[ShipManagerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceShipManagerHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return ShipManagerHistoryDto.builder()
|
return ShipManagerHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -61,14 +34,5 @@ public class ShipManagerHistoryReader implements ItemReader<ShipManagerHistoryDt
|
|||||||
.shipMngr(rs.getString("ship_mngr"))
|
.shipMngr(rs.getString("ship_mngr"))
|
||||||
.companyStatus(rs.getString("company_status"))
|
.companyStatus(rs.getString("company_status"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceShipManagerHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,66 +1,34 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.SisterShipLinksDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class SisterShipLinksReader implements ItemReader<SisterShipLinksDto> {
|
public class SisterShipLinksReader extends BaseSyncReader<SisterShipLinksDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<SisterShipLinksDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public SisterShipLinksReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public SisterShipLinksReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public SisterShipLinksDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceSisterShipLinks;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected SisterShipLinksDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSisterShipLinks), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[SisterShipLinksReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSisterShipLinks);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return SisterShipLinksDto.builder()
|
return SisterShipLinksDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
.imoNo(rs.getString("imo_no"))
|
.imoNo(rs.getString("imo_no"))
|
||||||
.linkImoNo(rs.getString("link_imo_no"))
|
.linkImoNo(rs.getString("link_imo_no"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSisterShipLinks);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.SpecialFeatureDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class SpecialFeatureReader implements ItemReader<SpecialFeatureDto> {
|
public class SpecialFeatureReader extends BaseSyncReader<SpecialFeatureDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<SpecialFeatureDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public SpecialFeatureReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public SpecialFeatureReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public SpecialFeatureDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceSpecialFeature;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected SpecialFeatureDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSpecialFeature), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[SpecialFeatureReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSpecialFeature);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return SpecialFeatureDto.builder()
|
return SpecialFeatureDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -56,13 +32,5 @@ public class SpecialFeatureReader implements ItemReader<SpecialFeatureDto> {
|
|||||||
.spcMttrCd(rs.getString("spc_mttr_cd"))
|
.spcMttrCd(rs.getString("spc_mttr_cd"))
|
||||||
.spcMttr(rs.getString("spc_mttr"))
|
.spcMttr(rs.getString("spc_mttr"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSpecialFeature);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.StatusHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class StatusHistoryReader implements ItemReader<StatusHistoryDto> {
|
public class StatusHistoryReader extends BaseSyncReader<StatusHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<StatusHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public StatusHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public StatusHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public StatusHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceStatusHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected StatusHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStatusHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[StatusHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStatusHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return StatusHistoryDto.builder()
|
return StatusHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -57,13 +33,5 @@ public class StatusHistoryReader implements ItemReader<StatusHistoryDto> {
|
|||||||
.statusChgYmd(rs.getString("status_chg_ymd"))
|
.statusChgYmd(rs.getString("status_chg_ymd"))
|
||||||
.status(rs.getString("status"))
|
.status(rs.getString("status"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStatusHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.StowageCommodityDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class StowageCommodityReader implements ItemReader<StowageCommodityDto> {
|
public class StowageCommodityReader extends BaseSyncReader<StowageCommodityDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<StowageCommodityDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public StowageCommodityReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public StowageCommodityReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public StowageCommodityDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceStowageCommodity;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected StowageCommodityDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceStowageCommodity), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[StowageCommodityReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceStowageCommodity);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return StowageCommodityDto.builder()
|
return StowageCommodityDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -58,13 +34,5 @@ public class StowageCommodityReader implements ItemReader<StowageCommodityDto> {
|
|||||||
.cargoCd(rs.getString("cargo_cd"))
|
.cargoCd(rs.getString("cargo_cd"))
|
||||||
.cargoNm(rs.getString("cargo_nm"))
|
.cargoNm(rs.getString("cargo_nm"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceStowageCommodity);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesHistoryUniqueDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class SurveyDatesHistoryUniqueReader implements ItemReader<SurveyDatesHistoryUniqueDto> {
|
public class SurveyDatesHistoryUniqueReader extends BaseSyncReader<SurveyDatesHistoryUniqueDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<SurveyDatesHistoryUniqueDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public SurveyDatesHistoryUniqueReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public SurveyDatesHistoryUniqueReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public SurveyDatesHistoryUniqueDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceSurveyDatesHistoryUnique;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected SurveyDatesHistoryUniqueDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[SurveyDatesHistoryUniqueReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return SurveyDatesHistoryUniqueDto.builder()
|
return SurveyDatesHistoryUniqueDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -57,13 +33,5 @@ public class SurveyDatesHistoryUniqueReader implements ItemReader<SurveyDatesHis
|
|||||||
.inspectionYmd(rs.getString("inspection_ymd"))
|
.inspectionYmd(rs.getString("inspection_ymd"))
|
||||||
.clfic(rs.getString("clfic"))
|
.clfic(rs.getString("clfic"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDatesHistoryUnique);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.SurveyDatesDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class SurveyDatesReader implements ItemReader<SurveyDatesDto> {
|
public class SurveyDatesReader extends BaseSyncReader<SurveyDatesDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<SurveyDatesDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public SurveyDatesReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public SurveyDatesReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public SurveyDatesDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceSurveyDates;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected SurveyDatesDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceSurveyDates), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[SurveyDatesReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceSurveyDates);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return SurveyDatesDto.builder()
|
return SurveyDatesDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -60,13 +36,5 @@ public class SurveyDatesReader implements ItemReader<SurveyDatesDto> {
|
|||||||
.mchnFxtmInspectionYmd(rs.getString("mchn_fxtm_inspection_ymd"))
|
.mchnFxtmInspectionYmd(rs.getString("mchn_fxtm_inspection_ymd"))
|
||||||
.tlsftInspectionYmd(rs.getString("tlsft_inspection_ymd"))
|
.tlsftInspectionYmd(rs.getString("tlsft_inspection_ymd"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceSurveyDates);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.TbCompanyDetailDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class TbCompanyDetailReader implements ItemReader<TbCompanyDetailDto> {
|
public class TbCompanyDetailReader extends BaseSyncReader<TbCompanyDetailDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<TbCompanyDetailDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public TbCompanyDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public TbCompanyDetailReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public TbCompanyDetailDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTbCompanyDetail;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected TbCompanyDetailDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTbCompanyDetail), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[TbCompanyDetailReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTbCompanyDetail);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return TbCompanyDetailDto.builder()
|
return TbCompanyDetailDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -81,13 +57,5 @@ public class TbCompanyDetailReader implements ItemReader<TbCompanyDetailDto> {
|
|||||||
.dtlAddrThr(rs.getString("dtl_addr_thr"))
|
.dtlAddrThr(rs.getString("dtl_addr_thr"))
|
||||||
.tlx(rs.getString("tlx"))
|
.tlx(rs.getString("tlx"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTbCompanyDetail);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.TechnicalManagerHistoryDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.TechnicalManagerHistoryDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class TechnicalManagerHistoryReader implements ItemReader<TechnicalManagerHistoryDto> {
|
public class TechnicalManagerHistoryReader extends BaseSyncReader<TechnicalManagerHistoryDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<TechnicalManagerHistoryDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public TechnicalManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public TechnicalManagerHistoryReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public TechnicalManagerHistoryDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceTechnicalManagerHistory;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected TechnicalManagerHistoryDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceTechnicalManagerHistory), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[TechnicalManagerHistoryReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceTechnicalManagerHistory);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return TechnicalManagerHistoryDto.builder()
|
return TechnicalManagerHistoryDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -58,13 +34,5 @@ public class TechnicalManagerHistoryReader implements ItemReader<TechnicalManage
|
|||||||
.techMngr(rs.getString("tech_mngr"))
|
.techMngr(rs.getString("tech_mngr"))
|
||||||
.companyStatus(rs.getString("company_status"))
|
.companyStatus(rs.getString("company_status"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceTechnicalManagerHistory);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,53 +1,29 @@
|
|||||||
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
package com.snp.batch.jobs.datasync.batch.ship.reader;
|
||||||
|
|
||||||
import com.snp.batch.common.util.CommonSql;
|
import com.snp.batch.common.batch.reader.BaseSyncReader;
|
||||||
import com.snp.batch.common.util.TableMetaInfo;
|
import com.snp.batch.common.util.TableMetaInfo;
|
||||||
import com.snp.batch.jobs.datasync.batch.ship.dto.ThrustersDto;
|
import com.snp.batch.jobs.datasync.batch.ship.dto.ThrustersDto;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.springframework.batch.item.ItemReader;
|
|
||||||
import org.springframework.beans.factory.annotation.Qualifier;
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
import org.springframework.jdbc.core.JdbcTemplate;
|
|
||||||
|
|
||||||
import javax.sql.DataSource;
|
import javax.sql.DataSource;
|
||||||
import java.util.ArrayList;
|
import java.sql.ResultSet;
|
||||||
import java.util.List;
|
import java.sql.SQLException;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
public class ThrustersReader implements ItemReader<ThrustersDto> {
|
public class ThrustersReader extends BaseSyncReader<ThrustersDto> {
|
||||||
private final TableMetaInfo tableMetaInfo;
|
|
||||||
private final JdbcTemplate businessJdbcTemplate;
|
|
||||||
private List<ThrustersDto> allDataBuffer = new ArrayList<>();
|
|
||||||
|
|
||||||
public ThrustersReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
public ThrustersReader(@Qualifier("businessDataSource") DataSource businessDataSource, TableMetaInfo tableMetaInfo) {
|
||||||
this.businessJdbcTemplate = new JdbcTemplate(businessDataSource);
|
super(businessDataSource, tableMetaInfo);
|
||||||
this.tableMetaInfo = tableMetaInfo;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ThrustersDto read() throws Exception {
|
protected String getSourceTable() {
|
||||||
if (allDataBuffer.isEmpty()) {
|
return tableMetaInfo.sourceThrusters;
|
||||||
fetchNextGroup();
|
|
||||||
}
|
|
||||||
if (allDataBuffer.isEmpty()) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return allDataBuffer.remove(0);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void fetchNextGroup() {
|
@Override
|
||||||
Long nextTargetId = null;
|
protected ThrustersDto mapRow(ResultSet rs, Long targetId) throws SQLException {
|
||||||
try {
|
|
||||||
nextTargetId = businessJdbcTemplate.queryForObject(
|
|
||||||
CommonSql.getNextTargetQuery(tableMetaInfo.sourceThrusters), Long.class);
|
|
||||||
} catch (Exception e) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextTargetId != null) {
|
|
||||||
log.info("[ThrustersReader] 다음 처리 대상 ID 발견: {}", nextTargetId);
|
|
||||||
String sql = CommonSql.getTargetDataQuery(tableMetaInfo.sourceThrusters);
|
|
||||||
final Long targetId = nextTargetId;
|
|
||||||
this.allDataBuffer = businessJdbcTemplate.query(sql, (rs, rowNum) -> {
|
|
||||||
return ThrustersDto.builder()
|
return ThrustersDto.builder()
|
||||||
.jobExecutionId(targetId)
|
.jobExecutionId(targetId)
|
||||||
.datasetVer(rs.getString("dataset_ver"))
|
.datasetVer(rs.getString("dataset_ver"))
|
||||||
@ -61,13 +37,5 @@ public class ThrustersReader implements ItemReader<ThrustersDto> {
|
|||||||
.thrstrPowerKw(rs.getBigDecimal("thrstr_power_kw"))
|
.thrstrPowerKw(rs.getBigDecimal("thrstr_power_kw"))
|
||||||
.instlMth(rs.getString("instl_mth"))
|
.instlMth(rs.getString("instl_mth"))
|
||||||
.build();
|
.build();
|
||||||
}, nextTargetId);
|
|
||||||
updateBatchProcessing(nextTargetId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void updateBatchProcessing(Long targetExecutionId) {
|
|
||||||
String sql = CommonSql.getProcessBatchQuery(tableMetaInfo.sourceThrusters);
|
|
||||||
businessJdbcTemplate.update(sql, targetExecutionId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -237,7 +237,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (shipAddInfoEntityList == null || shipAddInfoEntityList.isEmpty()) {
|
if (shipAddInfoEntityList == null || shipAddInfoEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, shipAddInfoEntityList, shipAddInfoEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, shipAddInfoEntityList, shipAddInfoEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -249,7 +249,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "ShipAddInfoEntity", shipAddInfoEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindShipAddInfo(PreparedStatement pstmt, ShipAddInfoEntity entity) throws Exception {
|
public void bindShipAddInfo(PreparedStatement pstmt, ShipAddInfoEntity entity) throws Exception {
|
||||||
@ -276,7 +276,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (bareboatCharterHistoryEntityList == null || bareboatCharterHistoryEntityList.isEmpty()) {
|
if (bareboatCharterHistoryEntityList == null || bareboatCharterHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, bareboatCharterHistoryEntityList, bareboatCharterHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, bareboatCharterHistoryEntityList, bareboatCharterHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -288,7 +288,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "BareboatCharterHistoryEntity", bareboatCharterHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindBareboatCharterHistory(PreparedStatement pstmt, BareboatCharterHistoryEntity entity) throws Exception {
|
public void bindBareboatCharterHistory(PreparedStatement pstmt, BareboatCharterHistoryEntity entity) throws Exception {
|
||||||
@ -308,7 +308,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (callsignAndMmsiHistoryEntityList == null || callsignAndMmsiHistoryEntityList.isEmpty()) {
|
if (callsignAndMmsiHistoryEntityList == null || callsignAndMmsiHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, callsignAndMmsiHistoryEntityList, callsignAndMmsiHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, callsignAndMmsiHistoryEntityList, callsignAndMmsiHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -320,7 +320,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "CallsignAndMmsiHistoryEntity", callsignAndMmsiHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindCallsignAndMmsiHistory(PreparedStatement pstmt, CallsignAndMmsiHistoryEntity entity) throws Exception {
|
public void bindCallsignAndMmsiHistory(PreparedStatement pstmt, CallsignAndMmsiHistoryEntity entity) throws Exception {
|
||||||
@ -340,7 +340,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (classHistoryEntityList == null || classHistoryEntityList.isEmpty()) {
|
if (classHistoryEntityList == null || classHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, classHistoryEntityList, classHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, classHistoryEntityList, classHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -352,7 +352,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "ClassHistoryEntity", classHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindClassHistory(PreparedStatement pstmt, ClassHistoryEntity entity) throws Exception {
|
public void bindClassHistory(PreparedStatement pstmt, ClassHistoryEntity entity) throws Exception {
|
||||||
@ -375,7 +375,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (companyVesselRelationshipsEntityList == null || companyVesselRelationshipsEntityList.isEmpty()) {
|
if (companyVesselRelationshipsEntityList == null || companyVesselRelationshipsEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, companyVesselRelationshipsEntityList, companyVesselRelationshipsEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, companyVesselRelationshipsEntityList, companyVesselRelationshipsEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -387,7 +387,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "CompanyVesselRelationshipsEntity", companyVesselRelationshipsEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindCompanyVesselRelationships(PreparedStatement pstmt, CompanyVesselRelationshipsEntity entity) throws Exception {
|
public void bindCompanyVesselRelationships(PreparedStatement pstmt, CompanyVesselRelationshipsEntity entity) throws Exception {
|
||||||
@ -423,7 +423,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (crewListEntityList == null || crewListEntityList.isEmpty()) {
|
if (crewListEntityList == null || crewListEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "CrewListEntity", crewListEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "CrewListEntity", crewListEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, crewListEntityList, crewListEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, crewListEntityList, crewListEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -435,7 +435,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "CrewListEntity", crewListEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "CrewListEntity", crewListEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindCrewList(PreparedStatement pstmt, CrewListEntity entity) throws Exception {
|
public void bindCrewList(PreparedStatement pstmt, CrewListEntity entity) throws Exception {
|
||||||
@ -462,7 +462,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (darkActivityConfirmedEntityList == null || darkActivityConfirmedEntityList.isEmpty()) {
|
if (darkActivityConfirmedEntityList == null || darkActivityConfirmedEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, darkActivityConfirmedEntityList, darkActivityConfirmedEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, darkActivityConfirmedEntityList, darkActivityConfirmedEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -474,7 +474,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "DarkActivityConfirmedEntity", darkActivityConfirmedEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindDarkActivityConfirmed(PreparedStatement pstmt, DarkActivityConfirmedEntity entity) throws Exception {
|
public void bindDarkActivityConfirmed(PreparedStatement pstmt, DarkActivityConfirmedEntity entity) throws Exception {
|
||||||
@ -515,7 +515,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (flagHistoryEntityList == null || flagHistoryEntityList.isEmpty()) {
|
if (flagHistoryEntityList == null || flagHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, flagHistoryEntityList, flagHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, flagHistoryEntityList, flagHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -527,7 +527,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "FlagHistoryEntity", flagHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindFlagHistory(PreparedStatement pstmt, FlagHistoryEntity entity) throws Exception {
|
public void bindFlagHistory(PreparedStatement pstmt, FlagHistoryEntity entity) throws Exception {
|
||||||
@ -547,7 +547,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (groupBeneficialOwnerHistoryEntityList == null || groupBeneficialOwnerHistoryEntityList.isEmpty()) {
|
if (groupBeneficialOwnerHistoryEntityList == null || groupBeneficialOwnerHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, groupBeneficialOwnerHistoryEntityList, groupBeneficialOwnerHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, groupBeneficialOwnerHistoryEntityList, groupBeneficialOwnerHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -559,7 +559,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "GroupBeneficialOwnerHistoryEntity", groupBeneficialOwnerHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindGroupBeneficialOwnerHistory(PreparedStatement pstmt, GroupBeneficialOwnerHistoryEntity entity) throws Exception {
|
public void bindGroupBeneficialOwnerHistory(PreparedStatement pstmt, GroupBeneficialOwnerHistoryEntity entity) throws Exception {
|
||||||
@ -580,7 +580,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (iceClassEntityList == null || iceClassEntityList.isEmpty()) {
|
if (iceClassEntityList == null || iceClassEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "IceClassEntity", iceClassEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "IceClassEntity", iceClassEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, iceClassEntityList, iceClassEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, iceClassEntityList, iceClassEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -592,7 +592,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "IceClassEntity", iceClassEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "IceClassEntity", iceClassEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindIceClass(PreparedStatement pstmt, IceClassEntity entity) throws Exception {
|
public void bindIceClass(PreparedStatement pstmt, IceClassEntity entity) throws Exception {
|
||||||
@ -610,7 +610,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (nameHistoryEntityList == null || nameHistoryEntityList.isEmpty()) {
|
if (nameHistoryEntityList == null || nameHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, nameHistoryEntityList, nameHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, nameHistoryEntityList, nameHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -622,7 +622,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "NameHistoryEntity", nameHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindNameHistory(PreparedStatement pstmt, NameHistoryEntity entity) throws Exception {
|
public void bindNameHistory(PreparedStatement pstmt, NameHistoryEntity entity) throws Exception {
|
||||||
@ -641,7 +641,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (operatorHistoryEntityList == null || operatorHistoryEntityList.isEmpty()) {
|
if (operatorHistoryEntityList == null || operatorHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, operatorHistoryEntityList, operatorHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, operatorHistoryEntityList, operatorHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -653,7 +653,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "OperatorHistoryEntity", operatorHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindOperatorHistory(PreparedStatement pstmt, OperatorHistoryEntity entity) throws Exception {
|
public void bindOperatorHistory(PreparedStatement pstmt, OperatorHistoryEntity entity) throws Exception {
|
||||||
@ -674,7 +674,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (ownerHistoryEntityList == null || ownerHistoryEntityList.isEmpty()) {
|
if (ownerHistoryEntityList == null || ownerHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, ownerHistoryEntityList, ownerHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, ownerHistoryEntityList, ownerHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -686,7 +686,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "OwnerHistoryEntity", ownerHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindOwnerHistory(PreparedStatement pstmt, OwnerHistoryEntity entity) throws Exception {
|
public void bindOwnerHistory(PreparedStatement pstmt, OwnerHistoryEntity entity) throws Exception {
|
||||||
@ -707,7 +707,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (pandIHistoryEntityList == null || pandIHistoryEntityList.isEmpty()) {
|
if (pandIHistoryEntityList == null || pandIHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, pandIHistoryEntityList, pandIHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, pandIHistoryEntityList, pandIHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -719,7 +719,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "PandIHistoryEntity", pandIHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindPandIHistory(PreparedStatement pstmt, PandIHistoryEntity entity) throws Exception {
|
public void bindPandIHistory(PreparedStatement pstmt, PandIHistoryEntity entity) throws Exception {
|
||||||
@ -740,7 +740,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (safetyManagementCertificateHistEntityList == null || safetyManagementCertificateHistEntityList.isEmpty()) {
|
if (safetyManagementCertificateHistEntityList == null || safetyManagementCertificateHistEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, safetyManagementCertificateHistEntityList, safetyManagementCertificateHistEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, safetyManagementCertificateHistEntityList, safetyManagementCertificateHistEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -752,7 +752,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "SafetyManagementCertificateHistEntity", safetyManagementCertificateHistEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindSafetyManagementCertificateHist(PreparedStatement pstmt, SafetyManagementCertificateHistEntity entity) throws Exception {
|
public void bindSafetyManagementCertificateHist(PreparedStatement pstmt, SafetyManagementCertificateHistEntity entity) throws Exception {
|
||||||
@ -781,7 +781,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (shipManagerHistoryEntityList == null || shipManagerHistoryEntityList.isEmpty()) {
|
if (shipManagerHistoryEntityList == null || shipManagerHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, shipManagerHistoryEntityList, shipManagerHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, shipManagerHistoryEntityList, shipManagerHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -793,7 +793,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "ShipManagerHistoryEntity", shipManagerHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindShipManagerHistory(PreparedStatement pstmt, ShipManagerHistoryEntity entity) throws Exception {
|
public void bindShipManagerHistory(PreparedStatement pstmt, ShipManagerHistoryEntity entity) throws Exception {
|
||||||
@ -814,7 +814,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (sisterShipLinksEntityList == null || sisterShipLinksEntityList.isEmpty()) {
|
if (sisterShipLinksEntityList == null || sisterShipLinksEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, sisterShipLinksEntityList, sisterShipLinksEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, sisterShipLinksEntityList, sisterShipLinksEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -826,7 +826,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "SisterShipLinksEntity", sisterShipLinksEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindSisterShipLinks(PreparedStatement pstmt, SisterShipLinksEntity entity) throws Exception {
|
public void bindSisterShipLinks(PreparedStatement pstmt, SisterShipLinksEntity entity) throws Exception {
|
||||||
@ -843,7 +843,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (specialFeatureEntityList == null || specialFeatureEntityList.isEmpty()) {
|
if (specialFeatureEntityList == null || specialFeatureEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, specialFeatureEntityList, specialFeatureEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, specialFeatureEntityList, specialFeatureEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -855,7 +855,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "SpecialFeatureEntity", specialFeatureEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindSpecialFeature(PreparedStatement pstmt, SpecialFeatureEntity entity) throws Exception {
|
public void bindSpecialFeature(PreparedStatement pstmt, SpecialFeatureEntity entity) throws Exception {
|
||||||
@ -874,7 +874,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (statusHistoryEntityList == null || statusHistoryEntityList.isEmpty()) {
|
if (statusHistoryEntityList == null || statusHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, statusHistoryEntityList, statusHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, statusHistoryEntityList, statusHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -886,7 +886,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "StatusHistoryEntity", statusHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindStatusHistory(PreparedStatement pstmt, StatusHistoryEntity entity) throws Exception {
|
public void bindStatusHistory(PreparedStatement pstmt, StatusHistoryEntity entity) throws Exception {
|
||||||
@ -906,7 +906,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (stowageCommodityEntityList == null || stowageCommodityEntityList.isEmpty()) {
|
if (stowageCommodityEntityList == null || stowageCommodityEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, stowageCommodityEntityList, stowageCommodityEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, stowageCommodityEntityList, stowageCommodityEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -918,7 +918,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "StowageCommodityEntity", stowageCommodityEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindStowageCommodity(PreparedStatement pstmt, StowageCommodityEntity entity) throws Exception {
|
public void bindStowageCommodity(PreparedStatement pstmt, StowageCommodityEntity entity) throws Exception {
|
||||||
@ -939,7 +939,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (surveyDatesEntityList == null || surveyDatesEntityList.isEmpty()) {
|
if (surveyDatesEntityList == null || surveyDatesEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, surveyDatesEntityList, surveyDatesEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, surveyDatesEntityList, surveyDatesEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -951,7 +951,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesEntity", surveyDatesEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindSurveyDates(PreparedStatement pstmt, SurveyDatesEntity entity) throws Exception {
|
public void bindSurveyDates(PreparedStatement pstmt, SurveyDatesEntity entity) throws Exception {
|
||||||
@ -974,7 +974,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (surveyDatesHistoryUniqueEntityList == null || surveyDatesHistoryUniqueEntityList.isEmpty()) {
|
if (surveyDatesHistoryUniqueEntityList == null || surveyDatesHistoryUniqueEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, surveyDatesHistoryUniqueEntityList, surveyDatesHistoryUniqueEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, surveyDatesHistoryUniqueEntityList, surveyDatesHistoryUniqueEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -986,7 +986,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "SurveyDatesHistoryUniqueEntity", surveyDatesHistoryUniqueEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindSurveyDatesHistoryUnique(PreparedStatement pstmt, SurveyDatesHistoryUniqueEntity entity) throws Exception {
|
public void bindSurveyDatesHistoryUnique(PreparedStatement pstmt, SurveyDatesHistoryUniqueEntity entity) throws Exception {
|
||||||
@ -1006,7 +1006,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (technicalManagerHistoryEntityList == null || technicalManagerHistoryEntityList.isEmpty()) {
|
if (technicalManagerHistoryEntityList == null || technicalManagerHistoryEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, technicalManagerHistoryEntityList, technicalManagerHistoryEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, technicalManagerHistoryEntityList, technicalManagerHistoryEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -1018,7 +1018,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "TechnicalManagerHistoryEntity", technicalManagerHistoryEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindTechnicalManagerHistory(PreparedStatement pstmt, TechnicalManagerHistoryEntity entity) throws Exception {
|
public void bindTechnicalManagerHistory(PreparedStatement pstmt, TechnicalManagerHistoryEntity entity) throws Exception {
|
||||||
@ -1039,7 +1039,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (thrustersEntityList == null || thrustersEntityList.isEmpty()) {
|
if (thrustersEntityList == null || thrustersEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "ThrustersEntity", thrustersEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "ThrustersEntity", thrustersEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, thrustersEntityList, thrustersEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, thrustersEntityList, thrustersEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -1051,7 +1051,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "ThrustersEntity", thrustersEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "ThrustersEntity", thrustersEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindThrusters(PreparedStatement pstmt, ThrustersEntity entity) throws Exception {
|
public void bindThrusters(PreparedStatement pstmt, ThrustersEntity entity) throws Exception {
|
||||||
@ -1075,7 +1075,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
if (tbCompanyDetailEntityList == null || tbCompanyDetailEntityList.isEmpty()) {
|
if (tbCompanyDetailEntityList == null || tbCompanyDetailEntityList.isEmpty()) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
log.debug("{} 배치 삽입 시작: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
|
// log.debug("{} 배치 삽입 시작: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
|
||||||
|
|
||||||
batchJdbcTemplate.batchUpdate(sql, tbCompanyDetailEntityList, tbCompanyDetailEntityList.size(),
|
batchJdbcTemplate.batchUpdate(sql, tbCompanyDetailEntityList, tbCompanyDetailEntityList.size(),
|
||||||
(ps, entity) -> {
|
(ps, entity) -> {
|
||||||
@ -1087,7 +1087,7 @@ public class ShipRepositoryImpl extends MultiDataSourceJdbcRepository<ShipInfoMs
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.debug("{} 배치 삽입 완료: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
|
// log.debug("{} 배치 삽입 완료: {} 건", "TbCompanyDetailEntity", tbCompanyDetailEntityList.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void bindTbCompanyDetail(PreparedStatement pstmt, TbCompanyDetailEntity entity) throws Exception {
|
public void bindTbCompanyDetail(PreparedStatement pstmt, TbCompanyDetailEntity entity) throws Exception {
|
||||||
|
|||||||
306
src/main/java/com/snp/batch/service/SyncStatusService.java
Normal file
306
src/main/java/com/snp/batch/service/SyncStatusService.java
Normal file
@ -0,0 +1,306 @@
|
|||||||
|
package com.snp.batch.service;

import com.snp.batch.global.config.BatchTableProperties;
import com.snp.batch.global.dto.SyncDataPreviewResponse;
import com.snp.batch.global.dto.SyncStatusResponse;
import com.snp.batch.global.dto.SyncStatusResponse.SyncDomainGroup;
import com.snp.batch.global.dto.SyncStatusResponse.SyncStatusSummary;
import com.snp.batch.global.dto.SyncStatusResponse.SyncTableStatus;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

import javax.sql.DataSource;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;

/**
 * Synchronization status service.
 * <ul>
 *   <li>Aggregates per-table N/P/S counts based on {@code batch_flag}</li>
 *   <li>Previews recently synchronized rows in the target schema</li>
 *   <li>Lists and resets records stuck in the 'P' (processing) state</li>
 * </ul>
 *
 * <p>Security note: every schema/table identifier interpolated into SQL comes
 * from {@link BatchTableProperties} (application configuration), never from
 * user input. A caller-supplied {@code tableKey} is only used as a lookup key
 * into the configured maps, so identifier injection is not possible here.
 */
@Slf4j
@Service
public class SyncStatusService {

    /** Display label per domain key (shown in the UI tabs). */
    private static final Map<String, String> DOMAIN_LABELS = Map.of(
            "ship", "Ship (선박)",
            "company", "Company (회사)",
            "event", "Event (사건)",
            "facility", "Facility (시설)",
            "psc", "PSC (검사)",
            "movements", "Movements (이동)",
            "code", "Code (코드)",
            "risk-compliance", "Risk & Compliance"
    );

    /** Fixed display order of the domain groups. */
    private static final List<String> DOMAIN_ORDER = List.of(
            "ship", "company", "event", "facility", "psc",
            "movements", "code", "risk-compliance"
    );

    /** Parallel-query cap; matches the HikariCP pool size (10) noted in the original. */
    private static final int MAX_PARALLEL_QUERIES = 10;

    private final JdbcTemplate businessJdbc;
    private final BatchTableProperties tableProps;

    // Snapshotted from config at construction; never reassigned, hence final.
    private final String sourceSchema;
    private final String targetSchema;
    private final Map<String, String> sourceTables;
    private final Map<String, String> targetTables;

    public SyncStatusService(@Qualifier("businessDataSource") DataSource businessDataSource,
                             BatchTableProperties tableProps) {
        this.businessJdbc = new JdbcTemplate(businessDataSource);
        this.tableProps = tableProps;
        this.sourceSchema = tableProps.getSourceSchema().getName();
        this.targetSchema = tableProps.getTargetSchema().getName();
        this.sourceTables = tableProps.getSourceSchema().getTables();
        this.targetTables = tableProps.getTargetSchema().getTables();
    }

    /**
     * Full synchronization status: per-table counts grouped by domain plus an
     * overall summary.
     *
     * <p>Tables are queried in parallel, bounded by {@link #MAX_PARALLEL_QUERIES}
     * so we never demand more connections than the pool provides. A failure on a
     * single table degrades to a zeroed status row instead of failing the whole
     * response.
     */
    public SyncStatusResponse getSyncStatus() {
        ExecutorService executor = Executors.newFixedThreadPool(
                Math.min(sourceTables.size(), MAX_PARALLEL_QUERIES));
        List<SyncTableStatus> allStatuses;
        try {
            List<CompletableFuture<SyncTableStatus>> futures = sourceTables.entrySet().stream()
                    .map(entry -> CompletableFuture.supplyAsync(() -> {
                        String tableKey = entry.getKey();
                        String sourceTable = entry.getValue();
                        String targetTable = targetTables.getOrDefault(tableKey, "");
                        try {
                            return queryTableStatus(tableKey, sourceTable, targetTable);
                        } catch (Exception e) {
                            log.warn("테이블 상태 조회 실패: {} ({})", tableKey, e.getMessage());
                            // Degrade gracefully: emit a zeroed row for this table.
                            return SyncTableStatus.builder()
                                    .tableKey(tableKey)
                                    .sourceTable(sourceTable)
                                    .targetTable(targetTable)
                                    .domain(extractDomain(tableKey))
                                    .pendingCount(0)
                                    .processingCount(0)
                                    .completedCount(0)
                                    .stuck(false)
                                    .build();
                        }
                    }, executor))
                    .collect(Collectors.toList());

            allStatuses = futures.stream()
                    .map(CompletableFuture::join)
                    .collect(Collectors.toList());
        } finally {
            // FIX: shut the pool down even when join()/submission throws,
            // so worker threads can never leak across requests.
            executor.shutdown();
        }

        // Group the flat table list by domain.
        Map<String, List<SyncTableStatus>> grouped = allStatuses.stream()
                .collect(Collectors.groupingBy(SyncTableStatus::getDomain));

        List<SyncDomainGroup> domains = DOMAIN_ORDER.stream()
                .filter(grouped::containsKey)
                .map(domain -> SyncDomainGroup.builder()
                        .domain(domain)
                        .domainLabel(DOMAIN_LABELS.getOrDefault(domain, domain))
                        .tables(grouped.get(domain))
                        .build())
                .collect(Collectors.toList());

        // Roll-up summary across all tables.
        long totalPending = allStatuses.stream().mapToLong(SyncTableStatus::getPendingCount).sum();
        long totalProcessing = allStatuses.stream().mapToLong(SyncTableStatus::getProcessingCount).sum();
        long totalCompleted = allStatuses.stream().mapToLong(SyncTableStatus::getCompletedCount).sum();
        int stuckTables = (int) allStatuses.stream().filter(SyncTableStatus::isStuck).count();

        SyncStatusSummary summary = SyncStatusSummary.builder()
                .totalTables(allStatuses.size())
                .pendingCount(totalPending)
                .processingCount(totalProcessing)
                .completedCount(totalCompleted)
                .stuckTables(stuckTables)
                .build();

        return SyncStatusResponse.builder()
                .summary(summary)
                .domains(domains)
                .build();
    }

    /**
     * Preview of the most recently synchronized rows of one target table.
     *
     * @param tableKey configured table key (must exist in the target-table map)
     * @param limit    max rows to return; clamped to at least 1 so the generated
     *                 {@code LIMIT} clause is always valid SQL
     * @throws IllegalArgumentException when the key is not configured
     */
    public SyncDataPreviewResponse getDataPreview(String tableKey, int limit) {
        String targetTable = targetTables.get(tableKey);
        if (targetTable == null) {
            throw new IllegalArgumentException("존재하지 않는 테이블 키: " + tableKey);
        }

        // FIX: guard against limit <= 0, which would render an invalid LIMIT clause.
        int safeLimit = Math.max(1, limit);

        String countSql = "SELECT COUNT(*) FROM %s.%s".formatted(targetSchema, targetTable);
        Long totalCount = businessJdbc.queryForObject(countSql, Long.class);

        String sql = "SELECT * FROM %s.%s ORDER BY mdfcn_dt DESC NULLS LAST LIMIT %d"
                .formatted(targetSchema, targetTable, safeLimit);

        List<Map<String, Object>> rows = businessJdbc.queryForList(sql);

        // When no rows came back, fall back to the information_schema column list
        // so the UI can still render a (empty) grid with headers.
        List<String> columns = rows.isEmpty()
                ? getTableColumns(targetSchema, targetTable)
                : new ArrayList<>(rows.get(0).keySet());

        return SyncDataPreviewResponse.builder()
                .tableKey(tableKey)
                .targetTable(targetTable)
                .targetSchema(targetSchema)
                .columns(columns)
                .rows(rows)
                .totalCount(totalCount != null ? totalCount : 0)
                .build();
    }

    /**
     * Lists source-schema records stuck in batch_flag='P' whose owning batch job
     * already reached status COMPLETED (i.e. the flag should have moved on).
     *
     * @param tableKey configured table key (must exist in the source-table map)
     * @param limit    max rows to return; clamped to at least 1
     * @throws IllegalArgumentException when the key is not configured
     */
    public SyncDataPreviewResponse getStuckRecords(String tableKey, int limit) {
        String sourceTable = sourceTables.get(tableKey);
        if (sourceTable == null) {
            throw new IllegalArgumentException("존재하지 않는 테이블 키: " + tableKey);
        }

        int safeLimit = Math.max(1, limit);

        String countSql = """
                SELECT COUNT(*)
                FROM %s.%s a
                INNER JOIN %s.batch_job_execution b
                        ON a.job_execution_id = b.job_execution_id
                       AND b.status = 'COMPLETED'
                WHERE a.batch_flag = 'P'
                """.formatted(sourceSchema, sourceTable, sourceSchema);
        Long totalCount = businessJdbc.queryForObject(countSql, Long.class);

        String sql = """
                SELECT a.*
                FROM %s.%s a
                INNER JOIN %s.batch_job_execution b
                        ON a.job_execution_id = b.job_execution_id
                       AND b.status = 'COMPLETED'
                WHERE a.batch_flag = 'P'
                ORDER BY a.mdfcn_dt DESC NULLS LAST
                LIMIT %d
                """.formatted(sourceSchema, sourceTable, sourceSchema, safeLimit);

        List<Map<String, Object>> rows = businessJdbc.queryForList(sql);

        // FIX: this preview reads a SOURCE-schema table, so the empty-result
        // column fallback must look up columns in sourceSchema (the original
        // always queried targetSchema and returned wrong/empty headers here).
        List<String> columns = rows.isEmpty()
                ? getTableColumns(sourceSchema, sourceTable)
                : new ArrayList<>(rows.get(0).keySet());

        return SyncDataPreviewResponse.builder()
                .tableKey(tableKey)
                .targetTable(sourceTable)
                .targetSchema(sourceSchema)
                .columns(columns)
                .rows(rows)
                .totalCount(totalCount != null ? totalCount : 0)
                .build();
    }

    /**
     * Resets stuck records back to batch_flag='N' so the next batch run picks
     * them up again, stamping mdfcn_dt/mdfr_id for auditability.
     *
     * <p>NOTE(review): unlike {@link #getStuckRecords}, this resets EVERY 'P'
     * row, including ones whose job is still RUNNING — confirm that is intended
     * before exposing this to non-admin users.
     *
     * @return number of rows updated
     * @throws IllegalArgumentException when the key is not configured
     */
    public int resetStuckRecords(String tableKey) {
        String sourceTable = sourceTables.get(tableKey);
        if (sourceTable == null) {
            throw new IllegalArgumentException("존재하지 않는 테이블 키: " + tableKey);
        }

        String sql = """
                UPDATE %s.%s
                   SET batch_flag = 'N'
                     , mdfcn_dt = CURRENT_TIMESTAMP
                     , mdfr_id = 'MANUAL_RESET'
                 WHERE batch_flag = 'P'
                """.formatted(sourceSchema, sourceTable);

        int updated = businessJdbc.update(sql);
        log.info("P→N 리셋 완료: {} ({}) {}건", tableKey, sourceTable, updated);
        return updated;
    }

    /**
     * Builds one table's status row: N/P/S counts restricted to rows whose
     * batch job reached COMPLETED (only fully collected/loaded data counts as
     * a sync candidate), plus the most recent sync timestamp.
     */
    private SyncTableStatus queryTableStatus(String tableKey, String sourceTable, String targetTable) {
        String sql = """
                SELECT a.batch_flag, COUNT(*) AS cnt
                FROM %s.%s a
                INNER JOIN %s.batch_job_execution b
                        ON a.job_execution_id = b.job_execution_id
                       AND b.status = 'COMPLETED'
                WHERE a.batch_flag IN ('N', 'P', 'S')
                GROUP BY a.batch_flag
                """.formatted(sourceSchema, sourceTable, sourceSchema);

        // Pre-seed all three flags so absent groups read as 0, not null.
        Map<String, Long> counts = new HashMap<>();
        counts.put("N", 0L);
        counts.put("P", 0L);
        counts.put("S", 0L);

        businessJdbc.query(sql, rs -> {
            counts.put(rs.getString("batch_flag"), rs.getLong("cnt"));
        });

        // Latest sync time = newest mdfcn_dt among 'S' rows of COMPLETED jobs.
        String lastSyncSql = """
                SELECT MAX(a.mdfcn_dt)
                FROM %s.%s a
                INNER JOIN %s.batch_job_execution b
                        ON a.job_execution_id = b.job_execution_id
                       AND b.status = 'COMPLETED'
                WHERE a.batch_flag = 'S'
                """.formatted(sourceSchema, sourceTable, sourceSchema);

        String lastSyncTime = null;
        try {
            lastSyncTime = businessJdbc.queryForObject(lastSyncSql, String.class);
        } catch (Exception e) {
            // Best-effort only; the status row is still useful without it.
            log.trace("최근 동기화 시간 조회 실패: {}", tableKey);
        }

        // Any 'P' row on a COMPLETED job means the table is stuck.
        boolean stuck = counts.get("P") > 0;

        return SyncTableStatus.builder()
                .tableKey(tableKey)
                .sourceTable(sourceTable)
                .targetTable(targetTable)
                .domain(extractDomain(tableKey))
                .pendingCount(counts.get("N"))
                .processingCount(counts.get("P"))
                .completedCount(counts.get("S"))
                .lastSyncTime(lastSyncTime)
                .stuck(stuck)
                .build();
    }

    /**
     * Derives the domain from a table key's prefix (text before the first '-').
     * A key with no dash is its own domain; the "risk" prefix maps to the
     * combined "risk-compliance" domain.
     */
    private String extractDomain(String tableKey) {
        int dashIndex = tableKey.indexOf('-');
        if (dashIndex < 0) return tableKey;
        String prefix = tableKey.substring(0, dashIndex);
        if ("risk".equals(prefix)) return "risk-compliance";
        return prefix;
    }

    /**
     * Ordered column names of {@code schema.tableName} from information_schema.
     *
     * @param schema schema to resolve the table in (source or target) — the
     *               original hard-coded targetSchema, which broke the
     *               stuck-record preview's empty-result fallback
     */
    private List<String> getTableColumns(String schema, String tableName) {
        String sql = """
                SELECT column_name FROM information_schema.columns
                WHERE table_schema = ? AND table_name = ?
                ORDER BY ordinal_position
                """;
        return businessJdbc.queryForList(sql, String.class, schema, tableName);
    }
}
|
||||||
@ -103,6 +103,8 @@ logging:
|
|||||||
# Custom Application Properties
|
# Custom Application Properties
|
||||||
app:
|
app:
|
||||||
batch:
|
batch:
|
||||||
|
chunk-size: 10000
|
||||||
|
sub-chunk-size: 5000 # Writer Sub-Chunk 분할 크기
|
||||||
api:
|
api:
|
||||||
url: https://api.example.com/data
|
url: https://api.example.com/data
|
||||||
timeout: 30000
|
timeout: 30000
|
||||||
|
|||||||
불러오는 중...
Reference in New Issue
Block a user