diff --git a/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java index d0c82e3..3bf752a 100644 --- a/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java +++ b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java @@ -1,6 +1,5 @@ package com.snp.batch.common.batch.repository; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.jdbc.core.JdbcTemplate; diff --git a/src/main/java/com/snp/batch/global/config/SwaggerConfig.java b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java index 9b51529..f03338d 100644 --- a/src/main/java/com/snp/batch/global/config/SwaggerConfig.java +++ b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java @@ -39,7 +39,6 @@ public class SwaggerConfig { .info(apiInfo()) .servers(List.of( new Server() - .url("http://localhost:" + serverPort + contextPath) .description("로컬 개발 서버"), new Server() @@ -89,4 +88,4 @@ public class SwaggerConfig { .name("Apache 2.0") .url("https://www.apache.org/licenses/LICENSE-2.0")); } -} +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java b/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java index 3cf87f0..b753867 100644 --- a/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java +++ b/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java @@ -80,8 +80,8 @@ public class PscInspectionJobConfig extends BaseJobConfig { - //private final JdbcTemplate jdbcTemplate; - - private final String fromDate; - private final String toDate; -// private List allImoNumbers; + private final String startDate; + private final String stopDate; private List allData; private int currentBatchIndex = 0; - private final int batchSize = 10; + private final int batchSize = 1000; public PscApiReader(@Qualifier("maritimeApiWebClient") WebClient webClient, - @Value("#{jobParameters['fromDate']}") String fromDate, - @Value("#{jobParameters['toDate']}") String toDate) { + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { super(webClient); - //this.jdbcTemplate = jdbcTemplate; - this.fromDate = fromDate; - this.toDate = toDate; + + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || + stopDate == null || stopDate.isBlank()) { + + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + enableChunkMode(); } @@ -45,7 +55,7 @@ public class PscApiReader extends BaseApiReader { @Override protected void resetCustomState() { this.currentBatchIndex = 0; -// this.allImoNumbers = null; + this.allData = null; } @Override @@ -53,37 +63,18 @@ public class PscApiReader extends BaseApiReader { return "/MaritimeWCF/PSCService.svc/RESTFul/GetPSCDataByLastUpdateDateRange"; } - private static final String GET_ALL_IMO_QUERY = - "SELECT imo_number FROM ship_data ORDER BY id"; -// "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_berthcalls) ORDER BY 
imo_number"; - @Override protected void beforeFetch() { - // 전처리 과정 - // Step 1. IMO 전체 번호 조회 - /*log.info("[{}] ship_data 테이블에서 IMO 번호 조회 시작...", getReaderName()); - - allImoNumbers = jdbcTemplate.queryForList(GET_ALL_IMO_QUERY, String.class); - int totalBatches = (int) Math.ceil((double) allImoNumbers.size() / batchSize); - - log.info("[{}] 총 {} 개의 IMO 번호 조회 완료", getReaderName(), allImoNumbers.size()); - log.info("[{}] {}개씩 배치로 분할하여 API 호출 예정", getReaderName(), batchSize); - log.info("[{}] 예상 배치 수: {} 개", getReaderName(), totalBatches); - - // API 통계 초기화 - updateApiCallStats(totalBatches, 0);*/ - log.info("[PSC] 요청 날짜 범위: {} → {}", fromDate, toDate); + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); } - @Override protected List fetchNextBatch() { - // 1) 처음 호출이면 API 한 번 호출해서 전체 데이터를 가져온다 if (allData == null) { - log.info("[PSC] 최초 API 조회 실행: {} ~ {}", fromDate, toDate); - allData = callApiWithBatch(fromDate, toDate); + log.info("[PSC] 최초 API 조회 실행: {} ~ {}", startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); if (allData == null || allData.isEmpty()) { log.warn("[PSC] 조회된 데이터 없음 → 종료"); @@ -116,20 +107,19 @@ public class PscApiReader extends BaseApiReader { return batch; } - // private List callApiWithBatch(String lrno) { - private List callApiWithBatch(String from, String to) { + private List callApiWithBatch(String startDate, String stopDate) { - String[] f = from.split("-"); - String[] t = to.split("-"); + LocalDateTime fromDay = parseToDateTime(startDate, true); + LocalDateTime toDay = parseToDateTime(stopDate, false); String url = getApiPath() + "?shipsCategory=0" - + "&fromYear=" + f[0] - + "&fromMonth=" + f[1] - + "&fromDay=" + f[2] - + "&toYear=" + t[0] - + "&toMonth=" + t[1] - + "&toDay=" + t[2]; + + "&fromYear=" + fromDay.getYear() + + "&fromMonth=" + fromDay.getMonthValue() + + "&fromDay=" + fromDay.getDayOfMonth() + + "&toYear=" + toDay.getYear() + + "&toMonth=" + toDay.getMonthValue() + + "&toDay=" + toDay.getDayOfMonth(); log.info("[PSC] API 호출 URL = {}", url); @@ -170,4 +160,18 @@ public class PscApiReader extends BaseApiReader { getReaderName(), allData.size()); } } + + private LocalDateTime parseToDateTime(String value, boolean isStart) { + + // yyyy-MM-dd 인 경우 + if (value.length() == 10) { + LocalDate date = LocalDate.parse(value); + return isStart + ? 
date.atStartOfDay() + : date.plusDays(1).atStartOfDay(); + } + + // yyyy-MM-ddTHH:mm:ssZ 인 경우 + return OffsetDateTime.parse(value).toLocalDateTime(); + } } diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepository.java b/src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepository.java deleted file mode 100644 index bf0162c..0000000 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepository.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.snp.batch.jobs.shipMovement.batch.repository; - -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; -import org.springframework.stereotype.Repository; -import org.springframework.stereotype.Service; - -import java.util.List; - -/** - * 선박 상세 정보 Repository 인터페이스 - */ - -public interface ShipMovementRepository { - - void saveAll(List entities); - - boolean existsByPortCallId(Integer portCallId); -} diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/writer/ShipMovementWriter.java b/src/main/java/com/snp/batch/jobs/shipMovement/batch/writer/ShipMovementWriter.java deleted file mode 100644 index 4368940..0000000 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/writer/ShipMovementWriter.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.snp.batch.jobs.shipMovement.batch.writer; - -import com.snp.batch.common.batch.writer.BaseWriter; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; -import com.snp.batch.jobs.shipMovement.batch.repository.ShipMovementRepository; -import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailRepository; -import com.snp.batch.jobs.shipdetail.batch.repository.ShipHashRepository; -import lombok.extern.slf4j.Slf4j; -import org.springframework.stereotype.Component; - -import java.util.List; - -/** - * 선박 상세 정보 Writer - */ -@Slf4j -@Component -public class ShipMovementWriter extends BaseWriter { - - private final ShipMovementRepository shipMovementRepository; - - - public ShipMovementWriter(ShipDetailRepository shipDetailRepository, ShipHashRepository shipHashRepository, ShipMovementRepository shipMovementRepositoryy) { - super("ShipMovement"); - this.shipMovementRepository = shipMovementRepositoryy; - } - - @Override - protected void writeItems(List items) throws Exception { - - if (items.isEmpty()) { return; } - - log.info("선박 상세 정보 데이터 저장: {} 건", items.size()); - - shipMovementRepository.saveAll(items); - log.info("선박 상세 정보 및 해시 데이터 저장 완료: {} 건", items.size()); - - } - -} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/config/AnchorageCallsRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/config/AnchorageCallsRangeJobConfig.java new file mode 100644 index 0000000..764e26b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/config/AnchorageCallsRangeJobConfig.java @@ -0,0 +1,114 @@ +package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.processor.AnchorageCallsProcessor; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader.AnchorageCallsRangeReader; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.writer.AnchorageCallsWriter; +import 
lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * AnchorageCallsReader (ship_data → Maritime API) + * ↓ (AnchorageCallsDto) + * AnchorageCallsProcessor + * ↓ (AnchorageCallsEntity) + * AnchorageCallsWriter + * ↓ (t_anchoragecall 테이블) + */ + +@Slf4j +@Configuration +public class AnchorageCallsRangeJobConfig extends BaseJobConfig { + + private final AnchorageCallsProcessor anchorageCallsProcessor; + private final AnchorageCallsWriter anchorageCallsWriter; + private final AnchorageCallsRangeReader anchorageCallsRangeReader; + + public AnchorageCallsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + AnchorageCallsProcessor anchorageCallsProcessor, + AnchorageCallsWriter anchorageCallsWriter, + AnchorageCallsRangeReader anchorageCallsRangeReader + ) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.anchorageCallsProcessor = anchorageCallsProcessor; + this.anchorageCallsWriter = anchorageCallsWriter; + this.anchorageCallsRangeReader = anchorageCallsRangeReader; + } + + @Override + protected String getJobName() { + return "AnchorageCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "AnchorageCallsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return anchorageCallsRangeReader; + } + + @Bean + @StepScope + public AnchorageCallsRangeReader anchorageCallsReader( + @Qualifier("maritimeServiceApiWebClient") WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + return new AnchorageCallsRangeReader(webClient, startDate, stopDate); + } + + @Override + protected ItemProcessor createProcessor() { + return anchorageCallsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return anchorageCallsWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "AnchorageCallsRangeImportJob") + public Job anchorageCallsRangeImportJob() { + return job(); + } + + @Bean(name = "AnchorageCallsRangeImportStep") + public Step anchorageCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/reader/AnchorageCallsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/reader/AnchorageCallsRangeReader.java new file mode 100644 index 0000000..66f9021 --- 
/dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/reader/AnchorageCallsRangeReader.java @@ -0,0 +1,153 @@ +package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... (1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class AnchorageCallsRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + + public AnchorageCallsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || + stopDate == null || stopDate.isBlank()) { + + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); + } + + @Override + protected String getReaderName() { + return "AnchorageCallsReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/AnchorageCalls"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + @Override + protected List fetchNextBatch() throws Exception { + // 1) 처음 호출이면 API 한 번 호출해서 전체 데이터를 가져온다 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int end = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 4) 현재 batch 리스트 잘라서 반환 + List batch = allData.subList(currentBatchIndex, end); + + int batchNum = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size()); + + // 다음 batch 인덱스 이동 + currentBatchIndex = end; + updateApiCallStats(totalBatches, batchNum); + + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate +"&stopDate=" + stopDate; + log.info("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(AnchorageCallsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepository.java b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepository.java index 5bcfa85..991f551 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepository.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepository.java @@ -1,6 +1,5 @@ package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.repository; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity; import java.util.List; diff --git a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepositoryImpl.java index 0a590a9..0b8e741 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/repository/AnchorageCallsRepositoryImpl.java @@ -3,8 +3,6 @@ package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.repository; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.repository.BaseJdbcRepository; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; -import com.snp.batch.jobs.shipMovement.batch.repository.ShipMovementRepository; import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity; import lombok.extern.slf4j.Slf4j; import org.springframework.jdbc.core.JdbcTemplate; @@ -32,7 +30,8 @@ public class AnchorageCallsRepositoryImpl extends BaseJdbcRepository { + + private final BerthCallsProcessor berthCallsProcessor; + private final BerthCallsWriter berthCallsWriter; + private final BerthCallsRangeReader berthCallsRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + private 
final ObjectMapper objectMapper; // ObjectMapper 주입 추가 + + public BerthCallsRangJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + BerthCallsProcessor berthCallsProcessor, + BerthCallsWriter berthCallsWriter, BerthCallsRangeReader berthCallsRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, + ObjectMapper objectMapper) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.berthCallsProcessor = berthCallsProcessor; + this.berthCallsWriter = berthCallsWriter; + this.berthCallsRangeReader = berthCallsRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + this.objectMapper = objectMapper; // ObjectMapper 초기화 + } + + @Override + protected String getJobName() { + return "BerthCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "BerthCallsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return berthCallsRangeReader; + } + @Bean + @StepScope + public BerthCallsRangeReader berthCallsRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + return new BerthCallsRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return berthCallsProcessor; + } + + @Override + protected ItemWriter createWriter() { + return berthCallsWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 + } + + @Bean(name = "BerthCallsRangeImportJob") + public Job berthCallsRangeImportJob() { + return job(); + } + + @Bean(name = "BerthCallsRangeImportStep") + public Step berthCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/reader/BerthCallsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/reader/BerthCallsRangeReader.java new file mode 100644 index 0000000..5ebfdf2 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/reader/BerthCallsRangeReader.java @@ -0,0 +1,154 @@ +package com.snp.batch.jobs.shipMovementBerthCalls.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementBerthCalls.batch.dto.BerthCallsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... 
(1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class BerthCallsRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + + public BerthCallsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + + super(webClient); + + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); + } + + @Override + protected String getReaderName() { + return "BerthCallsRangeReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/BerthCalls"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + @Override + protected List fetchNextBatch() throws Exception { + // 1) 처음 호출이면 API 한 번 호출해서 전체 데이터를 가져온다 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int end = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 4) 현재 batch 리스트 잘라서 반환 + List batch = allData.subList(currentBatchIndex, end); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + // 다음 batch 인덱스 이동 + currentBatchIndex = end; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate +"&stopDate=" + stopDate; +// "&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(BerthCallsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java index db5d696..6cf0f61 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java @@ -32,7 +32,8 @@ public class BerthCallsRepositoryImpl extends BaseJdbcRepository { + + private final CurrentlyAtProcessor currentlyAtProcessor; + private final CurrentlyAtWriter currentlyAtWriter; + private final CurrentlyAtRangeReader currentlyAtRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public CurrentlyAtRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + CurrentlyAtProcessor currentlyAtProcessor, + CurrentlyAtWriter currentlyAtWriter, CurrentlyAtRangeReader currentlyAtRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.currentlyAtProcessor = currentlyAtProcessor; + this.currentlyAtWriter = currentlyAtWriter; + this.currentlyAtRangeReader = currentlyAtRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "CurrentlyAtRangeImportJob"; + } + + @Override + protected String getStepName() { + return "currentlyAtRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return currentlyAtRangeReader; + } + @Bean + @StepScope + public CurrentlyAtRangeReader currentlyAtReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new CurrentlyAtRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor 
createProcessor() { + return currentlyAtProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return currentlyAtWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "CurrentlyAtRangeImportJob") + public Job currentlyAtRangeImportJob() { + return job(); + } + + @Bean(name = "CurrentlyAtRangeImportStep") + public Step currentlyAtRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/dto/CurrentlyAtDto.java b/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/dto/CurrentlyAtDto.java index f20bad1..121cdca 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/dto/CurrentlyAtDto.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/dto/CurrentlyAtDto.java @@ -1,6 +1,6 @@ package com.snp.batch.jobs.shipMovementCurrentlyAt.batch.dto; -import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsPositionDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsPositionDto; import lombok.Data; @Data diff --git a/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/reader/CurrentlyAtRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/reader/CurrentlyAtRangeReader.java new file mode 100644 index 0000000..1d7120e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/reader/CurrentlyAtRangeReader.java @@ -0,0 +1,154 @@ +package com.snp.batch.jobs.shipMovementCurrentlyAt.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.dto.CurrentlyAtDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + *
+ * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + *
+ * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... (1,718번의 Chunk) + *
+ * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class CurrentlyAtRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + + public CurrentlyAtRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + + super(webClient); + + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "CurrentlyAtReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/CurrentlyAt"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + // 전처리 과정 + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?dateCreatedUpdatedStart=" + startDate +"&dateCreatedUpdatedStop="+stopDate; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(CurrentlyAtDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/repository/CurrentlyAtRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/repository/CurrentlyAtRepositoryImpl.java index 9cb26eb..358731d 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/repository/CurrentlyAtRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementCurrentlyAt/batch/repository/CurrentlyAtRepositoryImpl.java @@ -27,7 +27,8 @@ public class CurrentlyAtRepositoryImpl extends BaseJdbcRepository { - @Override - public ShipMovementEntity mapRow(ResultSet rs, int rowNum) throws SQLException { - ShipMovementEntity entity = ShipMovementEntity.builder() - .id(rs.getLong("id")) - .imolRorIHSNumber(rs.getString("imolRorIHSNumber")) - .portCallId(rs.getObject("portCallId", Integer.class)) - .facilityId(rs.getObject("facilityId", Integer.class)) - .facilityName(rs.getString("facilityName")) - .facilityType(rs.getString("facilityType")) - .subFacilityId(rs.getObject("subFacilityId", Integer.class)) - .subFacilityName(rs.getString("subFacilityName")) - .subFacilityType(rs.getString("subFacilityType")) - .parentFacilityId(rs.getObject("parentFacilityId", Integer.class)) - .parentFacilityName(rs.getString("parentFacilityName")) - .parentFacilityType(rs.getString("parentFacilityType")) - .countryCode(rs.getString("countryCode")) - .countryName(rs.getString("countryName")) - .draught(rs.getObject("draught", Double.class)) - .latitude(rs.getObject("latitude", Double.class)) - .longitude(rs.getObject("longitude", Double.class)) - .destination(rs.getString("destination")) - .iso2(rs.getString("iso2")) - .position(parseJson(rs.getString("position"))) - .schemaType(rs.getString("schemaType")) - .build(); - - Timestamp movementDate = rs.getTimestamp("movementDate"); - if (movementDate != null) { - entity.setMovementDate(movementDate.toLocalDateTime()); - } - - return entity; - } - - private JsonNode parseJson(String json) { - try { - if (json == null) return null; - return new ObjectMapper().readTree(json); - } catch (Exception e) { - throw new RuntimeException("JSON 파싱 오류: 
" + json); - } - } - }*/ } diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/config/DarkActivityRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/config/DarkActivityRangeJobConfig.java new file mode 100644 index 0000000..1a7d521 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/config/DarkActivityRangeJobConfig.java @@ -0,0 +1,119 @@ +package com.snp.batch.jobs.shipMovementDarkActivity.batch.config; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.entity.DarkActivityEntity; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.processor.DarkActivityProcessor; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.writer.DarkActivityWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * DarkActivityReader (ship_data → Maritime API) + * ↓ (DarkActivityDto) + * DarkActivityProcessor + * ↓ (DarkActivityEntity) + * DarkActivityWriter + * ↓ (t_darkactivity 테이블) + */ + +@Slf4j +@Configuration +public class DarkActivityRangeJobConfig extends BaseJobConfig { + + private final DarkActivityProcessor darkActivityProcessor; + private final DarkActivityWriter darkActivityWriter; + private final DarkActivityRangeReader darkActivityRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public DarkActivityRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + DarkActivityProcessor darkActivityProcessor, + DarkActivityWriter darkActivityWriter, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, + ObjectMapper objectMapper, DarkActivityRangeReader darkActivityRangeReader) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.darkActivityProcessor = darkActivityProcessor; + this.darkActivityWriter = darkActivityWriter; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + this.darkActivityRangeReader = darkActivityRangeReader; + } + + @Override + protected String getJobName() { + return "DarkActivityRangeImportJob"; + } + + @Override + protected String getStepName() { + 
return "DarkActivityRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + // Reader 생성자 수정: ObjectMapper를 전달합니다. + return darkActivityRangeReader; + } + @Bean + @StepScope + public DarkActivityRangeReader darkActivityReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new DarkActivityRangeReader(maritimeApiWebClient, startDate, stopDate); + } + + @Override + protected ItemProcessor createProcessor() { + return darkActivityProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return darkActivityWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "DarkActivityRangeImportJob") + public Job darkActivityRangeImportJob() { + return job(); + } + + @Bean(name = "DarkActivityRangeImportStep") + public Step darkActivityRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java index 9cb7b81..2c05582 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java @@ -24,7 +24,7 @@ public class DarkActivityDto { private Double latitude; private Double longitude; - private AnchorageCallsPositionDto position; + private DarkActivityPositionDto position; private String eventStartDate; } diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityPositionDto.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityPositionDto.java new file mode 100644 index 0000000..d67d7db --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityPositionDto.java @@ -0,0 +1,17 @@ +package com.snp.batch.jobs.shipMovementDarkActivity.batch.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Data; + +@Data +public class DarkActivityPositionDto { + private boolean isNull; + private int stSrid; + private double lat; + @JsonProperty("long") + private double lon; + private double z; + private double m; + private boolean hasZ; + private boolean hasM; +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/reader/DarkActivityRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/reader/DarkActivityRangeReader.java new file mode 100644 index 0000000..2c72717 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/reader/DarkActivityRangeReader.java @@ -0,0 +1,182 @@ +package com.snp.batch.jobs.shipMovementDarkActivity.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. 
fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... (1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class DarkActivityRangeReader extends BaseApiReader { + + + private List allData; + // DB 해시값을 저장할 맵 + private int currentBatchIndex = 0; + private final int batchSize = 5000; + + // @Value("#{jobParameters['startDate']}") + private String startDate; +// private String startDate = "2025-01-01"; + + // @Value("#{jobParameters['stopDate']}") + private String stopDate; +// private String stopDate = "2025-12-31"; + + /*public DarkActivityRangeReader(WebClient webClient) { + super(webClient); + enableChunkMode(); // ✨ Chunk 모드 활성화 + }*/ + public DarkActivityRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "DarkActivityReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/DarkActivity"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + private static final String GET_ALL_IMO_QUERY = + "SELECT imo_number FROM ship_data ORDER BY id"; +// "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_darkactivity) ORDER BY imo_number"; + + /** + * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 + */ + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + * + * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + + } + + /** + * Query Parameter를 사용한 API 호출 + * + * @param startDate,stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate){ + String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate; +// +"&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(DarkActivityDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + /* log.info("[{}] 총 {} 개의 IMO 번호에 대한 API 호출 종료", + getReaderName(), allData.size());*/ + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java index 2055651..12ceb9e 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java @@ -32,7 +32,8 @@ public class DarkActivityRepositoryImpl extends BaseJdbcRepository { + + private final DestinationProcessor DestinationProcessor; + private final DestinationWriter DestinationWriter; + private final DestinationRangeReader destinationRangeReader; + private final WebClient maritimeApiWebClient; + + public DestinationsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + DestinationProcessor DestinationProcessor, + DestinationWriter DestinationWriter, DestinationRangeReader destinationRangeReader, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.DestinationProcessor = DestinationProcessor; + this.DestinationWriter = DestinationWriter; + this.destinationRangeReader = destinationRangeReader; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "DestinationsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "DestinationsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return destinationRangeReader; + } + @Bean + @StepScope + public DestinationRangeReader destinationRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new DestinationRangeReader(maritimeApiWebClient, startDate, stopDate); + } + 
@Override + protected ItemProcessor createProcessor() { + return DestinationProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return DestinationWriter; + } + + @Override + protected int getChunkSize() { + return 1000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "DestinationsRangeImportJob") + public Job destinationsRangeImportJob() { + return job(); + } + + @Bean(name = "DestinationsRangeImportStep") + public Step destinationsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/reader/DestinationRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/reader/DestinationRangeReader.java new file mode 100644 index 0000000..7ce34c4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/reader/DestinationRangeReader.java @@ -0,0 +1,161 @@ +package com.snp.batch.jobs.shipMovementDestination.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementDestination.batch.dto.DestinationDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + *
+ * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + *
+ * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... (1,718번의 Chunk) + *
+ * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class DestinationRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 1000; + private String startDate; + private String stopDate; + + public DestinationRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 + 한달 기간 도착예정지 정보 update + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate today = LocalDate.now(); + this.startDate = today + .atStartOfDay() + .format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + + this.stopDate = today + .plusDays(15) + .atStartOfDay() + .format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "DestinationsRange"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/Destinations"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + /** + * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 + */ + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + // 모든 배치 처리 완료 확인 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; +// +"&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(DestinationDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java index bea7875..7147469 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java @@ -27,12 +27,13 @@ public class DestinationRepositoryImpl extends BaseJdbcRepository entities) { if (entities == null || entities.isEmpty()) return; - log.info("Destinations 저장 시작 = {}건", entities.size()); batchInsert(entities); diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/config/ShipMovementJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsJobConfig.java similarity index 56% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/config/ShipMovementJobConfig.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsJobConfig.java index c840630..7971e39 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/config/ShipMovementJobConfig.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsJobConfig.java @@ -1,12 +1,12 @@ -package com.snp.batch.jobs.shipMovement.batch.config; +package com.snp.batch.jobs.shipMovementPortCalls.batch.config; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.config.BaseJobConfig; -import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; -import com.snp.batch.jobs.shipMovement.batch.processor.ShipMovementProcessor; -import com.snp.batch.jobs.shipMovement.batch.reader.ShipMovementReader; -import com.snp.batch.jobs.shipMovement.batch.writer.ShipMovementWriter; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; +import 
com.snp.batch.jobs.shipMovementPortCalls.batch.processor.PortCallsProcessor; +import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsReader; +import com.snp.batch.jobs.shipMovementPortCalls.batch.writer.PortCallsWriter; import lombok.extern.slf4j.Slf4j; import org.springframework.batch.core.Job; import org.springframework.batch.core.Step; @@ -37,34 +37,34 @@ import java.time.format.DateTimeFormatter; * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) * * 데이터 흐름: - * ShipMovementReader (ship_data → Maritime API) + * PortCallsReader (ship_data → Maritime API) * ↓ (PortCallDto) - * ShipMovementProcessor - * ↓ (ShipMovementEntity) + * PortCallsProcessor + * ↓ (PortCallsEntity) * ShipDetailDataWriter * ↓ (ship_movement 테이블) */ @Slf4j @Configuration -public class ShipMovementJobConfig extends BaseJobConfig { +public class ShipPortCallsJobConfig extends BaseJobConfig { - private final ShipMovementProcessor shipMovementProcessor; - private final ShipMovementWriter shipMovementWriter; + private final PortCallsProcessor portCallsProcessor; + private final PortCallsWriter portCallsWriter; private final JdbcTemplate jdbcTemplate; private final WebClient maritimeApiWebClient; private final ObjectMapper objectMapper; // ObjectMapper 주입 추가 - public ShipMovementJobConfig( + public ShipPortCallsJobConfig( JobRepository jobRepository, PlatformTransactionManager transactionManager, - ShipMovementProcessor shipMovementProcessor, - ShipMovementWriter shipMovementWriter, JdbcTemplate jdbcTemplate, + PortCallsProcessor portCallsProcessor, + PortCallsWriter portCallsWriter, JdbcTemplate jdbcTemplate, @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, ObjectMapper objectMapper) { // ObjectMapper 주입 추가 super(jobRepository, transactionManager); - this.shipMovementProcessor = shipMovementProcessor; - this.shipMovementWriter = shipMovementWriter; + this.portCallsProcessor = portCallsProcessor; + this.portCallsWriter = portCallsWriter; this.jdbcTemplate = jdbcTemplate; this.maritimeApiWebClient = maritimeApiWebClient; this.objectMapper = objectMapper; // ObjectMapper 초기화 @@ -72,30 +72,28 @@ public class ShipMovementJobConfig extends BaseJobConfig createReader() { // 타입 변경 // Reader 생성자 수정: ObjectMapper를 전달합니다. 
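/*
 * Editor's illustrative sketch (not part of this diff): BaseJobConfig's step()/job()
 * internals are not shown here, so the snippet below only assumes that it assembles a
 * standard Spring Batch 5 chunk-oriented step from createReader(), createProcessor(),
 * createWriter() and getChunkSize(). The builder usage and field names are assumptions.
 */
// import org.springframework.batch.core.Step;
// import org.springframework.batch.core.step.builder.StepBuilder;
Step portCallsImportStep = new StepBuilder(getStepName(), jobRepository)
        .<PortCallsDto, PortCallsEntity>chunk(getChunkSize(), transactionManager) // getChunkSize() = 1000 here
        .reader(createReader())
        .processor(createProcessor())
        .writer(createWriter())
        .build();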
- return shipMovementReader(null, null); - //return new ShipMovementReader(maritimeApiWebClient, jdbcTemplate, objectMapper); + return portCallsReader( null, null); + //return new PortCallsReader(maritimeApiWebClient, jdbcTemplate, objectMapper); } @Override - protected ItemProcessor createProcessor() { - return shipMovementProcessor; + protected ItemProcessor createProcessor() { + return portCallsProcessor; } @Override - protected ItemWriter createWriter() { // 타입 변경 - return shipMovementWriter; + protected ItemWriter createWriter() { // 타입 변경 + return portCallsWriter; } @Override protected int getChunkSize() { - return 50; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + return 1000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 } - @Bean(name = "shipMovementJob") - public Job shipMovementJob() { + @Bean(name = "PortCallsImportJob") + public Job portCallsImportJob() { return job(); } - @Bean(name = "shipMovementStep") - public Step shipMovementStep() { + @Bean(name = "PortCallsImportStep") + public Step portCallsImportStep() { return step(); } } diff --git a/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsRangeJobConfig.java new file mode 100644 index 0000000..702412e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsRangeJobConfig.java @@ -0,0 +1,114 @@ +package com.snp.batch.jobs.shipMovementPortCalls.batch.config; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.processor.PortCallsProcessor; +import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsRangeReader; +import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsReader; +import com.snp.batch.jobs.shipMovementPortCalls.batch.writer.PortCallsWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * PortCallsReader (ship_data → Maritime API) + * ↓ (PortCallDto) + * PortCallsProcessor + * ↓ (PortCallsEntity) + * ShipDetailDataWriter + * ↓ (ship_movement 테이블) + */ + +@Slf4j +@Configuration +public class ShipPortCallsRangeJobConfig extends BaseJobConfig { + + private final PortCallsProcessor portCallsProcessor; + private final 
PortCallsWriter portCallsWriter; + private final PortCallsRangeReader portCallsRangeReader; + public ShipPortCallsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + PortCallsProcessor portCallsProcessor, + PortCallsWriter portCallsWriter, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, + ObjectMapper objectMapper, PortCallsRangeReader portCallsRangeReader) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.portCallsProcessor = portCallsProcessor; + this.portCallsWriter = portCallsWriter; + this.portCallsRangeReader = portCallsRangeReader; + } + + @Override + protected String getJobName() { + return "PortCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "PortCallsRangeImportStep"; + } + + @Bean + @StepScope + public PortCallsRangeReader portCallsRangeReader( + @Qualifier("maritimeServiceApiWebClient") WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + return new PortCallsRangeReader(webClient, startDate, stopDate); + } + @Override + protected ItemReader createReader() { // 타입 변경 + return portCallsRangeReader; + } + + @Override + protected ItemProcessor createProcessor() { + return portCallsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return portCallsWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 5000개 가져오므로 chunk도 5000개씩 설정 + } + + @Bean(name = "PortCallsRangeImportJob") + public Job portCallsRangeImportJob() { + return job(); + } + + @Bean(name = "PortCallsRangeImportStep") + public Step portCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsDto.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsDto.java similarity index 92% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsDto.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsDto.java index c97db50..272626d 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsDto.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsDto.java @@ -1,4 +1,4 @@ -package com.snp.batch.jobs.shipMovement.batch.dto; +package com.snp.batch.jobs.shipMovementPortCalls.batch.dto; import lombok.Data; diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsPositionDto.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsPositionDto.java similarity index 85% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsPositionDto.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsPositionDto.java index 8906ba0..a960e8c 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsPositionDto.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsPositionDto.java @@ -1,4 +1,4 @@ -package com.snp.batch.jobs.shipMovement.batch.dto; +package com.snp.batch.jobs.shipMovementPortCalls.batch.dto; import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Data; diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/ShipMovementApiResponse.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/ShipMovementApiResponse.java similarity index 78% rename 
from src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/ShipMovementApiResponse.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/ShipMovementApiResponse.java index eb8fae8..f32b864 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/ShipMovementApiResponse.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/ShipMovementApiResponse.java @@ -1,4 +1,4 @@ -package com.snp.batch.jobs.shipMovement.batch.dto; +package com.snp.batch.jobs.shipMovementPortCalls.batch.dto; import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Data; diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/entity/ShipMovementEntity.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/entity/PortCallsEntity.java similarity index 92% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/entity/ShipMovementEntity.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/entity/PortCallsEntity.java index 50bee84..d519d63 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/entity/ShipMovementEntity.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/entity/PortCallsEntity.java @@ -1,4 +1,4 @@ -package com.snp.batch.jobs.shipMovement.batch.entity; +package com.snp.batch.jobs.shipMovementPortCalls.batch.entity; import com.fasterxml.jackson.databind.JsonNode; import jakarta.persistence.GeneratedValue; @@ -7,7 +7,6 @@ import jakarta.persistence.Id; import jakarta.persistence.SequenceGenerator; import lombok.AllArgsConstructor; import lombok.Data; -import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; import lombok.experimental.SuperBuilder; @@ -17,7 +16,7 @@ import java.time.LocalDateTime; @SuperBuilder @NoArgsConstructor @AllArgsConstructor -public class ShipMovementEntity { +public class PortCallsEntity { @Id @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "ship_movement_id_seq") @SequenceGenerator(name = "ship_movement_id_seq", sequenceName = "ship_movement_id_seq", allocationSize = 1) diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/processor/ShipMovementProcessor.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/processor/PortCallsProcessor.java similarity index 82% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/processor/ShipMovementProcessor.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/processor/PortCallsProcessor.java index 102e404..4df08aa 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/processor/ShipMovementProcessor.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/processor/PortCallsProcessor.java @@ -1,10 +1,10 @@ -package com.snp.batch.jobs.shipMovement.batch.processor; +package com.snp.batch.jobs.shipMovementPortCalls.batch.processor; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.processor.BaseProcessor; -import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; @@ -22,16 +22,16 @@ import java.time.LocalDateTime; */ @Slf4j @Component -public class ShipMovementProcessor extends BaseProcessor { +public 
class PortCallsProcessor extends BaseProcessor { private final ObjectMapper objectMapper; - public ShipMovementProcessor(ObjectMapper objectMapper) { + public PortCallsProcessor(ObjectMapper objectMapper) { this.objectMapper = objectMapper; } @Override - protected ShipMovementEntity processItem(PortCallsDto dto) throws Exception { + protected PortCallsEntity processItem(PortCallsDto dto) throws Exception { log.debug("선박 상세 정보 처리 시작: imoNumber={}, facilityName={}", dto.getImolRorIHSNumber(), dto.getFacilityName()); @@ -41,7 +41,7 @@ public class ShipMovementProcessor extends BaseProcessor { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + public PortCallsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || + stopDate == null || stopDate.isBlank()) { + + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); + } + + @Override + protected String getReaderName() { + return "PortCallsRangeReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/PortCalls"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 배치를 조회하여 반환 + * + * Spring Batch가 batchsize만큼 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int end = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 4) 현재 batch 리스트 잘라서 반환 + List batch = allData.subList(currentBatchIndex, end); + + int batchNum = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size()); + + // 다음 batch 인덱스 이동 + currentBatchIndex = end; + updateApiCallStats(totalBatches, batchNum); + + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; + log.info("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(PortCallsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/reader/ShipMovementReader.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/reader/PortCallsReader.java similarity index 86% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/reader/ShipMovementReader.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/reader/PortCallsReader.java index 1277732..086a902 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/reader/ShipMovementReader.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/reader/PortCallsReader.java @@ -1,9 +1,10 @@ -package com.snp.batch.jobs.shipMovement.batch.reader; +package com.snp.batch.jobs.shipMovementPortCalls.batch.reader; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.reader.BaseApiReader; -import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto; -import com.snp.batch.jobs.shipMovement.batch.dto.ShipMovementApiResponse; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.ShipMovementApiResponse; import lombok.extern.slf4j.Slf4j; import org.springframework.batch.core.configuration.annotation.StepScope; import org.springframework.beans.factory.annotation.Value; @@ -34,7 +35,7 @@ import java.util.*; */ @Slf4j @StepScope -public class ShipMovementReader extends BaseApiReader { +public class PortCallsReader extends BaseApiReader { private final JdbcTemplate jdbcTemplate; private final ObjectMapper objectMapper; @@ -46,16 +47,16 @@ public class ShipMovementReader extends BaseApiReader { private int currentBatchIndex = 0; private final int batchSize = 10; - @Value("#{jobParameters['startDate']}") - private String startDate; -// private String startDate = "2024-01-01"; + // @Value("#{jobParameters['startDate']}") +// private String startDate; + private String startDate = "2025-01-01"; - @Value("#{jobParameters['stopDate']}") - private String stopDate; - // private String stopDate = "2024-12-31"; + // @Value("#{jobParameters['stopDate']}") +// private 
String stopDate; + private String stopDate = "2025-12-31"; public void setStartDate(String startDate) {this.startDate = startDate;} public void setStopDate(String stopDate){this.stopDate=stopDate;} - public ShipMovementReader(WebClient webClient, JdbcTemplate jdbcTemplate, ObjectMapper objectMapper) { + public PortCallsReader(WebClient webClient, JdbcTemplate jdbcTemplate, ObjectMapper objectMapper) { super(webClient); this.jdbcTemplate = jdbcTemplate; this.objectMapper = objectMapper; @@ -76,7 +77,7 @@ public class ShipMovementReader extends BaseApiReader { @Override protected String getApiPath() { - return "/Movements"; + return "/Movements/PortCalls"; } @Override @@ -88,9 +89,6 @@ public class ShipMovementReader extends BaseApiReader { "SELECT imo_number FROM ship_data ORDER BY id"; // "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_ship_stpov_info) ORDER BY imo_number"; - private static final String FETCH_ALL_HASHES_QUERY = - "SELECT imo_number, ship_detail_hash FROM ship_detail_hash_json ORDER BY imo_number"; - /** * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 */ @@ -144,15 +142,16 @@ public class ShipMovementReader extends BaseApiReader { String imoParam = String.join(",", currentBatch); // API 호출 - ShipMovementApiResponse response = callApiWithBatch(imoParam); +// ShipMovementApiResponse response = callApiWithBatch(imoParam); + List response= callApiWithBatch(imoParam); // 다음 배치로 인덱스 이동 currentBatchIndex = endIndex; // 응답 처리 - if (response != null && response.getPortCallList() != null) { - List portCalls = response.getPortCallList(); + if (response != null) { + List portCalls = response; log.info("[{}] 배치 {}/{} 완료: {} 건 조회", getReaderName(), currentBatchNumber, totalBatches, portCalls.size()); @@ -194,7 +193,7 @@ public class ShipMovementReader extends BaseApiReader { * @param lrno 쉼표로 연결된 IMO 번호 (예: "1000019,1000021,...") * @return API 응답 */ - private ShipMovementApiResponse callApiWithBatch(String lrno) { + private List callApiWithBatch(String lrno) { String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate+"&lrno=" + lrno; log.debug("[{}] API 호출: {}", getReaderName(), url); @@ -202,7 +201,8 @@ public class ShipMovementReader extends BaseApiReader { return webClient.get() .uri(url) .retrieve() - .bodyToMono(ShipMovementApiResponse.class) + .bodyToFlux(PortCallsDto.class) + .collectList() .block(); } diff --git a/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepository.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepository.java new file mode 100644 index 0000000..bb45152 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepository.java @@ -0,0 +1,16 @@ +package com.snp.batch.jobs.shipMovementPortCalls.batch.repository; + +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; + +import java.util.List; + +/** + * 선박 상세 정보 Repository 인터페이스 + */ + +public interface PortCallsRepository { + + void saveAll(List entities); + + boolean existsByPortCallId(Integer portCallId); +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepositoryImpl.java similarity index 88% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepositoryImpl.java rename to 
src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepositoryImpl.java index 13a3ac0..eb3f6e6 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepositoryImpl.java @@ -1,9 +1,9 @@ -package com.snp.batch.jobs.shipMovement.batch.repository; +package com.snp.batch.jobs.shipMovementPortCalls.batch.repository; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.repository.BaseJdbcRepository; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; import lombok.extern.slf4j.Slf4j; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; @@ -21,16 +21,17 @@ import java.util.List; */ @Slf4j @Repository("ShipMovementRepository") -public class ShipMovementRepositoryImpl extends BaseJdbcRepository - implements ShipMovementRepository { +public class PortCallsRepositoryImpl extends BaseJdbcRepository + implements PortCallsRepository { - public ShipMovementRepositoryImpl(JdbcTemplate jdbcTemplate) { + public PortCallsRepositoryImpl(JdbcTemplate jdbcTemplate) { super(jdbcTemplate); } private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @Override protected String getTableName() { - return "snp_data.t_ship_stpov_info"; +// return "snp_data.t_ship_stpov_info"; + return "new_snp.t_ship_stpov_info"; } @Override @@ -39,14 +40,16 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository getRowMapper() { + protected RowMapper getRowMapper() { return new ShipMovementRowMapper(); } @Override - public void saveAll(List entities) { + public void saveAll(List entities) { if (entities == null || entities.isEmpty()) return; log.info("ShipMovement 저장 시작 = {}건", entities.size()); @@ -205,10 +208,10 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository { + private static class ShipMovementRowMapper implements RowMapper { @Override - public ShipMovementEntity mapRow(ResultSet rs, int rowNum) throws SQLException { - ShipMovementEntity entity = ShipMovementEntity.builder() + public PortCallsEntity mapRow(ResultSet rs, int rowNum) throws SQLException { + PortCallsEntity entity = PortCallsEntity.builder() .id(rs.getLong("id")) .imolRorIHSNumber(rs.getString("imolRorIHSNumber")) .portCallId(rs.getObject("portCallId", Integer.class)) diff --git a/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/writer/PortCallsWriter.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/writer/PortCallsWriter.java new file mode 100644 index 0000000..20fe890 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/writer/PortCallsWriter.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.shipMovementPortCalls.batch.writer; + +import com.snp.batch.common.batch.writer.BaseWriter; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.repository.PortCallsRepository; +import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailRepository; +import com.snp.batch.jobs.shipdetail.batch.repository.ShipHashRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +import java.util.List; + +/** + * 선박 상세 정보 Writer + */ +@Slf4j +@Component +public class 
PortCallsWriter extends BaseWriter { + + private final PortCallsRepository shipMovementRepository; + + + public PortCallsWriter(PortCallsRepository shipMovementRepository) { + super("ShipPortCalls"); + this.shipMovementRepository = shipMovementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + + if (items.isEmpty()) { return; } + + shipMovementRepository.saveAll(items); + log.info("PortCalls 데이터 저장 완료: {} 건", items.size()); + + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/config/StsOperationRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/config/StsOperationRangeJobConfig.java new file mode 100644 index 0000000..d348e3b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/config/StsOperationRangeJobConfig.java @@ -0,0 +1,117 @@ +package com.snp.batch.jobs.shipMovementStsOperations.batch.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader; +import com.snp.batch.jobs.shipMovementStsOperations.batch.dto.StsOperationDto; +import com.snp.batch.jobs.shipMovementStsOperations.batch.entity.StsOperationEntity; +import com.snp.batch.jobs.shipMovementStsOperations.batch.processor.StsOperationProcessor; +import com.snp.batch.jobs.shipMovementStsOperations.batch.reader.StsOperationRangeReader; +import com.snp.batch.jobs.shipMovementStsOperations.batch.writer.StsOperationWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * StsOperationReader (ship_data → Maritime API) + * ↓ (StsOperationDto) + * StsOperationProcessor + * ↓ (StsOperationEntity) + * StsOperationWriter + * ↓ (t_stsoperation 테이블) + */ + +@Slf4j +@Configuration +public class StsOperationRangeJobConfig extends BaseJobConfig { + + private final StsOperationProcessor stsOperationProcessor; + private final StsOperationWriter stsOperationWriter; + private final StsOperationRangeReader stsOperationRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public StsOperationRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + StsOperationProcessor stsOperationProcessor, + StsOperationWriter stsOperationWriter, StsOperationRangeReader stsOperationRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient 
maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.stsOperationProcessor = stsOperationProcessor; + this.stsOperationWriter = stsOperationWriter; + this.stsOperationRangeReader = stsOperationRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "STSOperationRangeImportJob"; + } + + @Override + protected String getStepName() { + return "STSOperationRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + // Reader 생성자 수정: ObjectMapper를 전달합니다. + return stsOperationRangeReader; + } + @Bean + @StepScope + public StsOperationRangeReader stsOperationRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new StsOperationRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return stsOperationProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return stsOperationWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "STSOperationRangeImportJob") + public Job STSOperationRangeImportJob() { + return job(); + } + + @Bean(name = "STSOperationRangeImportStep") + public Step STSOperationRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/reader/StsOperationRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/reader/StsOperationRangeReader.java new file mode 100644 index 0000000..0b2c56e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/reader/StsOperationRangeReader.java @@ -0,0 +1,164 @@ +package com.snp.batch.jobs.shipMovementStsOperations.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementStsOperations.batch.dto.StsOperationDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... 
(1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class StsOperationRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + + public StsOperationRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "StsOperationReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/StsOperations"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + /** + * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 + */ + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + * + * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * + * @param startDate,stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(StsOperationDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + /*log.info("[{}] 총 {} 개의 IMO 번호에 대한 API 호출 종료", + getReaderName(), allImoNumbers.size());*/ + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java index 4cebb94..8dabe87 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java @@ -30,7 +30,8 @@ public class StsOperationRepositoryImpl extends BaseJdbcRepository { + + private final TerminalCallsProcessor terminalCallsProcessor; + private final TerminalCallsWriter terminalCallsWriter; + private final TerminalCallsRangeReader terminalCallsRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public TerminalCallsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + TerminalCallsProcessor terminalCallsProcessor, + TerminalCallsWriter terminalCallsWriter, TerminalCallsRangeReader terminalCallsRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.terminalCallsProcessor = terminalCallsProcessor; + this.terminalCallsWriter = terminalCallsWriter; + this.terminalCallsRangeReader = terminalCallsRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "TerminalCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "TerminalCallsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return terminalCallsRangeReader; + } + @Bean + @StepScope + public TerminalCallsRangeReader terminalCallsRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // 
jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new TerminalCallsRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return terminalCallsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return terminalCallsWriter; + } + + @Override + protected int getChunkSize() { + return 1000; // API에서 100개씩 가져오므로 chunk도 1000으로 설정 + } + + @Bean(name = "TerminalCallsRangeImportJob") + public Job terminalCallsRangeImportJob() { + return job(); + } + + @Bean(name = "TerminalCallsRangeImportStep") + public Step terminalCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/reader/TerminalCallsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/reader/TerminalCallsRangeReader.java new file mode 100644 index 0000000..f5bff28 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/reader/TerminalCallsRangeReader.java @@ -0,0 +1,162 @@ +package com.snp.batch.jobs.shipMovementTerminalCalls.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto; +import com.snp.batch.jobs.shipMovementTerminalCalls.batch.dto.TerminalCallsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + *

+ * 기능:
+ * 1. startDate ~ stopDate 범위로 /Movements/TerminalCalls API 전체 조회 (최초 1회)
+ * 2. 조회 결과를 batchSize(1,000건) 단위로 분할하여 배치 처리
+ * 3. fetchNextBatch() 호출 시마다 다음 1,000건 반환
+ * 4. Spring Batch가 chunk 단위로 Process → Write 수행
+ *
+ * Chunk 처리 흐름:
+ * - beforeFetch() → 요청 날짜 범위 로그 출력 (1회)
+ * - fetchNextBatch() → 전체 조회 결과에서 다음 1,000건 반환
+ * - read() → 1건씩 반환
+ * - Processor/Writer → chunk 단위 처리
+ * - 반복... (모든 배치 소진 시 종료)
+ *
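/*
 * Editor's illustrative sketch (not part of this diff): how the startDate/stopDate job
 * parameters read via #{jobParameters['startDate']} / #{jobParameters['stopDate']} might
 * be supplied when launching one of these range jobs programmatically. The jobLauncher
 * and terminalCallsRangeImportJob references are assumptions; when the parameters are
 * omitted, the reader falls back to the previous day's one-day window.
 */
JobParameters params = new JobParametersBuilder()
        .addString("startDate", "2025-01-01T00:00:00Z")
        .addString("stopDate", "2025-01-02T00:00:00Z")
        .addLong("run.ts", System.currentTimeMillis()) // keep the JobInstance unique
        .toJobParameters();
jobLauncher.run(terminalCallsRangeImportJob, params);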

+ * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class TerminalCallsRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 1000; + private String startDate; + private String stopDate; + + public TerminalCallsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "TerminalCalls"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/TerminalCalls"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + *

+ * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @param startDate, stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(TerminalCallsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java index 66366e1..2eb31f0 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java @@ -30,7 +30,8 @@ public class TerminalCallsRepositoryImpl extends BaseJdbcRepository { + + private final TransitsProcessor transitsProcessor; + private final TransitsWriter transitsWriter; + private final TransitsRangeReader transitsRangeReader; + private final WebClient maritimeApiWebClient; + + public TransitsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + TransitsProcessor TransitsProcessor, + TransitsWriter transitsWriter, TransitsRangeReader transitsRangeReader, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.transitsProcessor = TransitsProcessor; + this.transitsWriter = transitsWriter; + this.transitsRangeReader = transitsRangeReader; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "TransitsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "TransitsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return transitsRangeReader; + } + @Bean + @StepScope + 
public TransitsRangeReader transitsRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new TransitsRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return transitsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return transitsWriter; + } + + @Override + protected int getChunkSize() { + return 1000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "TransitsRangeImportJob") + public Job transitsRangeImportJob() { + return job(); + } + + @Bean(name = "TransitsRangeImportStep") + public Step transitsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/reader/TransitsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/reader/TransitsRangeReader.java new file mode 100644 index 0000000..23abbb0 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/reader/TransitsRangeReader.java @@ -0,0 +1,159 @@ +package com.snp.batch.jobs.shipMovementTransits.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementTransits.batch.dto.TransitsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... 
(1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class TransitsRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 1000; + private String startDate; + private String stopDate; + + public TransitsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "Transits"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/Transits"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + * + * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @param startDate,stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate; +// +"&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(TransitsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitlsRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitsRepositoryImpl.java similarity index 90% rename from src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitlsRepositoryImpl.java rename to src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitsRepositoryImpl.java index af747c0..50f2802 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitlsRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitsRepositoryImpl.java @@ -18,16 +18,17 @@ import java.util.List; */ @Slf4j @Repository("TransitsRepository") -public class TransitlsRepositoryImpl extends BaseJdbcRepository +public class TransitsRepositoryImpl extends BaseJdbcRepository implements TransitsRepository { - public TransitlsRepositoryImpl(JdbcTemplate jdbcTemplate) { + public TransitsRepositoryImpl(JdbcTemplate jdbcTemplate) { super(jdbcTemplate); } private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @Override protected String getTableName() { - return "snp_data.t_transit"; +// return "snp_data.t_transit"; + return "new_snp.t_transit"; } @Override @@ -42,8 +43,10 @@ public class TransitlsRepositoryImpl extends BaseJdbcRepository