Merge branch 'dev_event' into develop

This commit is contained in:
hyojin kim 2025-12-23 12:36:48 +09:00
커밋 6d7b7c9eea
73개의 변경된 파일, 4159개의 추가 그리고 363개의 삭제

파일 보기

@ -1,6 +1,5 @@
package com.snp.batch.common.batch.repository;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.jdbc.core.JdbcTemplate;

파일 보기

@ -1,8 +1,8 @@
package com.snp.batch.jobs.event.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.event.batch.dto.EventDto;
import com.snp.batch.jobs.event.batch.entity.EventEntity;
import com.snp.batch.jobs.event.batch.dto.EventDetailDto;
import com.snp.batch.jobs.event.batch.entity.EventDetailEntity;
import com.snp.batch.jobs.event.batch.processor.EventDataProcessor;
import com.snp.batch.jobs.event.batch.reader.EventDataReader;
import com.snp.batch.jobs.event.batch.writer.EventDataWriter;
@ -23,7 +23,7 @@ import org.springframework.web.reactive.function.client.WebClient;
@Slf4j
@Configuration
public class EventImportJobConfig extends BaseJobConfig<EventDto, EventEntity> {
public class EventImportJobConfig extends BaseJobConfig<EventDetailDto, EventDetailEntity> {
private final JdbcTemplate jdbcTemplate;
private final WebClient maritimeApiWebClient;
@ -34,7 +34,7 @@ public class EventImportJobConfig extends BaseJobConfig<EventDto, EventEntity> {
@Override
protected int getChunkSize() {
return 5000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정
return 10; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정
}
public EventImportJobConfig(
JobRepository jobRepository,
@ -63,17 +63,17 @@ public class EventImportJobConfig extends BaseJobConfig<EventDto, EventEntity> {
}
@Override
protected ItemReader<EventDto> createReader() {
protected ItemReader<EventDetailDto> createReader() {
return new EventDataReader(maritimeApiWebClient, jdbcTemplate, batchDateService);
}
@Override
protected ItemProcessor<EventDto, EventEntity> createProcessor() {
protected ItemProcessor<EventDetailDto, EventDetailEntity> createProcessor() {
return eventDataProcessor;
}
@Override
protected ItemWriter<EventEntity> createWriter() { return eventDataWriter; }
protected ItemWriter<EventDetailEntity> createWriter() { return eventDataWriter; }
@Bean(name = "eventImportJob")
public Job eventImportJob() {

파일 보기

@ -0,0 +1,50 @@
package com.snp.batch.jobs.event.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.snp.batch.jobs.event.batch.entity.CargoEntity;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * DTO for one cargo item of a maritime event, bound from the "Cargoes"
 * array of an event detail payload (see EventDetailDto). JSON keys are the
 * upstream API's PascalCase names.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class CargoDto {
// ID of the parent event.
// NOTE(review): EventDetailDto.eventID is Long but this is Integer —
// confirm the upstream ID range fits in an int.
@JsonProperty("EventID")
private Integer eventID;
@JsonProperty("Sequence")
private String sequence;
@JsonProperty("IHSLRorIMOShipNo")
private String ihslrOrImoShipNo;
@JsonProperty("Type")
private String type;
@JsonProperty("Quantity")
private Integer quantity;
@JsonProperty("UnitShort")
private String unitShort;
@JsonProperty("Unit")
private String unit;
@JsonProperty("Text")
private String text;
@JsonProperty("CargoDamage")
private String cargoDamage;
@JsonProperty("Dangerous")
private String dangerous;
/**
 * Maps this DTO onto a new {@link CargoEntity}. Straight field-for-field
 * copy; no values are transformed or defaulted.
 */
public CargoEntity toEntity() {
return CargoEntity.builder()
.eventID(this.eventID)
.sequence(this.sequence)
.ihslrOrImoShipNo(this.ihslrOrImoShipNo)
.type(this.type)
.unit(this.unit)
.quantity(this.quantity)
.unitShort(this.unitShort)
.text(this.text)
.cargoDamage(this.cargoDamage)
.dangerous(this.dangerous)
.build();
}
}

파일 보기

@ -0,0 +1,109 @@
package com.snp.batch.jobs.event.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.List;
/**
 * DTO for a detailed maritime event as returned by the
 * GetEventDataByEventID endpoint (unwrapped from EventDetailResponse's
 * "MaritimeEvent" object). JSON keys are the upstream API's PascalCase
 * names; fields without @JsonProperty are populated manually by the reader.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EventDetailDto {
@JsonProperty("IncidentID")
private Integer incidentID;
@JsonProperty("EventID")
private Long eventID;
@JsonProperty("EventTypeID")
private Integer eventTypeID;
@JsonProperty("EventType")
private String eventType;
@JsonProperty("Significance")
private String significance;
@JsonProperty("Headline")
private String headline;
@JsonProperty("IHSLRorIMOShipNo")
private String ihslrOrImoShipNo;
@JsonProperty("VesselName")
private String vesselName;
@JsonProperty("VesselType")
private String vesselType;
@JsonProperty("VesselTypeDecode")
private String vesselTypeDecode;
// NOTE(review): JSON "VesselFlag" is mapped to the *code* field and "Flag"
// to the decoded name — confirm this matches the API's actual payload.
@JsonProperty("VesselFlag")
private String vesselFlagCode;
@JsonProperty("Flag")
private String vesselFlagDecode;
@JsonProperty("CargoLoadingStatusCode")
private String cargoLoadingStatusCode;
@JsonProperty("VesselDWT")
private Integer vesselDWT;
@JsonProperty("VesselGT")
private Integer vesselGT;
@JsonProperty("LDTAtTime")
private Integer ldtAtTime;
@JsonProperty("DateOfBuild")
private Integer dateOfBuild;
@JsonProperty("RegisteredOwnerCodeAtTime")
private String registeredOwnerCodeAtTime;
@JsonProperty("RegisteredOwnerAtTime")
private String registeredOwnerAtTime;
// NOTE(review): JSON key "RegisteredOwnerCoDAtTime" looks abbreviated
// ("CoD") while the field says "CountryCode" — verify against the API spec.
@JsonProperty("RegisteredOwnerCoDAtTime")
private String registeredOwnerCountryCodeAtTime;
@JsonProperty("RegisteredOwnerCountryAtTime")
private String registeredOwnerCountryAtTime;
@JsonProperty("Weather")
private String weather;
@JsonProperty("EventTypeDetail")
private String eventTypeDetail;
@JsonProperty("EventTypeDetailID")
private Integer eventTypeDetailID;
@JsonProperty("CasualtyAction")
private String casualtyAction;
@JsonProperty("LocationName")
private String locationName;
@JsonProperty("TownName")
private String townName;
@JsonProperty("MarsdenGridReference")
private Integer marsdenGridReference;
@JsonProperty("EnvironmentLocation")
private String environmentLocation;
@JsonProperty("CasualtyZone")
private String casualtyZone;
@JsonProperty("CasualtyZoneCode")
private String casualtyZoneCode;
@JsonProperty("CountryCode")
private String countryCode;
@JsonProperty("AttemptedBoarding")
private String attemptedBoarding;
@JsonProperty("Description")
private String description;
@JsonProperty("Pollutant")
private String pollutant;
@JsonProperty("PollutantUnit")
private String pollutantUnit;
@JsonProperty("PollutantQuantity")
private Double pollutantQuantity;
@JsonProperty("PublishedDate")
private String publishedDate;
@JsonProperty("Component2")
private String component2;
@JsonProperty("FiredUpon")
private String firedUpon;
// Not bound from JSON: set by EventDataReader from the event-list response
// (EventPeriod lookup) after the detail call returns.
private String eventStartDate;
private String eventEndDate;
// Child collections of the event payload; each DTO has its own toEntity().
@JsonProperty("Cargoes")
private List<CargoDto> cargoes;
@JsonProperty("HumanCasualties")
private List<HumanCasualtyDto> humanCasualties;
@JsonProperty("Relationships")
private List<RelationshipDto> relationships;
}

파일 보기

@ -0,0 +1,18 @@
package com.snp.batch.jobs.event.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
 * Top-level wrapper for the GetEventDataByEventID response body: the
 * service nests the single event under a "MaritimeEvent" JSON object.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class EventDetailResponse {
@JsonProperty("MaritimeEvent")
private EventDetailDto eventDetailDto;
}

파일 보기

@ -0,0 +1,12 @@
package com.snp.batch.jobs.event.batch.dto;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.time.LocalDateTime;
/**
 * Value holder for the start/end date strings of a single event, used as
 * the map value of the reader's event-ID → period lookup so the detail DTO
 * can be enriched with dates that only the event-list response carries.
 */
@Data
@AllArgsConstructor
public class EventPeriod {
    // Raw date strings as delivered by the event-list API; no parsing here.
    private String eventStartDate;
    private String eventEndDate;
}

파일 보기

@ -0,0 +1,35 @@
package com.snp.batch.jobs.event.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.snp.batch.jobs.event.batch.entity.HumanCasualtyEntity;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * DTO for one human-casualty record of a maritime event, bound from the
 * "HumanCasualties" array of an event detail payload (see EventDetailDto).
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class HumanCasualtyDto {
// ID of the parent event.
// NOTE(review): EventDetailDto.eventID is Long but this is Integer —
// confirm the upstream ID range fits in an int.
@JsonProperty("EventID")
private Integer eventID;
@JsonProperty("Scope")
private String scope;
@JsonProperty("Type")
private String type;
@JsonProperty("Qualifier")
private String qualifier;
@JsonProperty("Count")
private Integer count;
/**
 * Maps this DTO onto a new {@link HumanCasualtyEntity}. Straight
 * field-for-field copy; no values are transformed or defaulted.
 */
public HumanCasualtyEntity toEntity() {
return HumanCasualtyEntity.builder()
.eventID(this.eventID)
.scope(this.scope)
.type(this.type)
.qualifier(this.qualifier)
.count(this.count)
.build();
}
}

파일 보기

@ -0,0 +1,41 @@
package com.snp.batch.jobs.event.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.snp.batch.jobs.event.batch.entity.RelationshipEntity;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * DTO for one event-to-event relationship record, bound from the
 * "Relationships" array of an event detail payload (see EventDetailDto).
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RelationshipDto {
// NOTE(review): incidentID is String here but Integer on EventDetailDto —
// verify which type the API actually returns for this field.
@JsonProperty("IncidentID")
private String incidentID;
@JsonProperty("EventID")
private Integer eventID;
@JsonProperty("RelationshipType")
private String relationshipType;
@JsonProperty("RelationshipTypeCode")
private String relationshipTypeCode;
// The related (second) event of the relationship pair.
@JsonProperty("EventID2")
private Integer eventID2;
@JsonProperty("EventType")
private String eventType;
@JsonProperty("EventTypeCode")
private String eventTypeCode;
/**
 * Maps this DTO onto a new {@link RelationshipEntity}. Straight
 * field-for-field copy; no values are transformed or defaulted.
 */
public RelationshipEntity toEntity() {
return RelationshipEntity.builder()
.incidentID(this.incidentID)
.eventID(this.eventID)
.relationshipType(this.relationshipType)
.relationshipTypeCode(this.relationshipTypeCode)
.eventID2(this.eventID2)
.eventType(this.eventType)
.eventTypeCode(this.eventTypeCode)
.build();
}
}

파일 보기

@ -0,0 +1,26 @@
package com.snp.batch.jobs.event.batch.entity;
import com.snp.batch.common.batch.entity.BaseEntity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
/**
 * Persistence entity for one cargo row of a maritime event. Built from
 * {@link com.snp.batch.jobs.event.batch.dto.CargoDto#toEntity()}; field
 * names mirror the DTO one-to-one.
 */
@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
public class CargoEntity extends BaseEntity {
// FK-style reference to the parent event's ID.
private Integer eventID;
private String sequence;
private String ihslrOrImoShipNo;
private String type;
private Integer quantity;
private String unitShort;
private String unit;
private String text;
private String cargoDamage;
private String dangerous;
}

파일 보기

@ -0,0 +1,67 @@
package com.snp.batch.jobs.event.batch.entity;
import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateDeserializer;
import com.snp.batch.common.batch.entity.BaseEntity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.time.LocalDateTime;
import java.util.List;
/**
 * Persistence entity for a detailed maritime event, built from
 * EventDetailDto by the processor. Scalar fields map to columns of the
 * event-detail table; the trailing child lists are carried along so the
 * writer/repository can persist them through their own save methods.
 */
@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
public class EventDetailEntity extends BaseEntity {
private Integer incidentID;
private Long eventID;
private Integer eventTypeID;
private String eventType;
private String significance;
private String headline;
private String ihslrOrImoShipNo;
private String vesselName;
private String vesselType;
private String vesselTypeDecode;
private String vesselFlagCode;
private String vesselFlagDecode;
private String cargoLoadingStatusCode;
private Integer vesselDWT;
private Integer vesselGT;
private Integer ldtAtTime;
private Integer dateOfBuild;
private String registeredOwnerCodeAtTime;
private String registeredOwnerAtTime;
private String registeredOwnerCountryCodeAtTime;
private String registeredOwnerCountryAtTime;
private String weather;
private String eventTypeDetail;
private Integer eventTypeDetailID;
private String casualtyAction;
private String locationName;
private String townName;
private Integer marsdenGridReference;
private String environmentLocation;
private String casualtyZone;
private String casualtyZoneCode;
private String countryCode;
private String attemptedBoarding;
private String description;
private String pollutant;
private String pollutantUnit;
private Double pollutantQuantity;
// Dates are kept as raw strings end-to-end; no parsing happens in this
// entity. NOTE(review): consider java.time types at the persistence layer.
private String publishedDate;
private String component2;
private String firedUpon;
private String eventStartDate;
private String eventEndDate;
// Child rows belonging to this event; persisted separately (see
// EventRepository.saveCargoAll / saveHumanCasualtyAll / saveRelationshipAll).
private List<CargoEntity> cargoes;
private List<HumanCasualtyEntity> humanCasualties;
private List<RelationshipEntity> relationships;
}

파일 보기

@ -1,7 +1,6 @@
package com.snp.batch.jobs.event.batch.entity;
import com.snp.batch.common.batch.entity.BaseEntity;
import jakarta.persistence.Embedded;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;

파일 보기

@ -0,0 +1,21 @@
package com.snp.batch.jobs.event.batch.entity;
import com.snp.batch.common.batch.entity.BaseEntity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
/**
 * Persistence entity for one human-casualty row of a maritime event. Built
 * from {@link com.snp.batch.jobs.event.batch.dto.HumanCasualtyDto#toEntity()};
 * field names mirror the DTO one-to-one.
 */
@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
public class HumanCasualtyEntity extends BaseEntity {
// FK-style reference to the parent event's ID.
private Integer eventID;
private String scope;
private String type;
private String qualifier;
private Integer count;
}

파일 보기

@ -0,0 +1,23 @@
package com.snp.batch.jobs.event.batch.entity;
import com.snp.batch.common.batch.entity.BaseEntity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
/**
 * Persistence entity for one event-to-event relationship row. Built from
 * {@link com.snp.batch.jobs.event.batch.dto.RelationshipDto#toEntity()};
 * field names mirror the DTO one-to-one.
 */
@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
public class RelationshipEntity extends BaseEntity {
// NOTE(review): incidentID is String here but Integer on EventDetailEntity —
// align the types once the API's actual format is confirmed.
private String incidentID;
private Integer eventID;
private String relationshipType;
private String relationshipTypeCode;
// The related (second) event of the relationship pair.
private Integer eventID2;
private String eventType;
private String eventTypeCode;
}

파일 보기

@ -1,34 +1,75 @@
package com.snp.batch.jobs.event.batch.processor;
import com.snp.batch.common.batch.processor.BaseProcessor;
import com.snp.batch.jobs.event.batch.dto.EventDto;
import com.snp.batch.jobs.event.batch.entity.EventEntity;
import com.snp.batch.jobs.event.batch.dto.CargoDto;
import com.snp.batch.jobs.event.batch.dto.EventDetailDto;
import com.snp.batch.jobs.event.batch.dto.HumanCasualtyDto;
import com.snp.batch.jobs.event.batch.dto.RelationshipDto;
import com.snp.batch.jobs.event.batch.entity.EventDetailEntity;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.stream.Collectors;
@Slf4j
@Component
public class EventDataProcessor extends BaseProcessor<EventDto, EventEntity> {
public class EventDataProcessor extends BaseProcessor<EventDetailDto, EventDetailEntity> {
@Override
protected EventEntity processItem(EventDto dto) throws Exception {
log.debug("Event 데이터 처리 시작: Event ID = {}", dto.getEventId());
protected EventDetailEntity processItem(EventDetailDto dto) throws Exception {
log.debug("Event 데이터 처리 시작: Event ID = {}", dto.getEventID());
EventEntity entity = EventEntity.builder()
.incidentId(dto.getIncidentId())
.eventId(dto.getEventId())
.startDate(dto.getStartDate())
EventDetailEntity entity = EventDetailEntity.builder()
.eventID(dto.getEventID())
.incidentID(dto.getIncidentID())
.eventTypeID(dto.getEventTypeID())
.eventType(dto.getEventType())
.significance(dto.getSignificance())
.headline(dto.getHeadline())
.endDate(dto.getEndDate())
.ihslRorImoShipNo(dto.getIhslRorImoShipNo())
.ihslrOrImoShipNo(dto.getIhslrOrImoShipNo())
.vesselName(dto.getVesselName())
.vesselType(dto.getVesselType())
.vesselTypeDecode(dto.getVesselTypeDecode())
.vesselFlagCode(dto.getVesselFlagCode())
.vesselFlagDecode(dto.getVesselFlagDecode())
.cargoLoadingStatusCode(dto.getCargoLoadingStatusCode())
.vesselDWT(dto.getVesselDWT())
.vesselGT(dto.getVesselGT())
.ldtAtTime(dto.getLdtAtTime())
.dateOfBuild(dto.getDateOfBuild())
.registeredOwnerCodeAtTime(dto.getRegisteredOwnerCodeAtTime())
.registeredOwnerAtTime(dto.getRegisteredOwnerAtTime())
.registeredOwnerCountryCodeAtTime(dto.getRegisteredOwnerCountryCodeAtTime())
.registeredOwnerCountryAtTime(dto.getRegisteredOwnerCountryAtTime())
.weather(dto.getWeather())
.eventTypeDetail(dto.getEventTypeDetail())
.eventTypeDetailID(dto.getEventTypeDetailID())
.casualtyAction(dto.getCasualtyAction())
.locationName(dto.getLocationName())
.townName(dto.getTownName())
.marsdenGridReference(dto.getMarsdenGridReference())
.environmentLocation(dto.getEnvironmentLocation())
.casualtyZone(dto.getCasualtyZone())
.casualtyZoneCode(dto.getCasualtyZoneCode())
.countryCode(dto.getCountryCode())
.attemptedBoarding(dto.getAttemptedBoarding())
.description(dto.getDescription())
.pollutant(dto.getPollutant())
.pollutantUnit(dto.getPollutantUnit())
.pollutantQuantity(dto.getPollutantQuantity())
.publishedDate(dto.getPublishedDate())
.component2(dto.getComponent2())
.firedUpon(dto.getFiredUpon())
.eventStartDate(dto.getEventStartDate())
.eventEndDate(dto.getEventEndDate())
.cargoes(dto.getCargoes() != null ?
dto.getCargoes().stream().map(CargoDto::toEntity).collect(Collectors.toList()) : null)
.humanCasualties(dto.getHumanCasualties() != null ?
dto.getHumanCasualties().stream().map(HumanCasualtyDto::toEntity).collect(Collectors.toList()) : null)
.relationships(dto.getRelationships() != null ?
dto.getRelationships().stream().map(RelationshipDto::toEntity).collect(Collectors.toList()) : null)
.build();
log.debug("Event 데이터 처리 완료: Event ID = {}", dto.getEventId());
log.debug("Event 데이터 처리 완료: Event ID = {}", dto.getEventID());
return entity;
}

파일 보기

@ -1,20 +1,24 @@
package com.snp.batch.jobs.event.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.event.batch.dto.EventDto;
import com.snp.batch.jobs.event.batch.dto.EventResponse;
import com.snp.batch.jobs.event.batch.dto.*;
import com.snp.batch.jobs.event.batch.dto.EventDetailDto;
import com.snp.batch.jobs.event.batch.entity.EventDetailEntity;
import com.snp.batch.jobs.shipdetail.batch.dto.ShipDetailComparisonData;
import com.snp.batch.service.BatchDateService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.reactive.function.client.WebClient;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class EventDataReader extends BaseApiReader<EventDto> {
public class EventDataReader extends BaseApiReader<EventDetailDto> {
private Map<Long, EventPeriod> eventPeriodMap;
private final JdbcTemplate jdbcTemplate;
private final BatchDateService batchDateService; // BatchDateService 필드 추가
@ -22,6 +26,7 @@ public class EventDataReader extends BaseApiReader<EventDto> {
super(webClient);
this.jdbcTemplate = jdbcTemplate;
this.batchDateService = batchDateService;
enableChunkMode(); // Chunk 모드 활성화
}
@Override
@ -33,27 +38,163 @@ public class EventDataReader extends BaseApiReader<EventDto> {
protected String getApiPath() {
return "/MaritimeWCF/MaritimeAndTradeEventsService.svc/RESTFul/GetEventListByEventChangeDateRange";
}
protected String getApiKey() {return "EVENT_IMPORT_JOB";}
protected String getEventDetailApiPath() {
return "/MaritimeWCF/MaritimeAndTradeEventsService.svc/RESTFul/GetEventDataByEventID";
}
protected String getApiKey() {
return "EVENT_IMPORT_JOB";
}
// 배치 처리 상태
private List<Long> eventIds;
// DB 해시값을 저장할
private int currentBatchIndex = 0;
private final int batchSize = 1;
@Override
protected List<EventDto> fetchDataFromApi() {
try {
log.info("Event API 호출 시작");
EventResponse response = callEventApiWithBatch();
protected void resetCustomState() {
this.currentBatchIndex = 0;
this.eventIds = null;
this.eventPeriodMap = new HashMap<>();
}
@Override
protected void beforeFetch() {
// 1. 기간내 기록된 Event List 조회 (API 요청)
log.info("Event API 호출");
EventResponse response = callEventApiWithBatch();
// 2-1. Event List 에서 EventID List 추출
// TODO: 2-2. Event List 에서 Map<EventId,Map<StartDate,EndDate>> 추출
eventIds = extractEventIdList(response);
log.info("EvnetId List 추출 완료 : {} 개", eventIds.size());
eventPeriodMap = response.getMaritimeEvents().stream()
.filter(e -> e.getEventId() != null)
.collect(Collectors.toMap(
EventDto::getEventId,
e -> new EventPeriod(
e.getStartDate(),
e.getEndDate()
)
));
updateApiCallStats(eventIds.size(), 0);
}
@Override
protected List<EventDetailDto> fetchNextBatch() throws Exception {
// 3. EventID List Event Detail 조회 (API요청) : 청크단위 실행
// 모든 배치 처리 완료 확인
if (eventIds == null || currentBatchIndex >= eventIds.size()) {
return null; // Job 종료
}
// 현재 배치의 시작/ 인덱스 계산
int startIndex = currentBatchIndex;
int endIndex = Math.min(currentBatchIndex + batchSize, eventIds.size());
// 현재 배치의 IMO 번호 추출 (100개)
List<Long> currentBatch = eventIds.subList(startIndex, endIndex);
int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
int totalBatches = (int) Math.ceil((double) eventIds.size() / batchSize);
log.info("[{}] 배치 {}/{} 처리 중 (Event ID : {} 개)...",
getReaderName(), currentBatchNumber, totalBatches, currentBatch.size());
try {
// API 호출
EventDetailResponse response = callEventDetailApiWithBatch(currentBatch.get(0));
// 다음 배치로 인덱스 이동
currentBatchIndex = endIndex;
List<EventDetailDto> eventDetailList = new ArrayList<>();
// 응답 처리
if (response != null && response.getEventDetailDto() != null) {
// TODO: getEventDetailDto에 Map<EventId,Map<StartDate,EndDate>> 데이터 세팅
EventDetailDto detailDto = response.getEventDetailDto();
Long eventId = detailDto.getEventID();
EventPeriod period = eventPeriodMap.get(eventId);
if (period != null) {
detailDto.setEventStartDate(period.getEventStartDate());
detailDto.setEventEndDate(period.getEventEndDate());
}
eventDetailList.add(response.getEventDetailDto());
log.info("[{}] 배치 {}/{} 완료: {} 건 조회",
getReaderName(), currentBatchNumber, totalBatches, eventDetailList.size());
// API 호출 통계 업데이트
updateApiCallStats(totalBatches, currentBatchNumber);
// API 과부하 방지 (다음 배치 0.5초 대기)
if (currentBatchIndex < eventIds.size()) {
Thread.sleep(500);
}
return eventDetailList;
if (response != null && response.getMaritimeEvents() != null) {
log.info("API 응답 성공: 총 {} 개의 Event 데이터 수신", response.getEventCount());
return response.getMaritimeEvents();
} else {
log.warn("API 응답이 null이거나 Event 데이터가 없습니다");
return new ArrayList<>();
log.warn("[{}] 배치 {}/{} 응답 없음",
getReaderName(), currentBatchNumber, totalBatches);
// API 호출 통계 업데이트 (실패도 카운트)
updateApiCallStats(totalBatches, currentBatchNumber);
return Collections.emptyList();
}
} catch (Exception e) {
log.error("Event API 호출 실패", e);
log.error("에러 메시지: {}", e.getMessage());
return new ArrayList<>();
log.error("[{}] 배치 {}/{} 처리 중 오류: {}",
getReaderName(), currentBatchNumber, totalBatches, e.getMessage(), e);
// 오류 발생 시에도 다음 배치로 이동 (부분 실패 허용)
currentBatchIndex = endIndex;
// 리스트 반환 (Job 계속 진행)
return Collections.emptyList();
}
}
@Override
protected void afterFetch(List<EventDetailDto> data) {
int totalBatches = (int) Math.ceil((double) eventIds.size() / batchSize);
try {
if (data == null) {
// 3. 배치 성공 상태 업데이트 (트랜잭션 커밋 직전에 실행)
LocalDate successDate = LocalDate.now(); // 현재 배치 실행 시점의 날짜 (Reader의 toDay와 동일한 )
batchDateService.updateLastSuccessDate(getApiKey(), successDate);
log.info("batch_last_execution update 완료 : {}", getApiKey());
log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
log.info("[{}] 총 {} 개의 Event ID에 대한 API 호출 종료",
getReaderName(), eventIds.size());
}
} catch (Exception e) {
log.info("[{}] 전체 {} 개 배치 처리 실패", getReaderName(), totalBatches);
log.info("[{}] 총 {} 개의 Event ID에 대한 API 호출 종료",
getReaderName(), eventIds.size());
}
}
private List<Long> extractEventIdList(EventResponse response) {
if (response.getMaritimeEvents() == null) {
return Collections.emptyList();
}
return response.getMaritimeEvents().stream()
// ShipDto 객체에서 imoNumber 필드 (String 타입) 추출
.map(EventDto::getEventId)
// IMO 번호가 null이 아닌 경우만 필터링 (선택 사항이지만 안전성을 위해)
.filter(eventId -> eventId != null)
// 추출된 String imoNumber들을 List<String>으로 수집
.collect(Collectors.toList());
}
private EventResponse callEventApiWithBatch() {
@ -77,4 +218,25 @@ public class EventDataReader extends BaseApiReader<EventDto> {
.block();
}
private EventDetailResponse callEventDetailApiWithBatch(Long eventId) {
String url = getEventDetailApiPath();
log.info("[{}] API 호출: {}", getReaderName(), url);
return webClient.get()
.uri(url, uriBuilder -> uriBuilder
// 맵에서 파라미터 값을 동적으로 가져와 세팅
.queryParam("eventID", eventId)
.build())
.retrieve()
.bodyToMono(EventDetailResponse.class)
.block();
}
private LocalDateTime parseToLocalDate(String value) {
if (value == null || value.isBlank()) {
return null;
}
return LocalDateTime.parse(value);
}
}

파일 보기

@ -1,9 +1,15 @@
package com.snp.batch.jobs.event.batch.repository;
import com.snp.batch.jobs.event.batch.entity.EventEntity;
import com.snp.batch.jobs.event.batch.entity.CargoEntity;
import com.snp.batch.jobs.event.batch.entity.EventDetailEntity;
import com.snp.batch.jobs.event.batch.entity.HumanCasualtyEntity;
import com.snp.batch.jobs.event.batch.entity.RelationshipEntity;
import java.util.List;
public interface EventRepository {
void saveEventAll(List<EventEntity> items);
void saveEventAll(List<EventDetailEntity> items);
void saveCargoAll(List<CargoEntity> items);
void saveHumanCasualtyAll(List<HumanCasualtyEntity> items);
void saveRelationshipAll(List<RelationshipEntity> items);
}

파일 보기

@ -1,7 +1,12 @@
package com.snp.batch.jobs.event.batch.repository;
import com.snp.batch.common.batch.repository.BaseJdbcRepository;
import com.snp.batch.jobs.event.batch.entity.EventEntity;
import com.snp.batch.jobs.event.batch.entity.CargoEntity;
import com.snp.batch.jobs.event.batch.entity.EventDetailEntity;
import com.snp.batch.jobs.event.batch.entity.HumanCasualtyEntity;
import com.snp.batch.jobs.event.batch.entity.RelationshipEntity;
import com.snp.batch.jobs.shipdetail.batch.entity.GroupBeneficialOwnerHistoryEntity;
import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailSql;
import lombok.extern.slf4j.Slf4j;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
@ -13,7 +18,7 @@ import java.util.List;
@Slf4j
@Repository("EventRepository")
public class EventRepositoryImpl extends BaseJdbcRepository<EventEntity, Long> implements EventRepository {
public class EventRepositoryImpl extends BaseJdbcRepository<EventDetailEntity, Long> implements EventRepository {
public EventRepositoryImpl(JdbcTemplate jdbcTemplate) {
super(jdbcTemplate);
@ -25,12 +30,12 @@ public class EventRepositoryImpl extends BaseJdbcRepository<EventEntity, Long> i
}
@Override
protected RowMapper<EventEntity> getRowMapper() {
protected RowMapper<EventDetailEntity> getRowMapper() {
return null;
}
@Override
protected Long extractId(EventEntity entity) {
protected Long extractId(EventDetailEntity entity) {
return null;
}
@ -42,7 +47,7 @@ public class EventRepositoryImpl extends BaseJdbcRepository<EventEntity, Long> i
@Override
protected String getUpdateSql() {
return """
INSERT INTO snp_data.event (
INSERT INTO snp_data.event_detail (
Event_ID, Incident_ID, IHSLRorIMOShipNo, Vessel_Name, Vessel_Type,
Event_Type, Significance, Headline, Location_Name,
Published_Date, Event_Start_Date, Event_End_Date, batch_flag
@ -69,48 +74,170 @@ public class EventRepositoryImpl extends BaseJdbcRepository<EventEntity, Long> i
}
@Override
protected void setInsertParameters(PreparedStatement ps, EventEntity entity) throws Exception {
protected void setInsertParameters(PreparedStatement ps, EventDetailEntity entity) throws Exception {
}
@Override
protected void setUpdateParameters(PreparedStatement ps, EventEntity entity) throws Exception {
int idx = 1;
ps.setLong(idx++, entity.getEventId());
ps.setLong(idx++, entity.getIncidentId());
ps.setString(idx++, entity.getIhslRorImoShipNo());
ps.setString(idx++, entity.getVesselName());
ps.setString(idx++, entity.getVesselType());
ps.setString(idx++, entity.getEventType());
ps.setString(idx++, entity.getSignificance());
ps.setString(idx++, entity.getHeadline());
ps.setString(idx++, entity.getLocationName());
ps.setString(idx++, entity.getPublishedDate());
ps.setString(idx++, entity.getStartDate());
ps.setString(idx++, entity.getEndDate());
}
@Override
protected String getEntityName() {
return "EventEntity";
return "EventDetailEntity";
}
@Override
public void saveEventAll(List<EventEntity> items) {
if (items == null || items.isEmpty()) {
return;
}
jdbcTemplate.batchUpdate(getUpdateSql(), items, items.size(),
public void saveEventAll(List<EventDetailEntity> items) {
String entityName = "EventDetailEntity";
String sql = EventSql.getEventDetailUpdateSql();
jdbcTemplate.batchUpdate(sql, items, items.size(),
(ps, entity) -> {
try {
setUpdateParameters(ps, entity);
setUpdateParameters(ps, (EventDetailEntity) entity);
} catch (Exception e) {
log.error("배치 수정 파라미터 설정 실패", e);
throw new RuntimeException(e);
}
});
log.info("{} 전체 저장 완료: 수정={} 건", getEntityName(), items.size());
log.info("{} 배치 삽입 완료: {} 건", entityName, items.size());
}
@Override
public void saveCargoAll(List<CargoEntity> items) {
String entityName = "CargoEntity";
String sql = EventSql.getEventCargoSql();
jdbcTemplate.batchUpdate(sql, items, items.size(),
(ps, entity) -> {
try {
setCargoInsertParameters(ps, (CargoEntity) entity);
} catch (Exception e) {
log.error("배치 삽입 파라미터 설정 실패 - " + entityName, e);
throw new RuntimeException(e);
}
});
log.info("{} 배치 삽입 완료: {} 건", entityName, items.size());
}
@Override
public void saveHumanCasualtyAll(List<HumanCasualtyEntity> items) {
String entityName = "HumanCasualtyEntity";
String sql = EventSql.getEventHumanCasualtySql();
jdbcTemplate.batchUpdate(sql, items, items.size(),
(ps, entity) -> {
try {
setHumanCasualtyInsertParameters(ps, (HumanCasualtyEntity) entity);
} catch (Exception e) {
log.error("배치 삽입 파라미터 설정 실패 - " + entityName, e);
throw new RuntimeException(e);
}
});
log.info("{} 배치 삽입 완료: {} 건", entityName, items.size());
}
@Override
public void saveRelationshipAll(List<RelationshipEntity> items) {
String entityName = "RelationshipEntity";
String sql = EventSql.getEventRelationshipSql();
jdbcTemplate.batchUpdate(sql, items, items.size(),
(ps, entity) -> {
try {
setRelationshipInsertParameters(ps, (RelationshipEntity) entity);
} catch (Exception e) {
log.error("배치 삽입 파라미터 설정 실패 - " + entityName, e);
throw new RuntimeException(e);
}
});
log.info("{} 배치 삽입 완료: {} 건", entityName, items.size());
}
@Override
protected void setUpdateParameters(PreparedStatement ps, EventDetailEntity entity) throws Exception {
int idx = 1;
ps.setObject(idx++, entity.getEventID()); // event_id
ps.setObject(idx++, entity.getIncidentID()); // incident_id (누락됨)
ps.setObject(idx++, entity.getIhslrOrImoShipNo()); // ihslrorimoshipno (누락됨)
ps.setObject(idx++, entity.getPublishedDate()); // published_date (누락됨)
ps.setObject(idx++, entity.getEventStartDate()); // event_start_date
ps.setObject(idx++, entity.getEventEndDate()); // event_end_date
ps.setString(idx++, entity.getAttemptedBoarding()); // attempted_boarding
ps.setString(idx++, entity.getCargoLoadingStatusCode());// cargo_loading_status_code
ps.setString(idx++, entity.getCasualtyAction()); // casualty_action
ps.setString(idx++, entity.getCasualtyZone()); // casualty_zone
// 11~20
ps.setString(idx++, entity.getCasualtyZoneCode()); // casualty_zone_code
ps.setString(idx++, entity.getComponent2()); // component2
ps.setString(idx++, entity.getCountryCode()); // country_code
ps.setObject(idx++, entity.getDateOfBuild()); // date_of_build (Integer)
ps.setString(idx++, entity.getDescription()); // description
ps.setString(idx++, entity.getEnvironmentLocation()); // environment_location
ps.setString(idx++, entity.getLocationName()); // location_name (누락됨)
ps.setObject(idx++, entity.getMarsdenGridReference()); // marsden_grid_reference (Integer)
ps.setString(idx++, entity.getTownName()); // town_name
ps.setString(idx++, entity.getEventType()); // event_type (누락됨)
// 21~30
ps.setString(idx++, entity.getEventTypeDetail()); // event_type_detail
ps.setObject(idx++, entity.getEventTypeDetailID()); // event_type_detail_id (Integer)
ps.setObject(idx++, entity.getEventTypeID()); // event_type_id (Integer)
ps.setString(idx++, entity.getFiredUpon()); // fired_upon
ps.setString(idx++, entity.getHeadline()); // headline (누락됨)
ps.setObject(idx++, entity.getLdtAtTime()); // ldt_at_time (Integer)
ps.setString(idx++, entity.getSignificance()); // significance (누락됨)
ps.setString(idx++, entity.getWeather()); // weather
ps.setString(idx++, entity.getPollutant()); // pollutant
ps.setObject(idx++, entity.getPollutantQuantity()); // pollutant_quantity (Double)
// 31~42
ps.setString(idx++, entity.getPollutantUnit()); // pollutant_unit
ps.setString(idx++, entity.getRegisteredOwnerCodeAtTime()); // registered_owner_code_at_time
ps.setString(idx++, entity.getRegisteredOwnerAtTime()); // registered_owner_at_time
ps.setString(idx++, entity.getRegisteredOwnerCountryCodeAtTime()); // registered_owner_country_code_at_time
ps.setString(idx++, entity.getRegisteredOwnerCountryAtTime()); // registered_owner_country_at_time
ps.setObject(idx++, entity.getVesselDWT()); // vessel_dwt (Integer)
ps.setString(idx++, entity.getVesselFlagCode()); // vessel_flag_code
ps.setString(idx++, entity.getVesselFlagDecode()); // vessel_flag_decode (누락됨)
ps.setObject(idx++, entity.getVesselGT()); // vessel_gt (Integer)
ps.setString(idx++, entity.getVesselName()); // vessel_name (누락됨)
ps.setString(idx++, entity.getVesselType()); // vessel_type (누락됨)
ps.setString(idx++, entity.getVesselTypeDecode()); // vessel_type_decode
}
/**
 * Binds the 10 insert parameters for an event cargo row, in column order:
 * event_id, sequence, ihslrorimoshipno, type, quantity, unit_short, unit,
 * cargo_damage, dangerous, text (matches getEventCargoSql()).
 */
private void setCargoInsertParameters(PreparedStatement ps, CargoEntity entity) throws Exception {
    ps.setObject(1, entity.getEventID());
    ps.setString(2, entity.getSequence());
    ps.setString(3, entity.getIhslrOrImoShipNo());
    ps.setString(4, entity.getType());
    ps.setObject(5, entity.getQuantity());
    ps.setString(6, entity.getUnitShort());
    ps.setString(7, entity.getUnit());
    ps.setString(8, entity.getCargoDamage());
    ps.setString(9, entity.getDangerous());
    ps.setString(10, entity.getText());
}
/**
 * Binds the 5 insert parameters for a human-casualty row, in column order:
 * event_id, scope, type, qualifier, count (matches getEventHumanCasualtySql()).
 */
private void setHumanCasualtyInsertParameters(PreparedStatement ps, HumanCasualtyEntity entity) throws Exception {
    ps.setObject(1, entity.getEventID());
    ps.setString(2, entity.getScope());
    ps.setString(3, entity.getType());
    ps.setString(4, entity.getQualifier());
    ps.setObject(5, entity.getCount());
}
/**
 * Binds the 7 insert parameters for an event-relationship row, in column order:
 * incident_id, event_id, relationship_type, relationship_type_code,
 * event_id_2, event_type, event_type_code (matches getEventRelationshipSql()).
 */
private void setRelationshipInsertParameters(PreparedStatement ps, RelationshipEntity entity) throws Exception {
    ps.setString(1, entity.getIncidentID());
    ps.setObject(2, entity.getEventID());
    ps.setString(3, entity.getRelationshipType());
    ps.setString(4, entity.getRelationshipTypeCode());
    ps.setObject(5, entity.getEventID2());
    ps.setString(6, entity.getEventType());
    ps.setString(7, entity.getEventTypeCode());
}
private static void setStringOrNull(PreparedStatement ps, int index, String value) throws Exception {

파일 보기

@ -0,0 +1,126 @@
package com.snp.batch.jobs.event.batch.repository;
/**
 * Static holder for the event-import upsert SQL (PostgreSQL dialect).
 *
 * All statements are INSERT ... ON CONFLICT upserts; each one resets
 * batch_flag to 'N' on update so downstream consumers can detect refreshed
 * rows. The three timestamp placeholders in the detail statement are cast
 * with ::timestamptz because the parameters are bound as strings.
 *
 * Fix: utility class is now final with a private constructor so it cannot
 * be instantiated or subclassed.
 */
public final class EventSql {

    private EventSql() {
        // static SQL holder — not instantiable
    }

    /** Upsert for snp_data.event_detail (42 parameters, keyed on event_id). */
    public static String getEventDetailUpdateSql(){
        return """
            INSERT INTO snp_data.event_detail (
                event_id, incident_id, ihslrorimoshipno, published_date, event_start_date, event_end_date,
                attempted_boarding, cargo_loading_status_code, casualty_action,
                casualty_zone, casualty_zone_code, component2, country_code,
                date_of_build, description, environment_location, location_name,
                marsden_grid_reference, town_name, event_type, event_type_detail,
                event_type_detail_id, event_type_id, fired_upon, headline,
                ldt_at_time, significance, weather, pollutant, pollutant_quantity,
                pollutant_unit, registered_owner_code_at_time, registered_owner_at_time,
                registered_owner_country_code_at_time, registered_owner_country_at_time,
                vessel_dwt, vessel_flag_code, vessel_flag_decode, vessel_gt,
                vessel_name, vessel_type, vessel_type_decode
            )
            VALUES (
                ?, ?, ?, ?::timestamptz,?::timestamptz,?::timestamptz, ?, ?, ?, ?, ?, ?,
                ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
                ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
                ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
            )
            ON CONFLICT (event_id)
            DO UPDATE SET
                incident_id = EXCLUDED.incident_id,
                ihslrorimoshipno = EXCLUDED.ihslrorimoshipno,
                published_date = EXCLUDED.published_date,
                event_start_date = EXCLUDED.event_start_date,
                event_end_date = EXCLUDED.event_end_date,
                attempted_boarding = EXCLUDED.attempted_boarding,
                cargo_loading_status_code = EXCLUDED.cargo_loading_status_code,
                casualty_action = EXCLUDED.casualty_action,
                casualty_zone = EXCLUDED.casualty_zone,
                casualty_zone_code = EXCLUDED.casualty_zone_code,
                component2 = EXCLUDED.component2,
                country_code = EXCLUDED.country_code,
                date_of_build = EXCLUDED.date_of_build,
                description = EXCLUDED.description,
                environment_location = EXCLUDED.environment_location,
                location_name = EXCLUDED.location_name,
                marsden_grid_reference = EXCLUDED.marsden_grid_reference,
                town_name = EXCLUDED.town_name,
                event_type = EXCLUDED.event_type,
                event_type_detail = EXCLUDED.event_type_detail,
                event_type_detail_id = EXCLUDED.event_type_detail_id,
                event_type_id = EXCLUDED.event_type_id,
                fired_upon = EXCLUDED.fired_upon,
                headline = EXCLUDED.headline,
                ldt_at_time = EXCLUDED.ldt_at_time,
                significance = EXCLUDED.significance,
                weather = EXCLUDED.weather,
                pollutant = EXCLUDED.pollutant,
                pollutant_quantity = EXCLUDED.pollutant_quantity,
                pollutant_unit = EXCLUDED.pollutant_unit,
                registered_owner_code_at_time = EXCLUDED.registered_owner_code_at_time,
                registered_owner_at_time = EXCLUDED.registered_owner_at_time,
                registered_owner_country_code_at_time = EXCLUDED.registered_owner_country_code_at_time,
                registered_owner_country_at_time = EXCLUDED.registered_owner_country_at_time,
                vessel_dwt = EXCLUDED.vessel_dwt,
                vessel_flag_code = EXCLUDED.vessel_flag_code,
                vessel_flag_decode = EXCLUDED.vessel_flag_decode,
                vessel_gt = EXCLUDED.vessel_gt,
                vessel_name = EXCLUDED.vessel_name,
                vessel_type = EXCLUDED.vessel_type,
                vessel_type_decode = EXCLUDED.vessel_type_decode,
                batch_flag = 'N';
            """;
    }

    /**
     * Upsert for snp_data.event_cargo (10 parameters, keyed on
     * event_id + ihslrorimoshipno + type + sequence).
     */
    public static String getEventCargoSql(){
        return """
            INSERT INTO snp_data.event_cargo (
                event_id, "sequence", ihslrorimoshipno, "type", quantity,
                unit_short, unit, cargo_damage, dangerous, "text"
            )
            VALUES (
                ?, ?, ?, ?, ?,
                ?, ?, ?, ?, ?
            )
            ON CONFLICT (event_id, ihslrorimoshipno, "type", "sequence")
            DO UPDATE SET
                quantity = EXCLUDED.quantity,
                unit_short = EXCLUDED.unit_short,
                unit = EXCLUDED.unit,
                cargo_damage = EXCLUDED.cargo_damage,
                dangerous = EXCLUDED.dangerous,
                "text" = EXCLUDED."text",
                batch_flag = 'N';
            """;
    }

    /**
     * Upsert for snp_data.event_relationship (7 parameters, keyed on
     * incident_id + event_id + event_id_2 + event_type_code + relationship_type_code).
     */
    public static String getEventRelationshipSql(){
        return """
            INSERT INTO snp_data.event_relationship (
                incident_id, event_id, relationship_type, relationship_type_code,
                event_id_2, event_type, event_type_code
            )
            VALUES (
                ?, ?, ?, ?,
                ?, ?, ?
            )
            ON CONFLICT (incident_id, event_id, event_id_2, event_type_code, relationship_type_code)
            DO UPDATE SET
                relationship_type = EXCLUDED.relationship_type,
                event_type = EXCLUDED.event_type,
                batch_flag = 'N';
            """;
    }

    /**
     * Upsert for snp_data.event_humancasualty (5 parameters, keyed on
     * event_id + scope + type + qualifier).
     */
    public static String getEventHumanCasualtySql(){
        return """
            INSERT INTO snp_data.event_humancasualty (
                event_id, "scope", "type", qualifier, "count"
            )
            VALUES (
                ?, ?, ?, ?, ?
            )
            ON CONFLICT (event_id, "scope", "type", qualifier)
            DO UPDATE SET
                "count" = EXCLUDED."count",
                batch_flag = 'N';
            """;
    }
}

파일 보기

@ -1,35 +1,49 @@
package com.snp.batch.jobs.event.batch.writer;
import com.snp.batch.common.batch.writer.BaseWriter;
import com.snp.batch.jobs.event.batch.entity.EventEntity;
import com.snp.batch.jobs.event.batch.entity.EventDetailEntity;
import com.snp.batch.jobs.event.batch.repository.EventRepository;
import com.snp.batch.service.BatchDateService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.time.LocalDate;
import java.util.List;
// NOTE(review): this span is a diff rendering that interleaves the removed and the
// added lines of EventDataWriter without +/- markers (e.g. two class declarations,
// two constructors, two writeItems signatures). Review comments below apply to the
// NEW (EventDetailEntity) version.
@Slf4j
@Component
public class EventDataWriter extends BaseWriter<EventEntity> {
public class EventDataWriter extends BaseWriter<EventDetailEntity> {
private final EventRepository eventRepository;
private final BatchDateService batchDateService; // BatchDateService field retained
protected String getApiKey() {return "EVENT_IMPORT_JOB";}
public EventDataWriter(EventRepository eventRepository, BatchDateService batchDateService) {
// NOTE(review): the new constructor no longer receives BatchDateService, yet the new
// writeItems below calls batchDateService.updateLastSuccessDate(...) — if the old
// constructor line is the one removed, batchDateService stays null and writeItems
// throws NullPointerException on every chunk. Confirm the intended injection.
public EventDataWriter(EventRepository eventRepository) {
super("EventRepository");
this.eventRepository = eventRepository;
this.batchDateService = batchDateService;
}
@Override
protected void writeItems(List<EventEntity> items) throws Exception {
eventRepository.saveEventAll(items);
log.info("Event 저장 완료: 수정={} 건", items.size());
protected void writeItems(List<EventDetailEntity> items) throws Exception {
// Update batch success state (intended to run just before the transaction commit).
// NOTE(review): this runs before the writes and even for an empty chunk, so the
// "last success" date is advanced regardless of whether the save succeeds — confirm.
LocalDate successDate = LocalDate.now();
batchDateService.updateLastSuccessDate(getApiKey(), successDate);
log.info("batch_last_execution update 완료 : {}", getApiKey());
if (CollectionUtils.isEmpty(items)) {
return;
}
// 1. Save the EventDetail parent rows first (child rows reference event_id).
eventRepository.saveEventAll(items);
for (EventDetailEntity event : items) {
// 2. Save cargo child rows, if any.
if (!CollectionUtils.isEmpty(event.getCargoes())) {
eventRepository.saveCargoAll(event.getCargoes());
}
// 3. Save human-casualty child rows, if any.
if (!CollectionUtils.isEmpty(event.getHumanCasualties())) {
eventRepository.saveHumanCasualtyAll(event.getHumanCasualties());
}
// 4. Save relationship child rows, if any.
if (!CollectionUtils.isEmpty(event.getRelationships())) {
eventRepository.saveRelationshipAll(event.getRelationships());
}
}
log.info("Batch Write 완료: {} 건의 Event 처리됨", items.size());
}
}

파일 보기

@ -80,8 +80,8 @@ public class PscInspectionJobConfig extends BaseJobConfig<PscInspectionDto, PscI
@StepScope
public PscApiReader pscApiReader(
@Qualifier("maritimeApiWebClient") WebClient webClient,
@Value("#{jobParameters['fromDate']}") String fromDate,
@Value("#{jobParameters['toDate']}") String toDate
@Value("#{jobParameters['startDate']}") String fromDate,
@Value("#{jobParameters['stopDate']}") String toDate
) {
return new PscApiReader(webClient, fromDate, toDate);
}
@ -103,7 +103,7 @@ public class PscInspectionJobConfig extends BaseJobConfig<PscInspectionDto, PscI
@Override
protected int getChunkSize() {
return 10; // API에서 100개씩 가져오므로 chunk도 100으로 설정
return 1000; // API에서 100개씩 가져오므로 chunk도 100으로 설정
}
@Bean(name = "PSCDetailImportJob")

파일 보기

@ -8,9 +8,12 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
@ -18,22 +21,29 @@ import java.util.List;
@StepScope
public class PscApiReader extends BaseApiReader<PscInspectionDto> {
//private final JdbcTemplate jdbcTemplate;
private final String fromDate;
private final String toDate;
// private List<String> allImoNumbers;
private final String startDate;
private final String stopDate;
private List<PscInspectionDto> allData;
private int currentBatchIndex = 0;
private final int batchSize = 10;
private final int batchSize = 1000;
public PscApiReader(@Qualifier("maritimeApiWebClient") WebClient webClient,
@Value("#{jobParameters['fromDate']}") String fromDate,
@Value("#{jobParameters['toDate']}") String toDate) {
@Value("#{jobParameters['startDate']}") String startDate,
@Value("#{jobParameters['stopDate']}") String stopDate) {
super(webClient);
//this.jdbcTemplate = jdbcTemplate;
this.fromDate = fromDate;
this.toDate = toDate;
// 날짜가 없으면 전날 하루 기준
if (startDate == null || startDate.isBlank() ||
stopDate == null || stopDate.isBlank()) {
LocalDate yesterday = LocalDate.now().minusDays(1);
this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
} else {
this.startDate = startDate;
this.stopDate = stopDate;
}
enableChunkMode();
}
@ -45,7 +55,7 @@ public class PscApiReader extends BaseApiReader<PscInspectionDto> {
@Override
protected void resetCustomState() {
this.currentBatchIndex = 0;
// this.allImoNumbers = null;
this.allData = null;
}
@Override
@ -53,37 +63,18 @@ public class PscApiReader extends BaseApiReader<PscInspectionDto> {
return "/MaritimeWCF/PSCService.svc/RESTFul/GetPSCDataByLastUpdateDateRange";
}
private static final String GET_ALL_IMO_QUERY =
"SELECT imo_number FROM ship_data ORDER BY id";
// "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_berthcalls) ORDER BY imo_number";
@Override
protected void beforeFetch() {
// 전처리 과정
// Step 1. IMO 전체 번호 조회
/*log.info("[{}] ship_data 테이블에서 IMO 번호 조회 시작...", getReaderName());
allImoNumbers = jdbcTemplate.queryForList(GET_ALL_IMO_QUERY, String.class);
int totalBatches = (int) Math.ceil((double) allImoNumbers.size() / batchSize);
log.info("[{}] 총 {} 개의 IMO 번호 조회 완료", getReaderName(), allImoNumbers.size());
log.info("[{}] {}개씩 배치로 분할하여 API 호출 예정", getReaderName(), batchSize);
log.info("[{}] 예상 배치 수: {} 개", getReaderName(), totalBatches);
// API 통계 초기화
updateApiCallStats(totalBatches, 0);*/
log.info("[PSC] 요청 날짜 범위: {} → {}", fromDate, toDate);
log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
}
@Override
protected List<PscInspectionDto> fetchNextBatch() {
// 1) 처음 호출이면 API 호출해서 전체 데이터를 가져온다
if (allData == null) {
log.info("[PSC] 최초 API 조회 실행: {} ~ {}", fromDate, toDate);
allData = callApiWithBatch(fromDate, toDate);
log.info("[PSC] 최초 API 조회 실행: {} ~ {}", startDate, stopDate);
allData = callApiWithBatch(startDate, stopDate);
if (allData == null || allData.isEmpty()) {
log.warn("[PSC] 조회된 데이터 없음 → 종료");
@ -116,20 +107,19 @@ public class PscApiReader extends BaseApiReader<PscInspectionDto> {
return batch;
}
// private List<PscInspectionDto> callApiWithBatch(String lrno) {
private List<PscInspectionDto> callApiWithBatch(String from, String to) {
private List<PscInspectionDto> callApiWithBatch(String startDate, String stopDate) {
String[] f = from.split("-");
String[] t = to.split("-");
LocalDateTime fromDay = parseToDateTime(startDate, true);
LocalDateTime toDay = parseToDateTime(stopDate, false);
String url = getApiPath()
+ "?shipsCategory=0"
+ "&fromYear=" + f[0]
+ "&fromMonth=" + f[1]
+ "&fromDay=" + f[2]
+ "&toYear=" + t[0]
+ "&toMonth=" + t[1]
+ "&toDay=" + t[2];
+ "&fromYear=" + fromDay.getYear()
+ "&fromMonth=" + fromDay.getMonthValue()
+ "&fromDay=" + fromDay.getDayOfMonth()
+ "&toYear=" + toDay.getYear()
+ "&toMonth=" + toDay.getMonthValue()
+ "&toDay=" + toDay.getDayOfMonth();
log.info("[PSC] API 호출 URL = {}", url);
@ -170,4 +160,18 @@ public class PscApiReader extends BaseApiReader<PscInspectionDto> {
getReaderName(), allData.size());
}
}
/**
 * Normalizes a job-parameter date string to a LocalDateTime boundary.
 *
 * Accepted forms:
 *  - "yyyy-MM-dd" (length 10): expanded to the start of that day when
 *    {@code isStart} is true, otherwise to the start of the NEXT day
 *    (inclusive-start / exclusive-end range).
 *  - a full ISO timestamp with an offset, e.g. "2025-01-01T00:00:00Z"
 *    (the default produced by the constructor).
 *  - a plain ISO local timestamp, e.g. "2025-01-01T00:00:00" — fix: the
 *    original only tried OffsetDateTime.parse, which throws
 *    DateTimeParseException for offset-less values passed straight through
 *    from job parameters; fall back to LocalDateTime.parse for those.
 *
 * For full timestamps {@code isStart} is intentionally ignored — the caller
 * supplies exact boundaries in that case.
 */
private LocalDateTime parseToDateTime(String value, boolean isStart) {
    // "yyyy-MM-dd" case
    if (value.length() == 10) {
        LocalDate date = LocalDate.parse(value);
        return isStart
                ? date.atStartOfDay()
                : date.plusDays(1).atStartOfDay();
    }
    // Full timestamp case: offset form first, then plain local form.
    try {
        return OffsetDateTime.parse(value).toLocalDateTime();
    } catch (java.time.format.DateTimeParseException e) {
        return LocalDateTime.parse(value);
    }
}
}

파일 보기

@ -0,0 +1,94 @@
package com.snp.batch.jobs.risk.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.risk.batch.dto.RiskDto;
import com.snp.batch.jobs.risk.batch.entity.RiskEntity;
import com.snp.batch.jobs.risk.batch.processor.RiskDataProcessor;
import com.snp.batch.jobs.risk.batch.reader.RiskDataRangeReader;
import com.snp.batch.jobs.risk.batch.reader.RiskDataReader;
import com.snp.batch.jobs.risk.batch.writer.RiskDataWriter;
import com.snp.batch.jobs.sanction.batch.reader.ComplianceDataRangeReader;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Spring Batch configuration for the ranged Risk import job
 * ("RiskRangeImportJob"): reads updated risk records for a fromDate/toDate
 * job-parameter range via RiskDataRangeReader, maps them with
 * RiskDataProcessor and persists them with RiskDataWriter.
 */
@Slf4j
@Configuration
public class RiskImportRangeJobConfig extends BaseJobConfig<RiskDto, RiskEntity> {
private final WebClient maritimeServiceApiWebClient;
private final RiskDataProcessor riskDataProcessor;
private final RiskDataWriter riskDataWriter;
// Injected instance of the step-scoped reader bean declared below.
private final RiskDataRangeReader riskDataRangeReader;
@Override
protected int getChunkSize() {
return 5000; // chunk size 5000 to match the API page size of 5000
}
public RiskImportRangeJobConfig(
JobRepository jobRepository,
PlatformTransactionManager transactionManager,
RiskDataProcessor riskDataProcessor,
RiskDataWriter riskDataWriter,
// NOTE(review): jdbcTemplate is injected but never stored or used in this class.
JdbcTemplate jdbcTemplate,
@Qualifier("maritimeServiceApiWebClient")WebClient maritimeServiceApiWebClient, RiskDataRangeReader riskDataRangeReader) {
super(jobRepository, transactionManager);
this.maritimeServiceApiWebClient = maritimeServiceApiWebClient;
this.riskDataProcessor = riskDataProcessor;
this.riskDataWriter = riskDataWriter;
this.riskDataRangeReader = riskDataRangeReader;
}
@Override
protected String getJobName() {
return "RiskRangeImportJob";
}
@Override
protected String getStepName() {
return "RiskRangeImportStep";
}
@Override
protected ItemReader<RiskDto> createReader() {
return riskDataRangeReader;
}
// Step-scoped factory: binds the fromDate/toDate job parameters at step start.
@Bean
@StepScope
public RiskDataRangeReader riskDataRangeReader(
@Value("#{jobParameters['fromDate']}") String startDate,
@Value("#{jobParameters['toDate']}") String stopDate
) {
return new RiskDataRangeReader(maritimeServiceApiWebClient, startDate, stopDate);
}
@Override
protected ItemProcessor<RiskDto, RiskEntity> createProcessor() {
return riskDataProcessor;
}
@Override
protected ItemWriter<RiskEntity> createWriter() { return riskDataWriter; }
@Bean(name = "RiskRangeImportJob")
public Job riskRangeImportJob() {
return job();
}
@Bean(name = "RiskRangeImportStep")
public Step riskRangeImportStep() {
return step();
}
}

파일 보기

@ -0,0 +1,120 @@
package com.snp.batch.jobs.risk.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.risk.batch.dto.RiskDto;
import com.snp.batch.jobs.sanction.batch.dto.ComplianceDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
/**
 * Chunked API reader for updated risk records in a date range.
 *
 * Fetches the whole result set from /RiskAndCompliance/UpdatedRiskList on the
 * first fetchNextBatch() call, then hands it out in slices of batchSize (100)
 * until exhausted. If either date job parameter is missing, defaults to the
 * previous calendar day ("yesterday 00:00" to "today 00:00", with a literal
 * "Z" suffix appended to the ISO local timestamp).
 */
@Slf4j
public class RiskDataRangeReader extends BaseApiReader<RiskDto> {
// NOTE(review): this TODO block appears copied from the Core20/AIS reader and
// does not describe this class — confirm and remove if stale.
//TODO :
// 1. Fetch all Core20 IMO_NUMBERs
// 2. Request last AIS signal per IMO (max 5000 per call : repeat per chunk)
// 3. Response Data -> update into Core20 (repeat per chunk)
private List<RiskDto> allData;          // full API result, loaded lazily on first fetch
private int currentBatchIndex = 0;      // read cursor into allData
private final int batchSize = 100;      // slice size handed to the step per fetch
private String fromDate;
private String toDate;
public RiskDataRangeReader(WebClient webClient,
@Value("#{jobParameters['fromDate']}") String fromDate,
@Value("#{jobParameters['toDate']}") String toDate) {
super(webClient);
// If no dates supplied, default to the previous day (one full day).
if (fromDate == null || fromDate.isBlank() ||
toDate == null || toDate.isBlank()) {
LocalDate yesterday = LocalDate.now().minusDays(1);
this.fromDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
this.toDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
} else {
this.fromDate = fromDate;
this.toDate = toDate;
}
enableChunkMode();
}
@Override
protected String getReaderName() {
return "riskDataRangeReader";
}
// Reset pagination state so the reader can be reused across step executions.
@Override
protected void resetCustomState() {
this.currentBatchIndex = 0;
this.allData = null;
}
@Override
protected String getApiPath() {
return "/RiskAndCompliance/UpdatedRiskList";
}
@Override
protected void beforeFetch(){
log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), fromDate, toDate);
}
/**
 * Returns the next slice of up to batchSize items, or null when exhausted.
 * The full result set is fetched once on the first call.
 */
@Override
protected List<RiskDto> fetchNextBatch() throws Exception {
// 1) First call: load the entire result set from the API.
if (allData == null) {
log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), fromDate, toDate);
allData = callApiWithBatch(fromDate, toDate);
if (allData == null || allData.isEmpty()) {
log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
return null;
}
log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
}
// 2) All data consumed: signal end of input.
if (currentBatchIndex >= allData.size()) {
log.info("[{}] 모든 배치 처리 완료", getReaderName());
return null;
}
// 3) Compute the exclusive end index of this slice.
int end = Math.min(currentBatchIndex + batchSize, allData.size());
// 4) Slice out the current batch (view into allData, not a copy).
List<RiskDto> batch = allData.subList(currentBatchIndex, end);
int batchNum = (currentBatchIndex / batchSize) + 1;
int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size());
// Advance the cursor past this slice.
currentBatchIndex = end;
updateApiCallStats(totalBatches, batchNum);
return batch;
}
// Single GET with the range passed as fromDate/stopDate query parameters;
// blocks until the whole response body is deserialized.
private List<RiskDto> callApiWithBatch(String fromDate, String stopDate) {
String url = getApiPath() + "?fromDate=" + fromDate +"&stopDate=" + stopDate;
log.debug("[{}] API 호출: {}", getReaderName(), url);
return webClient.get()
.uri(url)
.retrieve()
.bodyToMono(new ParameterizedTypeReference<List<RiskDto>>() {})
.block();
}
}

파일 보기

@ -41,7 +41,7 @@ public class RiskRepositoryImpl extends BaseJdbcRepository<RiskEntity, Long> imp
@Override
protected String getUpdateSql() {
return """
INSERT INTO snp_data.risk (
INSERT INTO new_snp.risk (
lrno, lastupdated, riskdatamaintained, dayssincelastseenonais, dayssincelastseenonaisnarrative,
daysunderais, daysunderaisnarrative, imocorrectonais, imocorrectonaisnarrative, sailingundername,
sailingundernamenarrative, anomalousmessagesfrommmsi, anomalousmessagesfrommmsinarrative,

파일 보기

@ -0,0 +1,98 @@
package com.snp.batch.jobs.sanction.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.sanction.batch.dto.ComplianceDto;
import com.snp.batch.jobs.sanction.batch.entity.ComplianceEntity;
import com.snp.batch.jobs.sanction.batch.processor.ComplianceDataProcessor;
import com.snp.batch.jobs.sanction.batch.reader.ComplianceDataRangeReader;
import com.snp.batch.jobs.sanction.batch.reader.ComplianceDataReader;
import com.snp.batch.jobs.sanction.batch.writer.ComplianceDataWriter;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader.AnchorageCallsRangeReader;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Spring Batch configuration for the ranged sanction/compliance update job
 * ("SanctionRangeUpdateJob"): reads updated compliance records for a
 * fromDate/toDate job-parameter range via ComplianceDataRangeReader, maps
 * them with ComplianceDataProcessor and persists them with
 * ComplianceDataWriter.
 */
@Slf4j
@Configuration
public class SanctionUpdateRangeJobConfig extends BaseJobConfig<ComplianceDto, ComplianceEntity> {
// NOTE(review): jdbcTemplate is stored but not referenced anywhere in this class.
private final JdbcTemplate jdbcTemplate;
private final WebClient maritimeServiceApiWebClient;
private final ComplianceDataProcessor complianceDataProcessor;
private final ComplianceDataWriter complianceDataWriter;
// Injected instance of the step-scoped reader bean declared below.
private final ComplianceDataRangeReader complianceDataRangeReader;
@Override
protected int getChunkSize() {
return 5000; // chunk size 5000 to match the API page size of 5000
}
public SanctionUpdateRangeJobConfig(
JobRepository jobRepository,
PlatformTransactionManager transactionManager,
ComplianceDataProcessor complianceDataProcessor,
ComplianceDataWriter complianceDataWriter,
JdbcTemplate jdbcTemplate,
@Qualifier("maritimeServiceApiWebClient")WebClient maritimeServiceApiWebClient, ComplianceDataRangeReader complianceDataRangeReader) {
super(jobRepository, transactionManager);
this.jdbcTemplate = jdbcTemplate;
this.maritimeServiceApiWebClient = maritimeServiceApiWebClient;
this.complianceDataProcessor = complianceDataProcessor;
this.complianceDataWriter = complianceDataWriter;
this.complianceDataRangeReader = complianceDataRangeReader;
}
@Override
protected String getJobName() {
return "SanctionRangeUpdateJob";
}
@Override
protected String getStepName() {
return "SanctionRangeUpdateStep";
}
@Override
protected ItemReader<ComplianceDto> createReader() {
return complianceDataRangeReader;
}
// Step-scoped factory: binds the fromDate/toDate job parameters at step start.
@Bean
@StepScope
public ComplianceDataRangeReader complianceDataRangeReader(
@Value("#{jobParameters['fromDate']}") String startDate,
@Value("#{jobParameters['toDate']}") String stopDate
) {
return new ComplianceDataRangeReader(maritimeServiceApiWebClient, startDate, stopDate);
}
@Override
protected ItemProcessor<ComplianceDto, ComplianceEntity> createProcessor() {
return complianceDataProcessor;
}
@Override
protected ItemWriter<ComplianceEntity> createWriter() {
return complianceDataWriter;
}
@Bean(name = "SanctionRangeUpdateJob")
public Job sanctionRangeUpdateJob() {
return job();
}
@Bean(name = "SanctionRangeUpdateStep")
public Step sanctionRangeUpdateStep() {
return step();
}
}

파일 보기

@ -0,0 +1,128 @@
package com.snp.batch.jobs.sanction.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.sanction.batch.dto.ComplianceDto;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
/**
 * Chunked API reader for updated compliance records in a date range.
 *
 * Fetches the whole result set from /RiskAndCompliance/UpdatedComplianceList
 * on the first fetchNextBatch() call, then hands it out in slices of
 * batchSize (100) until exhausted. If either date job parameter is missing,
 * defaults to the previous calendar day ("yesterday 00:00" to "today 00:00",
 * with a literal "Z" suffix appended to the ISO local timestamp).
 */
@Slf4j
public class ComplianceDataRangeReader extends BaseApiReader<ComplianceDto> {
// NOTE(review): this TODO block appears copied from the Core20/AIS reader and
// does not describe this class — confirm and remove if stale.
//TODO :
// 1. Fetch all Core20 IMO_NUMBERs
// 2. Request last AIS signal per IMO (max 5000 per call : repeat per chunk)
// 3. Response Data -> update into Core20 (repeat per chunk)
//private final JdbcTemplate jdbcTemplate;
private List<ComplianceDto> allData;    // full API result, loaded lazily on first fetch
private int currentBatchIndex = 0;      // read cursor into allData
private final int batchSize = 100;      // slice size handed to the step per fetch
private String fromDate;
private String toDate;
public ComplianceDataRangeReader(WebClient webClient,
@Value("#{jobParameters['fromDate']}") String fromDate,
@Value("#{jobParameters['toDate']}") String toDate) {
super(webClient);
// If no dates supplied, default to the previous day (one full day).
if (fromDate == null || fromDate.isBlank() ||
toDate == null || toDate.isBlank()) {
LocalDate yesterday = LocalDate.now().minusDays(1);
this.fromDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
this.toDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
} else {
this.fromDate = fromDate;
this.toDate = toDate;
}
enableChunkMode();
}
@Override
protected String getReaderName() {
// NOTE(review): same name as the non-range ComplianceDataReader — confirm this
// is intentional; it makes log lines from the two readers indistinguishable.
return "ComplianceDataReader";
}
// Reset pagination state so the reader can be reused across step executions.
@Override
protected void resetCustomState() {
this.currentBatchIndex = 0;
this.allData = null;
}
@Override
protected String getApiPath() {
return "/RiskAndCompliance/UpdatedComplianceList";
}
private String getTargetTable(){
return "snp_data.core20";
}
// NOTE(review): this query (and getTargetTable above) is never executed in this
// class — leftover from the IMO-list variant of the reader; confirm and remove.
private String GET_CORE_IMO_LIST =
// "SELECT ihslrorimoshipno FROM " + getTargetTable() + " ORDER BY ihslrorimoshipno";
"select imo_number as ihslrorimoshipno from snp_data.ship_data order by imo_number";
@Override
protected void beforeFetch(){
log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), fromDate, toDate);
}
/**
 * Returns the next slice of up to batchSize items, or null when exhausted.
 * The full result set is fetched once on the first call.
 */
@Override
protected List<ComplianceDto> fetchNextBatch() throws Exception {
// 1) First call: load the entire result set from the API.
if (allData == null) {
log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), fromDate, toDate);
allData = callApiWithBatch(fromDate, toDate);
if (allData == null || allData.isEmpty()) {
log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
return null;
}
log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
}
// 2) All data consumed: signal end of input.
if (currentBatchIndex >= allData.size()) {
log.info("[{}] 모든 배치 처리 완료", getReaderName());
return null;
}
// 3) Compute the exclusive end index of this slice.
int end = Math.min(currentBatchIndex + batchSize, allData.size());
// 4) Slice out the current batch (view into allData, not a copy).
List<ComplianceDto> batch = allData.subList(currentBatchIndex, end);
int batchNum = (currentBatchIndex / batchSize) + 1;
int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size());
// Advance the cursor past this slice.
currentBatchIndex = end;
updateApiCallStats(totalBatches, batchNum);
return batch;
}
// Single GET with the range passed as fromDate/stopDate query parameters;
// blocks until the whole response body is deserialized.
private List<ComplianceDto> callApiWithBatch(String fromDate, String stopDate) {
String url = getApiPath() + "?fromDate=" + fromDate +"&stopDate=" + stopDate;
log.debug("[{}] API 호출: {}", getReaderName(), url);
return webClient.get()
.uri(url)
.retrieve()
.bodyToMono(new ParameterizedTypeReference<List<ComplianceDto>>() {})
.block();
}
}

파일 보기

@ -42,7 +42,7 @@ public class ComplianceRepositoryImpl extends BaseJdbcRepository<ComplianceEntit
@Override
protected String getUpdateSql() {
return """
INSERT INTO snp_data.compliance (
INSERT INTO new_snp.compliance (
lrimoshipno, dateamended, legaloverall, shipbessanctionlist, shipdarkactivityindicator,
shipdetailsnolongermaintained, shipeusanctionlist, shipflagdisputed, shipflagsanctionedcountry,
shiphistoricalflagsanctionedcountry, shipofacnonsdnsanctionlist, shipofacsanctionlist,

파일 보기

@ -1,18 +0,0 @@
package com.snp.batch.jobs.shipMovement.batch.repository;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import org.springframework.stereotype.Repository;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * Repository interface for ship movement data.
 * (Original Korean javadoc said "ship detail info" — likely a copy-paste
 * artifact from ShipDetailRepository.)
 */
public interface ShipMovementRepository {
/** Bulk-saves the given ship movement entities. */
void saveAll(List<ShipMovementEntity> entities);
/** Returns true if a record with the given port call id already exists. */
boolean existsByPortCallId(Integer portCallId);
}

파일 보기

@ -1,40 +0,0 @@
package com.snp.batch.jobs.shipMovement.batch.writer;
import com.snp.batch.common.batch.writer.BaseWriter;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import com.snp.batch.jobs.shipMovement.batch.repository.ShipMovementRepository;
import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailRepository;
import com.snp.batch.jobs.shipdetail.batch.repository.ShipHashRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Writer that bulk-persists ship movement entities via ShipMovementRepository.
 */
@Slf4j
@Component
public class ShipMovementWriter extends BaseWriter<ShipMovementEntity> {
    private final ShipMovementRepository shipMovementRepository;

    // NOTE(review): shipDetailRepository and shipHashRepository are injected but
    // never stored or used — parameters kept for constructor compatibility;
    // consider dropping them once call sites are updated.
    public ShipMovementWriter(ShipDetailRepository shipDetailRepository,
                              ShipHashRepository shipHashRepository,
                              ShipMovementRepository shipMovementRepository) { // fix: param was misspelled "shipMovementRepositoryy"
        super("ShipMovement");
        this.shipMovementRepository = shipMovementRepository;
    }

    /**
     * Saves the chunk of movement entities; no-op for an empty chunk.
     * NOTE(review): the completion log mentions hash data ("해시"), but no hash
     * repository is used here — message kept as-is, confirm intent.
     */
    @Override
    protected void writeItems(List<ShipMovementEntity> items) throws Exception {
        if (items.isEmpty()) {
            return;
        }
        log.info("선박 상세 정보 데이터 저장: {} 건", items.size());
        shipMovementRepository.saveAll(items);
        log.info("선박 상세 정보 및 해시 데이터 저장 완료: {} 건", items.size());
    }
}

파일 보기

@ -0,0 +1,114 @@
package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.processor.AnchorageCallsProcessor;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader.AnchorageCallsRangeReader;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.writer.AnchorageCallsWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Anchorage calls range import job configuration.
 *
 * Data flow:
 *   AnchorageCallsRangeReader (Maritime API /Movements/AnchorageCalls)
 *     -> AnchorageCallsDto
 *   AnchorageCallsProcessor
 *     -> AnchorageCallsEntity
 *   AnchorageCallsWriter
 *     -> t_anchoragecall table
 *
 * The reader takes startDate/stopDate from job parameters; when absent it
 * defaults to yesterday's one-day window (handled inside the reader).
 */
@Slf4j
@Configuration
public class AnchorageCallsRangeJobConfig extends BaseJobConfig<AnchorageCallsDto, AnchorageCallsEntity> {

    private final AnchorageCallsProcessor anchorageCallsProcessor;
    private final AnchorageCallsWriter anchorageCallsWriter;
    private final AnchorageCallsRangeReader anchorageCallsRangeReader;

    public AnchorageCallsRangeJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            AnchorageCallsProcessor anchorageCallsProcessor,
            AnchorageCallsWriter anchorageCallsWriter,
            AnchorageCallsRangeReader anchorageCallsRangeReader
    ) {
        super(jobRepository, transactionManager);
        this.anchorageCallsProcessor = anchorageCallsProcessor;
        this.anchorageCallsWriter = anchorageCallsWriter;
        this.anchorageCallsRangeReader = anchorageCallsRangeReader;
    }

    @Override
    protected String getJobName() {
        return "AnchorageCallsRangeImportJob";
    }

    @Override
    protected String getStepName() {
        return "AnchorageCallsRangeImportStep";
    }

    // Returns the step-scoped reader bean declared below (injected by Spring).
    @Override
    protected ItemReader<AnchorageCallsDto> createReader() {
        return anchorageCallsRangeReader;
    }

    /**
     * Step-scoped reader bean. startDate/stopDate come from job parameters and
     * may be null; the reader applies its own default range in that case.
     */
    @Bean
    @StepScope
    public AnchorageCallsRangeReader anchorageCallsReader(
            @Qualifier("maritimeServiceApiWebClient") WebClient webClient,
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        return new AnchorageCallsRangeReader(webClient, startDate, stopDate);
    }

    @Override
    protected ItemProcessor<AnchorageCallsDto, AnchorageCallsEntity> createProcessor() {
        return anchorageCallsProcessor;
    }

    @Override
    protected ItemWriter<AnchorageCallsEntity> createWriter() {
        return anchorageCallsWriter;
    }

    @Override
    protected int getChunkSize() {
        // Matches the reader's internal batch size of 5000 items per slice
        // (the previous comment saying 100 was stale).
        return 5000;
    }

    @Bean(name = "AnchorageCallsRangeImportJob")
    public Job anchorageCallsRangeImportJob() {
        return job();
    }

    @Bean(name = "AnchorageCallsRangeImportStep")
    public Step anchorageCallsRangeImportStep() {
        return step();
    }
}

파일 보기

@ -0,0 +1,153 @@
package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * Chunk-based reader for anchorage call movements.
 *
 * Behaviour:
 * 1. On the first fetchNextBatch() call, requests /Movements/AnchorageCalls
 *    once for the whole [startDate, stopDate) range and caches the result.
 * 2. Subsequent calls slice the cached list into batches of {@code batchSize}
 *    (5000) items, which Spring Batch then processes chunk by chunk.
 * 3. Returns null when the cached data is exhausted (end of input).
 *
 * When the startDate/stopDate job parameters are missing or blank, the range
 * defaults to yesterday 00:00 (inclusive) through today 00:00 (exclusive).
 */
@Slf4j
@StepScope
public class AnchorageCallsRangeReader extends BaseApiReader<AnchorageCallsDto> {

    private List<AnchorageCallsDto> allData;   // full API result, loaded lazily on first fetch
    private int currentBatchIndex = 0;         // start offset of the next slice into allData
    private final int batchSize = 5000;

    private final String startDate;            // ISO date-time string with trailing "Z"
    private final String stopDate;

    public AnchorageCallsRangeReader(WebClient webClient,
                                     @Value("#{jobParameters['startDate']}") String startDate,
                                     @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // Default to yesterday's one-day window when either parameter is absent.
        if (startDate == null || startDate.isBlank() ||
                stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode();
    }

    @Override
    protected String getReaderName() {
        return "AnchorageCallsReader";
    }

    @Override
    protected void resetCustomState() {
        // Reset so a restarted step re-queries the API from scratch.
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/AnchorageCalls";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Returns the next slice of up to {@code batchSize} items, or null when
     * there is no (more) data to read.
     */
    @Override
    protected List<AnchorageCallsDto> fetchNextBatch() throws Exception {
        // 1) Lazily load the complete result set on the first call.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) End of input: everything has been handed out.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Slice [currentBatchIndex, end) out of the cached list.
        int end = Math.min(currentBatchIndex + batchSize, allData.size());
        List<AnchorageCallsDto> batch = allData.subList(currentBatchIndex, end);
        int batchNum = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size());
        currentBatchIndex = end;
        updateApiCallStats(totalBatches, batchNum);
        return batch;
    }

    /**
     * Single API call with query parameters for the configured date range.
     *
     * @return deserialized response list (may be null if the body was empty)
     */
    private List<AnchorageCallsDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.info("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(AnchorageCallsDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<AnchorageCallsDto> data) {
        // data == null marks end of input. Also guard allData: it is still null
        // when the initial API call returned no data, which previously caused a
        // NullPointerException on allData.size().
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -1,6 +1,5 @@
package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.repository;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity;
import java.util.List;

파일 보기

@ -3,8 +3,6 @@ package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.repository;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.repository.BaseJdbcRepository;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import com.snp.batch.jobs.shipMovement.batch.repository.ShipMovementRepository;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity;
import lombok.extern.slf4j.Slf4j;
import org.springframework.jdbc.core.JdbcTemplate;
@ -32,7 +30,8 @@ public class AnchorageCallsRepositoryImpl extends BaseJdbcRepository<AnchorageCa
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_anchoragecall";
// return "snp_data.t_anchoragecall";
return "new_snp.t_anchoragecall";
}
@Override
@ -47,8 +46,10 @@ public class AnchorageCallsRepositoryImpl extends BaseJdbcRepository<AnchorageCa
@Override
public String getInsertSql() {
/*return """
INSERT INTO snp_data.t_anchoragecall(*/
return """
INSERT INTO snp_data.t_anchoragecall(
INSERT INTO new_snp.t_anchoragecall(
imo,
mvmn_type,
mvmn_dt,

파일 보기

@ -1,11 +1,8 @@
package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.writer;
import com.snp.batch.common.batch.writer.BaseWriter;
import com.snp.batch.jobs.shipMovement.batch.repository.ShipMovementRepository;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.repository.AnchorageCallsRepository;
import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailRepository;
import com.snp.batch.jobs.shipdetail.batch.repository.ShipHashRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

파일 보기

@ -0,0 +1,118 @@
package com.snp.batch.jobs.shipMovementBerthCalls.batch.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.dto.BerthCallsDto;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.entiity.BerthCallsEntity;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.processor.BerthCallsProcessor;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.reader.BerthCallsRangeReader;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.writer.BerthCallsWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Berth calls range import job configuration.
 *
 * Data flow:
 *   BerthCallsRangeReader (Maritime API /Movements/BerthCalls)
 *     -> BerthCallsDto
 *   BerthCallsProcessor
 *     -> BerthCallsEntity
 *   BerthCallsWriter
 *     -> t_berthcall table
 *
 * The reader takes startDate/stopDate from job parameters; when absent it
 * defaults to yesterday's one-day window (handled inside the reader).
 */
@Slf4j
@Configuration
public class BerthCallsRangJobConfig extends BaseJobConfig<BerthCallsDto, BerthCallsEntity> {

    private final BerthCallsProcessor berthCallsProcessor;
    private final BerthCallsWriter berthCallsWriter;
    private final BerthCallsRangeReader berthCallsRangeReader;
    // NOTE(review): jdbcTemplate and objectMapper are injected but never used in
    // this class; kept to avoid changing the constructor signature.
    private final JdbcTemplate jdbcTemplate;
    private final WebClient maritimeApiWebClient;
    private final ObjectMapper objectMapper;

    public BerthCallsRangJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            BerthCallsProcessor berthCallsProcessor,
            BerthCallsWriter berthCallsWriter, BerthCallsRangeReader berthCallsRangeReader, JdbcTemplate jdbcTemplate,
            @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient,
            ObjectMapper objectMapper) {
        super(jobRepository, transactionManager);
        this.berthCallsProcessor = berthCallsProcessor;
        this.berthCallsWriter = berthCallsWriter;
        this.berthCallsRangeReader = berthCallsRangeReader;
        this.jdbcTemplate = jdbcTemplate;
        this.maritimeApiWebClient = maritimeApiWebClient;
        this.objectMapper = objectMapper;
    }

    @Override
    protected String getJobName() {
        return "BerthCallsRangeImportJob";
    }

    @Override
    protected String getStepName() {
        return "BerthCallsRangeImportStep";
    }

    // Returns the step-scoped reader bean declared below (injected by Spring).
    @Override
    protected ItemReader<BerthCallsDto> createReader() {
        return berthCallsRangeReader;
    }

    /**
     * Step-scoped reader bean. startDate/stopDate come from job parameters and
     * may be null; the reader applies its own default range in that case.
     */
    @Bean
    @StepScope
    public BerthCallsRangeReader berthCallsRangeReader(
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        return new BerthCallsRangeReader(maritimeApiWebClient, startDate, stopDate);
    }

    @Override
    protected ItemProcessor<BerthCallsDto, BerthCallsEntity> createProcessor() {
        return berthCallsProcessor;
    }

    @Override
    protected ItemWriter<BerthCallsEntity> createWriter() {
        return berthCallsWriter;
    }

    @Override
    protected int getChunkSize() {
        // Matches the reader's internal batch size of 5000 items per slice.
        return 5000;
    }

    @Bean(name = "BerthCallsRangeImportJob")
    public Job berthCallsRangeImportJob() {
        return job();
    }

    @Bean(name = "BerthCallsRangeImportStep")
    public Step berthCallsRangeImportStep() {
        return step();
    }
}

파일 보기

@ -0,0 +1,154 @@
package com.snp.batch.jobs.shipMovementBerthCalls.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.dto.BerthCallsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * Chunk-based reader for berth call movements.
 *
 * Behaviour:
 * 1. On the first fetchNextBatch() call, requests /Movements/BerthCalls once
 *    for the whole [startDate, stopDate) range and caches the result.
 * 2. Subsequent calls slice the cached list into batches of {@code batchSize}
 *    (5000) items, which Spring Batch then processes chunk by chunk.
 * 3. Returns null when the cached data is exhausted (end of input).
 *
 * When the startDate/stopDate job parameters are missing or blank, the range
 * defaults to yesterday 00:00 (inclusive) through today 00:00 (exclusive).
 */
@Slf4j
@StepScope
public class BerthCallsRangeReader extends BaseApiReader<BerthCallsDto> {

    private List<BerthCallsDto> allData;   // full API result, loaded lazily on first fetch
    private int currentBatchIndex = 0;     // start offset of the next slice into allData
    private final int batchSize = 5000;

    private final String startDate;        // ISO date-time string with trailing "Z"
    private final String stopDate;

    public BerthCallsRangeReader(WebClient webClient,
                                 @Value("#{jobParameters['startDate']}") String startDate,
                                 @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // Default to yesterday's one-day window when either parameter is absent.
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode();
    }

    @Override
    protected String getReaderName() {
        return "BerthCallsRangeReader";
    }

    @Override
    protected void resetCustomState() {
        // Reset so a restarted step re-queries the API from scratch.
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/BerthCalls";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Returns the next slice of up to {@code batchSize} items, or null when
     * there is no (more) data to read.
     */
    @Override
    protected List<BerthCallsDto> fetchNextBatch() throws Exception {
        // 1) Lazily load the complete result set on the first call.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) End of input: everything has been handed out.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Slice [currentBatchIndex, end) out of the cached list.
        int end = Math.min(currentBatchIndex + batchSize, allData.size());
        List<BerthCallsDto> batch = allData.subList(currentBatchIndex, end);
        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());
        currentBatchIndex = end;
        updateApiCallStats(totalBatches, currentBatchNumber);
        return batch;
    }

    /**
     * Single API call with query parameters for the configured date range.
     *
     * @return deserialized response list (may be null if the body was empty)
     */
    private List<BerthCallsDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(BerthCallsDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<BerthCallsDto> data) {
        // data == null marks end of input. Also guard allData: it is still null
        // when the initial API call returned no data, which previously caused a
        // NullPointerException on allData.size().
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -32,7 +32,8 @@ public class BerthCallsRepositoryImpl extends BaseJdbcRepository<BerthCallsEntit
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_berthcall";
// return "snp_data.t_berthcall";
return "new_snp.t_berthcall";
}
@Override
@ -47,8 +48,10 @@ public class BerthCallsRepositoryImpl extends BaseJdbcRepository<BerthCallsEntit
@Override
public String getInsertSql() {
/*return """
INSERT INTO snp_data.t_berthcall(*/
return """
INSERT INTO snp_data.t_berthcall(
INSERT INTO new_snp.t_berthcall(
imo,
mvmn_type,
mvmn_dt,

파일 보기

@ -1,8 +1,6 @@
package com.snp.batch.jobs.shipMovementBerthCalls.batch.writer;
import com.snp.batch.common.batch.writer.BaseWriter;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.repository.AnchorageCallsRepository;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.entiity.BerthCallsEntity;
import com.snp.batch.jobs.shipMovementBerthCalls.batch.repository.BerthCallsRepository;
import lombok.extern.slf4j.Slf4j;

파일 보기

@ -0,0 +1,116 @@
package com.snp.batch.jobs.shipMovementCurrentlyAt.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.reader.CurrentlyAtRangeReader;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.reader.CurrentlyAtReader;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.dto.CurrentlyAtDto;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.entity.CurrentlyAtEntity;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.processor.CurrentlyAtProcessor;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.writer.CurrentlyAtWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * "Currently at" range import job configuration.
 *
 * Data flow:
 *   CurrentlyAtRangeReader (Maritime API /Movements/CurrentlyAt)
 *     -> CurrentlyAtDto
 *   CurrentlyAtProcessor
 *     -> CurrentlyAtEntity
 *   CurrentlyAtWriter
 *     -> currentlyat table
 *
 * The reader takes startDate/stopDate from job parameters; when absent it
 * defaults to yesterday's one-day window (handled inside the reader).
 */
@Slf4j
@Configuration
public class CurrentlyAtRangeJobConfig extends BaseJobConfig<CurrentlyAtDto, CurrentlyAtEntity> {

    private final CurrentlyAtProcessor currentlyAtProcessor;
    private final CurrentlyAtWriter currentlyAtWriter;
    private final CurrentlyAtRangeReader currentlyAtRangeReader;
    // NOTE(review): jdbcTemplate is injected but never used in this class; kept
    // to avoid changing the constructor signature.
    private final JdbcTemplate jdbcTemplate;
    private final WebClient maritimeApiWebClient;

    public CurrentlyAtRangeJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            CurrentlyAtProcessor currentlyAtProcessor,
            CurrentlyAtWriter currentlyAtWriter, CurrentlyAtRangeReader currentlyAtRangeReader, JdbcTemplate jdbcTemplate,
            @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) {
        super(jobRepository, transactionManager);
        this.currentlyAtProcessor = currentlyAtProcessor;
        this.currentlyAtWriter = currentlyAtWriter;
        this.currentlyAtRangeReader = currentlyAtRangeReader;
        this.jdbcTemplate = jdbcTemplate;
        this.maritimeApiWebClient = maritimeApiWebClient;
    }

    @Override
    protected String getJobName() {
        return "CurrentlyAtRangeImportJob";
    }

    @Override
    protected String getStepName() {
        return "currentlyAtRangeImportStep";
    }

    // Returns the step-scoped reader bean declared below (injected by Spring).
    @Override
    protected ItemReader<CurrentlyAtDto> createReader() {
        return currentlyAtRangeReader;
    }

    /**
     * Step-scoped reader bean. startDate/stopDate come from job parameters and
     * may be null; the reader applies its own default range in that case.
     */
    @Bean
    @StepScope
    public CurrentlyAtRangeReader currentlyAtReader(
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        return new CurrentlyAtRangeReader(maritimeApiWebClient, startDate, stopDate);
    }

    @Override
    protected ItemProcessor<CurrentlyAtDto, CurrentlyAtEntity> createProcessor() {
        return currentlyAtProcessor;
    }

    @Override
    protected ItemWriter<CurrentlyAtEntity> createWriter() {
        return currentlyAtWriter;
    }

    @Override
    protected int getChunkSize() {
        // Matches the reader's internal batch size of 5000 items per slice
        // (the previous comment saying 100 was stale).
        return 5000;
    }

    @Bean(name = "CurrentlyAtRangeImportJob")
    public Job currentlyAtRangeImportJob() {
        return job();
    }

    @Bean(name = "CurrentlyAtRangeImportStep")
    public Step currentlyAtRangeImportStep() {
        return step();
    }
}

파일 보기

@ -1,6 +1,6 @@
package com.snp.batch.jobs.shipMovementCurrentlyAt.batch.dto;
import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsPositionDto;
import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsPositionDto;
import lombok.Data;
@Data

파일 보기

@ -0,0 +1,154 @@
package com.snp.batch.jobs.shipMovementCurrentlyAt.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.dto.CurrentlyAtDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * Chunk-based reader for "currently at" movement data.
 *
 * Behaviour:
 * 1. On the first fetchNextBatch() call, requests /Movements/CurrentlyAt once
 *    for the whole [startDate, stopDate) created/updated range and caches the
 *    result.
 * 2. Subsequent calls slice the cached list into batches of {@code batchSize}
 *    (5000) items, which Spring Batch then processes chunk by chunk.
 * 3. Returns null when the cached data is exhausted (end of input).
 *
 * When the startDate/stopDate job parameters are missing or blank, the range
 * defaults to yesterday 00:00 (inclusive) through today 00:00 (exclusive).
 */
@Slf4j
@StepScope
public class CurrentlyAtRangeReader extends BaseApiReader<CurrentlyAtDto> {

    private List<CurrentlyAtDto> allData;   // full API result, loaded lazily on first fetch
    private int currentBatchIndex = 0;      // start offset of the next slice into allData
    private final int batchSize = 5000;

    private final String startDate;         // ISO date-time string with trailing "Z"
    private final String stopDate;

    public CurrentlyAtRangeReader(WebClient webClient,
                                  @Value("#{jobParameters['startDate']}") String startDate,
                                  @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // Default to yesterday's one-day window when either parameter is absent.
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode(); // chunk mode: hand data to Spring Batch slice by slice
    }

    @Override
    protected String getReaderName() {
        return "CurrentlyAtReader";
    }

    @Override
    protected void resetCustomState() {
        // Reset so a restarted step re-queries the API from scratch.
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/CurrentlyAt";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Returns the next slice of up to {@code batchSize} items, or null when
     * there is no (more) data to read.
     */
    @Override
    protected List<CurrentlyAtDto> fetchNextBatch() throws Exception {
        // 1) Lazily load the complete result set on the first call.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) End of input: everything has been handed out.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Slice [currentBatchIndex, endIndex) out of the cached list.
        int endIndex = Math.min(currentBatchIndex + batchSize, allData.size());
        List<CurrentlyAtDto> batch = allData.subList(currentBatchIndex, endIndex);
        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());
        currentBatchIndex = endIndex;
        updateApiCallStats(totalBatches, currentBatchNumber);
        return batch;
    }

    /**
     * Single API call with query parameters for the configured date range.
     * Note: this endpoint filters on created/updated dates
     * (dateCreatedUpdatedStart/Stop), unlike the other movement endpoints.
     *
     * @return deserialized response list (may be null if the body was empty)
     */
    private List<CurrentlyAtDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?dateCreatedUpdatedStart=" + startDate + "&dateCreatedUpdatedStop=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(CurrentlyAtDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<CurrentlyAtDto> data) {
        // data == null marks end of input. Also guard allData: it is still null
        // when the initial API call returned no data, which previously caused a
        // NullPointerException on allData.size().
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -27,7 +27,8 @@ public class CurrentlyAtRepositoryImpl extends BaseJdbcRepository<CurrentlyAtEnt
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_currentlyat";
// return "snp_data.t_currentlyat";
return "new_snp.t_currentlyat";
}
@Override
@ -42,8 +43,10 @@ public class CurrentlyAtRepositoryImpl extends BaseJdbcRepository<CurrentlyAtEnt
@Override
public String getInsertSql() {
/*return """
INSERT INTO snp_data.t_currentlyat(*/
return """
INSERT INTO snp_data.t_currentlyat(
INSERT INTO new_snp.t_currentlyat(
imo,
mvmn_type,
mvmn_dt,
@ -158,49 +161,4 @@ public class CurrentlyAtRepositoryImpl extends BaseJdbcRepository<CurrentlyAtEnt
}
/*private static class ShipMovementRowMapper implements RowMapper<ShipMovementEntity> {
@Override
public ShipMovementEntity mapRow(ResultSet rs, int rowNum) throws SQLException {
ShipMovementEntity entity = ShipMovementEntity.builder()
.id(rs.getLong("id"))
.imolRorIHSNumber(rs.getString("imolRorIHSNumber"))
.portCallId(rs.getObject("portCallId", Integer.class))
.facilityId(rs.getObject("facilityId", Integer.class))
.facilityName(rs.getString("facilityName"))
.facilityType(rs.getString("facilityType"))
.subFacilityId(rs.getObject("subFacilityId", Integer.class))
.subFacilityName(rs.getString("subFacilityName"))
.subFacilityType(rs.getString("subFacilityType"))
.parentFacilityId(rs.getObject("parentFacilityId", Integer.class))
.parentFacilityName(rs.getString("parentFacilityName"))
.parentFacilityType(rs.getString("parentFacilityType"))
.countryCode(rs.getString("countryCode"))
.countryName(rs.getString("countryName"))
.draught(rs.getObject("draught", Double.class))
.latitude(rs.getObject("latitude", Double.class))
.longitude(rs.getObject("longitude", Double.class))
.destination(rs.getString("destination"))
.iso2(rs.getString("iso2"))
.position(parseJson(rs.getString("position")))
.schemaType(rs.getString("schemaType"))
.build();
Timestamp movementDate = rs.getTimestamp("movementDate");
if (movementDate != null) {
entity.setMovementDate(movementDate.toLocalDateTime());
}
return entity;
}
private JsonNode parseJson(String json) {
try {
if (json == null) return null;
return new ObjectMapper().readTree(json);
} catch (Exception e) {
throw new RuntimeException("JSON 파싱 오류: " + json);
}
}
}*/
}

파일 보기

@ -0,0 +1,119 @@
package com.snp.batch.jobs.shipMovementDarkActivity.batch.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.entity.DarkActivityEntity;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.processor.DarkActivityProcessor;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.writer.DarkActivityWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Dark activity range import job configuration.
 *
 * Data flow:
 *   DarkActivityRangeReader (Maritime API)
 *     -> DarkActivityDto
 *   DarkActivityProcessor
 *     -> DarkActivityEntity
 *   DarkActivityWriter
 *     -> t_darkactivity table
 *
 * The reader takes startDate/stopDate from job parameters; when absent it
 * defaults to a fallback range (handled inside the reader).
 */
@Slf4j
@Configuration
public class DarkActivityRangeJobConfig extends BaseJobConfig<DarkActivityDto, DarkActivityEntity> {

    private final DarkActivityProcessor darkActivityProcessor;
    private final DarkActivityWriter darkActivityWriter;
    private final DarkActivityRangeReader darkActivityRangeReader;
    // NOTE(review): jdbcTemplate is injected but never used in this class, and
    // the objectMapper constructor argument is not even stored; both are kept
    // only to avoid changing the constructor signature.
    private final JdbcTemplate jdbcTemplate;
    private final WebClient maritimeApiWebClient;

    public DarkActivityRangeJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            DarkActivityProcessor darkActivityProcessor,
            DarkActivityWriter darkActivityWriter, JdbcTemplate jdbcTemplate,
            @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient,
            ObjectMapper objectMapper, DarkActivityRangeReader darkActivityRangeReader) {
        super(jobRepository, transactionManager);
        this.darkActivityProcessor = darkActivityProcessor;
        this.darkActivityWriter = darkActivityWriter;
        this.jdbcTemplate = jdbcTemplate;
        this.maritimeApiWebClient = maritimeApiWebClient;
        this.darkActivityRangeReader = darkActivityRangeReader;
    }

    @Override
    protected String getJobName() {
        return "DarkActivityRangeImportJob";
    }

    @Override
    protected String getStepName() {
        return "DarkActivityRangeImportStep";
    }

    // Returns the step-scoped reader bean declared below (injected by Spring).
    @Override
    protected ItemReader<DarkActivityDto> createReader() {
        return darkActivityRangeReader;
    }

    /**
     * Step-scoped reader bean. startDate/stopDate come from job parameters and
     * may be null; the reader applies its own default range in that case.
     */
    @Bean
    @StepScope
    public DarkActivityRangeReader darkActivityReader(
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        return new DarkActivityRangeReader(maritimeApiWebClient, startDate, stopDate);
    }

    @Override
    protected ItemProcessor<DarkActivityDto, DarkActivityEntity> createProcessor() {
        return darkActivityProcessor;
    }

    @Override
    protected ItemWriter<DarkActivityEntity> createWriter() {
        return darkActivityWriter;
    }

    @Override
    protected int getChunkSize() {
        // Matches the reader's batch size of 5000 items per slice (the previous
        // comment saying 100 was stale).
        return 5000;
    }

    @Bean(name = "DarkActivityRangeImportJob")
    public Job darkActivityRangeImportJob() {
        return job();
    }

    @Bean(name = "DarkActivityRangeImportStep")
    public Step darkActivityRangeImportStep() {
        return step();
    }
}

파일 보기

@ -24,7 +24,7 @@ public class DarkActivityDto {
private Double latitude;
private Double longitude;
private AnchorageCallsPositionDto position;
private DarkActivityPositionDto position;
private String eventStartDate;
}

파일 보기

@ -0,0 +1,17 @@
package com.snp.batch.jobs.shipMovementDarkActivity.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
 * Position payload attached to a dark-activity record — shaped like a SQL
 * Server geography/geometry JSON object returned by the Maritime API.
 */
@Data
public class DarkActivityPositionDto {
    // NOTE(review): Lombok generates isNull() for this field, which Jackson maps
    // to property "null", not "isNull" — confirm the API field name and add
    // @JsonProperty("isNull") if deserialization fails to bind it.
    private boolean isNull;
    // Spatial reference id — presumably 4326 (WGS84); TODO confirm against API docs.
    private int stSrid;
    private double lat;
    @JsonProperty("long") // "long" is a Java keyword, so it cannot be the field name
    private double lon;
    private double z;
    private double m;
    private boolean hasZ;
    private boolean hasM;
}

파일 보기

@ -0,0 +1,182 @@
package com.snp.batch.jobs.shipMovementDarkActivity.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * Dark Activity movement reader (chunk-based, date-range driven).
 *
 * Flow:
 *  1. The first fetchNextBatch() call hits /Movements/DarkActivity once for the
 *     whole [startDate, stopDate) range and caches the full result list.
 *  2. Each subsequent call slices the cached list into batches of batchSize
 *     (5000) so Spring Batch can process/write chunk by chunk.
 *  3. Returns null once the cursor reaches the end, which ends the step.
 *
 * Without job parameters the range defaults to the whole of yesterday
 * (yesterday 00:00 inclusive up to today 00:00 exclusive).
 */
@Slf4j
@StepScope
public class DarkActivityRangeReader extends BaseApiReader<DarkActivityDto> {

    /** Full API result, loaded lazily on the first fetchNextBatch() call. */
    private List<DarkActivityDto> allData;
    /** Read cursor into allData; advanced by batchSize per fetch. */
    private int currentBatchIndex = 0;
    private final int batchSize = 5000;

    private String startDate;
    private String stopDate;

    /**
     * @param startDate ISO date-time string (trailing "Z") or null/blank for the default range
     * @param stopDate  exclusive upper bound, same format as startDate
     */
    public DarkActivityRangeReader(WebClient webClient,
                                   @Value("#{jobParameters['startDate']}") String startDate,
                                   @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // No (or blank) dates: default to the whole of yesterday.
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode(); // chunk mode: fetchNextBatch() is invoked repeatedly until it returns null
    }

    @Override
    protected String getReaderName() {
        return "DarkActivityReader";
    }

    @Override
    protected void resetCustomState() {
        // Allow this reader instance to be reused by a subsequent step execution.
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/DarkActivity";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Returns the next slice of the cached API result (up to batchSize rows),
     * or null when the data is exhausted or the API returned nothing.
     */
    @Override
    protected List<DarkActivityDto> fetchNextBatch() throws Exception {
        // 1) Lazily load the whole date range on the first call.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) All rows handed out: signal end of stream.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Slice the next batch.
        int endIndex = Math.min(currentBatchIndex + batchSize, allData.size());
        List<DarkActivityDto> batch = allData.subList(currentBatchIndex, endIndex);
        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());
        currentBatchIndex = endIndex;
        updateApiCallStats(totalBatches, currentBatchNumber);
        return batch;
    }

    /**
     * Single GET call covering the whole date range.
     *
     * @return deserialized rows (may be null if the API returned no body)
     */
    private List<DarkActivityDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(DarkActivityDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<DarkActivityDto> data) {
        // data == null marks end of stream. Guard allData: it stays null when the
        // API returned nothing, and allData.size() would previously throw an NPE here.
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -32,7 +32,8 @@ public class DarkActivityRepositoryImpl extends BaseJdbcRepository<DarkActivityE
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_darkactivity";
return "new_snp.t_darkactivity";
// return "snp_data.t_darkactivity";
}
@Override
@ -47,8 +48,10 @@ public class DarkActivityRepositoryImpl extends BaseJdbcRepository<DarkActivityE
@Override
public String getInsertSql() {
// return """
// INSERT INTO snp_data.t_darkactivity(
return """
INSERT INTO snp_data.t_darkactivity(
INSERT INTO new_snp.t_darkactivity(
imo,
mvmn_type,
mvmn_dt,
@ -66,7 +69,7 @@ public class DarkActivityRepositoryImpl extends BaseJdbcRepository<DarkActivityE
evt_start_dt,
lcinfo
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (imo, mvmn_type, mvmn_dt)
ON CONFLICT (imo, mvmn_type, mvmn_dt, fclty_id)
DO UPDATE SET
mvmn_type = EXCLUDED.mvmn_type,
mvmn_dt = EXCLUDED.mvmn_dt,

파일 보기

@ -0,0 +1,114 @@
package com.snp.batch.jobs.shipMovementDestination.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader;
import com.snp.batch.jobs.shipMovementDestination.batch.dto.DestinationDto;
import com.snp.batch.jobs.shipMovementDestination.batch.entity.DestinationEntity;
import com.snp.batch.jobs.shipMovementDestination.batch.processor.DestinationProcessor;
import com.snp.batch.jobs.shipMovementDestination.batch.reader.DestinationRangeReader;
import com.snp.batch.jobs.shipMovementDestination.batch.writer.DestinationWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
* 선박 상세 정보 Import Job Config
*
* 특징:
* - ship_data 테이블에서 IMO 번호 조회
* - IMO 번호를 100개씩 배치로 분할
* - Maritime API GetShipsByIHSLRorIMONumbers 호출
* TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경
* - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT)
*
* 데이터 흐름:
* DestinationReader (ship_data Maritime API)
* (DestinationDto)
* DestinationProcessor
* (DestinationEntity)
* DestinationProcessor
* (t_destination 테이블)
*/
@Slf4j
@Configuration
public class DestinationsRangeJobConfig extends BaseJobConfig<DestinationDto, DestinationEntity> {
private final DestinationProcessor DestinationProcessor;
private final DestinationWriter DestinationWriter;
private final DestinationRangeReader destinationRangeReader;
private final WebClient maritimeApiWebClient;
public DestinationsRangeJobConfig(
JobRepository jobRepository,
PlatformTransactionManager transactionManager,
DestinationProcessor DestinationProcessor,
DestinationWriter DestinationWriter, DestinationRangeReader destinationRangeReader,
@Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가
super(jobRepository, transactionManager);
this.DestinationProcessor = DestinationProcessor;
this.DestinationWriter = DestinationWriter;
this.destinationRangeReader = destinationRangeReader;
this.maritimeApiWebClient = maritimeApiWebClient;
}
@Override
protected String getJobName() {
return "DestinationsRangeImportJob";
}
@Override
protected String getStepName() {
return "DestinationsRangeImportStep";
}
@Override
protected ItemReader<DestinationDto> createReader() { // 타입 변경
return destinationRangeReader;
}
@Bean
@StepScope
public DestinationRangeReader destinationRangeReader(
@Value("#{jobParameters['startDate']}") String startDate,
@Value("#{jobParameters['stopDate']}") String stopDate
) {
// jobParameters 없으면 null 넘어오고 Reader에서 default 처리
return new DestinationRangeReader(maritimeApiWebClient, startDate, stopDate);
}
@Override
protected ItemProcessor<DestinationDto, DestinationEntity> createProcessor() {
return DestinationProcessor;
}
@Override
protected ItemWriter<DestinationEntity> createWriter() { // 타입 변경
return DestinationWriter;
}
@Override
protected int getChunkSize() {
return 1000; // API에서 100개씩 가져오므로 chunk도 100으로 설정
}
@Bean(name = "DestinationsRangeImportJob")
public Job destinationsRangeImportJob() {
return job();
}
@Bean(name = "DestinationsRangeImportStep")
public Step destinationsRangeImportStep() {
return step();
}
}

파일 보기

@ -0,0 +1,161 @@
package com.snp.batch.jobs.shipMovementDestination.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementDestination.batch.dto.DestinationDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * Destinations reader (chunk-based, date-range driven).
 *
 * Flow:
 *  1. The first fetchNextBatch() call hits /Movements/Destinations once for the
 *     whole [startDate, stopDate) range and caches the full result list.
 *  2. Each subsequent call slices the cached list into batches of batchSize
 *     (1000) so Spring Batch can process/write chunk by chunk.
 *  3. Returns null once the cursor reaches the end, which ends the step.
 *
 * Without job parameters the range defaults to today 00:00 (inclusive) up to
 * today + 15 days 00:00 (exclusive), refreshing upcoming destination info.
 */
@Slf4j
@StepScope
public class DestinationRangeReader extends BaseApiReader<DestinationDto> {

    /** Full API result, loaded lazily on the first fetchNextBatch() call. */
    private List<DestinationDto> allData;
    /** Read cursor into allData; advanced by batchSize per fetch. */
    private int currentBatchIndex = 0;
    private final int batchSize = 1000;

    private String startDate;
    private String stopDate;

    /**
     * @param startDate ISO date-time string (trailing "Z") or null/blank for the default range
     * @param stopDate  exclusive upper bound, same format as startDate
     */
    public DestinationRangeReader(WebClient webClient,
                                  @Value("#{jobParameters['startDate']}") String startDate,
                                  @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // No (or blank) dates: default to [today, today + 15 days).
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate today = LocalDate.now();
            this.startDate = today
                    .atStartOfDay()
                    .format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = today
                    .plusDays(15)
                    .atStartOfDay()
                    .format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode(); // chunk mode: fetchNextBatch() is invoked repeatedly until it returns null
    }

    @Override
    protected String getReaderName() {
        return "DestinationsRange";
    }

    @Override
    protected void resetCustomState() {
        // Allow this reader instance to be reused by a subsequent step execution.
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/Destinations";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Returns the next slice of the cached API result (up to batchSize rows),
     * or null when the data is exhausted or the API returned nothing.
     */
    @Override
    protected List<DestinationDto> fetchNextBatch() throws Exception {
        // 1) Lazily load the whole date range on the first call.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) All rows handed out: signal end of stream.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Slice the next batch.
        int endIndex = Math.min(currentBatchIndex + batchSize, allData.size());
        List<DestinationDto> batch = allData.subList(currentBatchIndex, endIndex);
        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());
        currentBatchIndex = endIndex;
        updateApiCallStats(totalBatches, currentBatchNumber);
        return batch;
    }

    /**
     * Single GET call covering the whole date range.
     *
     * @return deserialized rows (may be null if the API returned no body)
     */
    private List<DestinationDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(DestinationDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<DestinationDto> data) {
        // data == null marks end of stream. Guard allData: it stays null when the
        // API returned nothing, and allData.size() would previously throw an NPE here.
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -27,12 +27,13 @@ public class DestinationRepositoryImpl extends BaseJdbcRepository<DestinationEnt
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_destination";
// return "snp_data.t_destination";
return "new_snp.t_destination";
}
@Override
protected String getEntityName() {
return "Destinations";
return "DestinationsRange";
}
@Override
@ -42,8 +43,10 @@ public class DestinationRepositoryImpl extends BaseJdbcRepository<DestinationEnt
@Override
public String getInsertSql() {
/*return """
INSERT INTO snp_data.t_destination(*/
return """
INSERT INTO snp_data.t_destination(
INSERT INTO new_snp.t_destination(
imo,
mvmn_type,
mvmn_dt,
@ -57,7 +60,7 @@ public class DestinationRepositoryImpl extends BaseJdbcRepository<DestinationEnt
iso2_ntn_cd,
lcinfo
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (imo, mvmn_type, mvmn_dt)
ON CONFLICT (imo)
DO UPDATE SET
mvmn_type = EXCLUDED.mvmn_type,
mvmn_dt = EXCLUDED.mvmn_dt,
@ -122,7 +125,6 @@ public class DestinationRepositoryImpl extends BaseJdbcRepository<DestinationEnt
@Override
public void saveAll(List<DestinationEntity> entities) {
if (entities == null || entities.isEmpty()) return;
log.info("Destinations 저장 시작 = {}건", entities.size());
batchInsert(entities);

파일 보기

@ -1,12 +1,12 @@
package com.snp.batch.jobs.shipMovement.batch.config;
package com.snp.batch.jobs.shipMovementPortCalls.batch.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import com.snp.batch.jobs.shipMovement.batch.processor.ShipMovementProcessor;
import com.snp.batch.jobs.shipMovement.batch.reader.ShipMovementReader;
import com.snp.batch.jobs.shipMovement.batch.writer.ShipMovementWriter;
import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto;
import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity;
import com.snp.batch.jobs.shipMovementPortCalls.batch.processor.PortCallsProcessor;
import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsReader;
import com.snp.batch.jobs.shipMovementPortCalls.batch.writer.PortCallsWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
@ -37,34 +37,34 @@ import java.time.format.DateTimeFormatter;
* - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT)
*
* 데이터 흐름:
* ShipMovementReader (ship_data Maritime API)
* PortCallsReader (ship_data Maritime API)
* (PortCallDto)
* ShipMovementProcessor
* (ShipMovementEntity)
* PortCallsProcessor
* (PortCallsEntity)
* ShipDetailDataWriter
* (ship_movement 테이블)
*/
@Slf4j
@Configuration
public class ShipMovementJobConfig extends BaseJobConfig<PortCallsDto, ShipMovementEntity> {
public class ShipPortCallsJobConfig extends BaseJobConfig<PortCallsDto, PortCallsEntity> {
private final ShipMovementProcessor shipMovementProcessor;
private final ShipMovementWriter shipMovementWriter;
private final PortCallsProcessor portCallsProcessor;
private final PortCallsWriter portCallsWriter;
private final JdbcTemplate jdbcTemplate;
private final WebClient maritimeApiWebClient;
private final ObjectMapper objectMapper; // ObjectMapper 주입 추가
public ShipMovementJobConfig(
public ShipPortCallsJobConfig(
JobRepository jobRepository,
PlatformTransactionManager transactionManager,
ShipMovementProcessor shipMovementProcessor,
ShipMovementWriter shipMovementWriter, JdbcTemplate jdbcTemplate,
PortCallsProcessor portCallsProcessor,
PortCallsWriter portCallsWriter, JdbcTemplate jdbcTemplate,
@Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient,
ObjectMapper objectMapper) { // ObjectMapper 주입 추가
super(jobRepository, transactionManager);
this.shipMovementProcessor = shipMovementProcessor;
this.shipMovementWriter = shipMovementWriter;
this.portCallsProcessor = portCallsProcessor;
this.portCallsWriter = portCallsWriter;
this.jdbcTemplate = jdbcTemplate;
this.maritimeApiWebClient = maritimeApiWebClient;
this.objectMapper = objectMapper; // ObjectMapper 초기화
@ -72,30 +72,28 @@ public class ShipMovementJobConfig extends BaseJobConfig<PortCallsDto, ShipMovem
@Override
protected String getJobName() {
return "shipMovementJob";
return "PortCallsImportJob";
}
@Override
protected String getStepName() {
return "shipMovementStep";
return "PortCallsImportStep";
}
@Bean
@StepScope
public ShipMovementReader shipMovementReader(
public PortCallsReader portCallsReader(
@Value("#{jobParameters['startDate']}") String startDate,
@Value("#{jobParameters['stopDate']}") String stopDate) {
LocalDate today = LocalDate.now();
if (startDate == null || startDate.isBlank() ||
stopDate == null || stopDate.isBlank()) {
if(startDate == null || startDate.isBlank()) {
startDate = today.minusYears(1).plusDays(1).format(DateTimeFormatter.ISO_LOCAL_DATE);
LocalDate yesterday = LocalDate.now().minusDays(1);
startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
}
if(stopDate == null || stopDate.isBlank()) {
stopDate = today.format(DateTimeFormatter.ISO_LOCAL_DATE);
}
ShipMovementReader reader = new ShipMovementReader(maritimeApiWebClient, jdbcTemplate, objectMapper);
PortCallsReader reader = new PortCallsReader(maritimeApiWebClient, jdbcTemplate, objectMapper);
reader.setStartDate(startDate);
reader.setStopDate(stopDate);
return reader;
@ -103,32 +101,32 @@ public class ShipMovementJobConfig extends BaseJobConfig<PortCallsDto, ShipMovem
@Override
protected ItemReader<PortCallsDto> createReader() { // 타입 변경
// Reader 생성자 수정: ObjectMapper를 전달합니다.
return shipMovementReader(null, null);
//return new ShipMovementReader(maritimeApiWebClient, jdbcTemplate, objectMapper);
return portCallsReader( null, null);
//return new PortCallsReader(maritimeApiWebClient, jdbcTemplate, objectMapper);
}
@Override
protected ItemProcessor<PortCallsDto, ShipMovementEntity> createProcessor() {
return shipMovementProcessor;
protected ItemProcessor<PortCallsDto, PortCallsEntity> createProcessor() {
return portCallsProcessor;
}
@Override
protected ItemWriter<ShipMovementEntity> createWriter() { // 타입 변경
return shipMovementWriter;
protected ItemWriter<PortCallsEntity> createWriter() { // 타입 변경
return portCallsWriter;
}
@Override
protected int getChunkSize() {
return 50; // API에서 100개씩 가져오므로 chunk도 100으로 설정
return 1000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정
}
@Bean(name = "shipMovementJob")
public Job shipMovementJob() {
@Bean(name = "PortCallsImportJob")
public Job portCallsImportJob() {
return job();
}
@Bean(name = "shipMovementStep")
public Step shipMovementStep() {
@Bean(name = "PortCallsImportStep")
public Step portCallsImportStep() {
return step();
}
}

파일 보기

@ -0,0 +1,114 @@
package com.snp.batch.jobs.shipMovementPortCalls.batch.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto;
import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity;
import com.snp.batch.jobs.shipMovementPortCalls.batch.processor.PortCallsProcessor;
import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsRangeReader;
import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsReader;
import com.snp.batch.jobs.shipMovementPortCalls.batch.writer.PortCallsWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Port Calls (date-range) import job configuration.
 *
 * Wires a Spring Batch job that:
 *  - reads port-call movements from the Maritime API /Movements/PortCalls
 *    endpoint for a [startDate, stopDate) range (PortCallsRangeReader),
 *  - converts PortCallsDto -> PortCallsEntity (PortCallsProcessor),
 *  - persists the entities via PortCallsWriter.
 *
 * The reader bean below is @StepScope so the date range can be late-bound from
 * job parameters; when the parameters are absent the reader applies its own
 * default range.
 */
@Slf4j
@Configuration
public class ShipPortCallsRangeJobConfig extends BaseJobConfig<PortCallsDto, PortCallsEntity> {

    private final PortCallsProcessor portCallsProcessor;
    private final PortCallsWriter portCallsWriter;
    private final PortCallsRangeReader portCallsRangeReader;

    public ShipPortCallsRangeJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            PortCallsProcessor portCallsProcessor,
            PortCallsWriter portCallsWriter, JdbcTemplate jdbcTemplate,
            @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient,
            ObjectMapper objectMapper, PortCallsRangeReader portCallsRangeReader) { // NOTE(review): jdbcTemplate, maritimeApiWebClient and objectMapper are injected but never stored or used
        super(jobRepository, transactionManager);
        this.portCallsProcessor = portCallsProcessor;
        this.portCallsWriter = portCallsWriter;
        this.portCallsRangeReader = portCallsRangeReader;
    }

    @Override
    protected String getJobName() {
        return "PortCallsRangeImportJob";
    }

    @Override
    protected String getStepName() {
        return "PortCallsRangeImportStep";
    }

    /**
     * Step-scoped reader bean; startDate/stopDate come from job parameters and
     * may be null — the reader then falls back to its default date range.
     */
    @Bean
    @StepScope
    public PortCallsRangeReader portCallsRangeReader(
            @Qualifier("maritimeServiceApiWebClient") WebClient webClient,
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        return new PortCallsRangeReader(webClient, startDate, stopDate);
    }

    @Override
    protected ItemReader<PortCallsDto> createReader() {
        // Returns the @StepScope reader bean declared above (injected by type).
        return portCallsRangeReader;
    }

    @Override
    protected ItemProcessor<PortCallsDto, PortCallsEntity> createProcessor() {
        return portCallsProcessor;
    }

    @Override
    protected ItemWriter<PortCallsEntity> createWriter() {
        return portCallsWriter;
    }

    @Override
    protected int getChunkSize() {
        return 5000; // matches the reader's batchSize (5000) so one fetch feeds exactly one chunk
    }

    @Bean(name = "PortCallsRangeImportJob")
    public Job portCallsRangeImportJob() {
        return job();
    }

    @Bean(name = "PortCallsRangeImportStep")
    public Step portCallsRangeImportStep() {
        return step();
    }
}

파일 보기

@ -1,4 +1,4 @@
package com.snp.batch.jobs.shipMovement.batch.dto;
package com.snp.batch.jobs.shipMovementPortCalls.batch.dto;
import lombok.Data;

파일 보기

@ -1,4 +1,4 @@
package com.snp.batch.jobs.shipMovement.batch.dto;
package com.snp.batch.jobs.shipMovementPortCalls.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

파일 보기

@ -1,4 +1,4 @@
package com.snp.batch.jobs.shipMovement.batch.dto;
package com.snp.batch.jobs.shipMovementPortCalls.batch.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

파일 보기

@ -1,4 +1,4 @@
package com.snp.batch.jobs.shipMovement.batch.entity;
package com.snp.batch.jobs.shipMovementPortCalls.batch.entity;
import com.fasterxml.jackson.databind.JsonNode;
import jakarta.persistence.GeneratedValue;
@ -7,7 +7,6 @@ import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
@ -17,7 +16,7 @@ import java.time.LocalDateTime;
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
public class ShipMovementEntity {
public class PortCallsEntity {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "ship_movement_id_seq")
@SequenceGenerator(name = "ship_movement_id_seq", sequenceName = "ship_movement_id_seq", allocationSize = 1)

파일 보기

@ -1,10 +1,10 @@
package com.snp.batch.jobs.shipMovement.batch.processor;
package com.snp.batch.jobs.shipMovementPortCalls.batch.processor;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.processor.BaseProcessor;
import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto;
import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@ -22,16 +22,16 @@ import java.time.LocalDateTime;
*/
@Slf4j
@Component
public class ShipMovementProcessor extends BaseProcessor<PortCallsDto, ShipMovementEntity> {
public class PortCallsProcessor extends BaseProcessor<PortCallsDto, PortCallsEntity> {
private final ObjectMapper objectMapper;
public ShipMovementProcessor(ObjectMapper objectMapper) {
public PortCallsProcessor(ObjectMapper objectMapper) {
this.objectMapper = objectMapper;
}
@Override
protected ShipMovementEntity processItem(PortCallsDto dto) throws Exception {
protected PortCallsEntity processItem(PortCallsDto dto) throws Exception {
log.debug("선박 상세 정보 처리 시작: imoNumber={}, facilityName={}",
dto.getImolRorIHSNumber(), dto.getFacilityName());
@ -41,7 +41,7 @@ public class ShipMovementProcessor extends BaseProcessor<PortCallsDto, ShipMovem
positionNode = objectMapper.valueToTree(dto.getPosition());
}
ShipMovementEntity entity = ShipMovementEntity.builder()
PortCallsEntity entity = PortCallsEntity.builder()
.movementType(dto.getMovementType())
.imolRorIHSNumber(dto.getImolRorIHSNumber())
.movementDate(LocalDateTime.parse(dto.getMovementDate()))

파일 보기

@ -0,0 +1,160 @@
package com.snp.batch.jobs.shipMovementPortCalls.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * Port-call range reader (v2.0 - chunk based).
 *
 * Flow:
 *  1. On the first {@code fetchNextBatch()} call, queries
 *     {@code /Movements/PortCalls} once for the whole [startDate, stopDate) range.
 *  2. Slices the full result into batches of {@code batchSize} (5000) and hands
 *     Spring Batch one batch per subsequent {@code fetchNextBatch()} call.
 *  3. {@code read()} then returns items one at a time to the processor/writer.
 *
 * NOTE(review): the entire date range is loaded into memory by a single API
 * call; only the downstream processing is chunked.
 */
@Slf4j
@StepScope
public class PortCallsRangeReader extends BaseApiReader<PortCallsDto> {

    /** Full API result for the requested range; loaded lazily on first fetch. */
    private List<PortCallsDto> allData;
    /** Index of the next unread element in {@link #allData}. */
    private int currentBatchIndex = 0;
    /** Number of items handed to Spring Batch per fetchNextBatch() call. */
    private final int batchSize = 5000;

    private String startDate;
    private String stopDate;

    public PortCallsRangeReader(WebClient webClient,
                                @Value("#{jobParameters['startDate']}") String startDate,
                                @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // Default to "yesterday" (one full day) when no date range is supplied.
        if (startDate == null || startDate.isBlank() ||
                stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode();
    }

    @Override
    protected String getReaderName() {
        return "PortCallsRangeReader";
    }

    @Override
    protected void resetCustomState() {
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/PortCalls";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Chunk-mode core method: returns the next batch of results.
     *
     * Spring Batch re-invokes this after it has drained the previous batch
     * through {@code read()}.
     *
     * @return the next batch, or {@code null} when no data remains
     */
    @Override
    protected List<PortCallsDto> fetchNextBatch() throws Exception {
        // 1) Lazy-load the whole date range on the first invocation.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) All data consumed -> signal end of input.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Compute the exclusive end index of this batch.
        int end = Math.min(currentBatchIndex + batchSize, allData.size());
        // 4) Slice out the current batch (view over allData; not modified downstream).
        List<PortCallsDto> batch = allData.subList(currentBatchIndex, end);
        int batchNum = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size());
        // Advance to the next batch.
        currentBatchIndex = end;
        updateApiCallStats(totalBatches, batchNum);
        return batch;
    }

    /**
     * Calls the API with the date range as query parameters.
     *
     * @return the decoded response list (may be null on an empty body)
     */
    private List<PortCallsDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.info("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(PortCallsDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<PortCallsDto> data) {
        // data == null signals end of input. Guard against allData being null:
        // the initial API call may have returned nothing at all (fixes an NPE
        // on allData.size()).
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -1,9 +1,10 @@
package com.snp.batch.jobs.shipMovement.batch.reader;
package com.snp.batch.jobs.shipMovementPortCalls.batch.reader;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto;
import com.snp.batch.jobs.shipMovement.batch.dto.ShipMovementApiResponse;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto;
import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto;
import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.ShipMovementApiResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
@ -34,7 +35,7 @@ import java.util.*;
*/
@Slf4j
@StepScope
public class ShipMovementReader extends BaseApiReader<PortCallsDto> {
public class PortCallsReader extends BaseApiReader<PortCallsDto> {
private final JdbcTemplate jdbcTemplate;
private final ObjectMapper objectMapper;
@ -46,16 +47,16 @@ public class ShipMovementReader extends BaseApiReader<PortCallsDto> {
private int currentBatchIndex = 0;
private final int batchSize = 10;
@Value("#{jobParameters['startDate']}")
private String startDate;
// private String startDate = "2024-01-01";
// @Value("#{jobParameters['startDate']}")
// private String startDate;
private String startDate = "2025-01-01";
@Value("#{jobParameters['stopDate']}")
private String stopDate;
// private String stopDate = "2024-12-31";
// @Value("#{jobParameters['stopDate']}")
// private String stopDate;
private String stopDate = "2025-12-31";
public void setStartDate(String startDate) {this.startDate = startDate;}
public void setStopDate(String stopDate){this.stopDate=stopDate;}
public ShipMovementReader(WebClient webClient, JdbcTemplate jdbcTemplate, ObjectMapper objectMapper) {
public PortCallsReader(WebClient webClient, JdbcTemplate jdbcTemplate, ObjectMapper objectMapper) {
super(webClient);
this.jdbcTemplate = jdbcTemplate;
this.objectMapper = objectMapper;
@ -76,7 +77,7 @@ public class ShipMovementReader extends BaseApiReader<PortCallsDto> {
@Override
protected String getApiPath() {
return "/Movements";
return "/Movements/PortCalls";
}
@Override
@ -88,9 +89,6 @@ public class ShipMovementReader extends BaseApiReader<PortCallsDto> {
"SELECT imo_number FROM ship_data ORDER BY id";
// "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_ship_stpov_info) ORDER BY imo_number";
private static final String FETCH_ALL_HASHES_QUERY =
"SELECT imo_number, ship_detail_hash FROM ship_detail_hash_json ORDER BY imo_number";
/**
* 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회
*/
@ -144,15 +142,16 @@ public class ShipMovementReader extends BaseApiReader<PortCallsDto> {
String imoParam = String.join(",", currentBatch);
// API 호출
ShipMovementApiResponse response = callApiWithBatch(imoParam);
// ShipMovementApiResponse response = callApiWithBatch(imoParam);
List<PortCallsDto> response= callApiWithBatch(imoParam);
// 다음 배치로 인덱스 이동
currentBatchIndex = endIndex;
// 응답 처리
if (response != null && response.getPortCallList() != null) {
List<PortCallsDto> portCalls = response.getPortCallList();
if (response != null) {
List<PortCallsDto> portCalls = response;
log.info("[{}] 배치 {}/{} 완료: {} 건 조회",
getReaderName(), currentBatchNumber, totalBatches, portCalls.size());
@ -194,7 +193,7 @@ public class ShipMovementReader extends BaseApiReader<PortCallsDto> {
* @param lrno 쉼표로 연결된 IMO 번호 (: "1000019,1000021,...")
* @return API 응답
*/
private ShipMovementApiResponse callApiWithBatch(String lrno) {
private List<PortCallsDto> callApiWithBatch(String lrno) {
String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate+"&lrno=" + lrno;
log.debug("[{}] API 호출: {}", getReaderName(), url);
@ -202,7 +201,8 @@ public class ShipMovementReader extends BaseApiReader<PortCallsDto> {
return webClient.get()
.uri(url)
.retrieve()
.bodyToMono(ShipMovementApiResponse.class)
.bodyToFlux(PortCallsDto.class)
.collectList()
.block();
}

파일 보기

@ -0,0 +1,16 @@
package com.snp.batch.jobs.shipMovementPortCalls.batch.repository;
import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity;
import java.util.List;
/**
 * Repository for persisting port-call records.
 *
 * Implementations batch-insert {@link PortCallsEntity} rows and support an
 * existence check by port-call id.
 */
public interface PortCallsRepository {
    void saveAll(List<PortCallsEntity> entities);
    boolean existsByPortCallId(Integer portCallId);
}

파일 보기

@ -1,9 +1,9 @@
package com.snp.batch.jobs.shipMovement.batch.repository;
package com.snp.batch.jobs.shipMovementPortCalls.batch.repository;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.snp.batch.common.batch.repository.BaseJdbcRepository;
import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity;
import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity;
import lombok.extern.slf4j.Slf4j;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
@ -21,16 +21,17 @@ import java.util.List;
*/
@Slf4j
@Repository("ShipMovementRepository")
public class ShipMovementRepositoryImpl extends BaseJdbcRepository<ShipMovementEntity, String>
implements ShipMovementRepository {
public class PortCallsRepositoryImpl extends BaseJdbcRepository<PortCallsEntity, String>
implements PortCallsRepository {
public ShipMovementRepositoryImpl(JdbcTemplate jdbcTemplate) {
public PortCallsRepositoryImpl(JdbcTemplate jdbcTemplate) {
super(jdbcTemplate);
}
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_ship_stpov_info";
// return "snp_data.t_ship_stpov_info";
return "new_snp.t_ship_stpov_info";
}
@Override
@ -39,14 +40,16 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository<ShipMovementE
}
@Override
protected String extractId(ShipMovementEntity entity) {
protected String extractId(PortCallsEntity entity) {
return entity.getImolRorIHSNumber();
}
@Override
public String getInsertSql() {
// return """
// INSERT INTO snp_data.t_ship_stpov_info(
return """
INSERT INTO snp_data.t_ship_stpov_info(
INSERT INTO new_snp.t_ship_stpov_info(
imo,
mvmn_type,
mvmn_dt,
@ -127,7 +130,7 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository<ShipMovementE
}
@Override
protected void setInsertParameters(PreparedStatement ps, ShipMovementEntity e) throws Exception {
protected void setInsertParameters(PreparedStatement ps, PortCallsEntity e) throws Exception {
int i = 1;
ps.setString(i++, e.getImolRorIHSNumber()); // imo
ps.setString(i++, e.getMovementType()); // mvmn_type
@ -171,17 +174,17 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository<ShipMovementE
}
@Override
protected void setUpdateParameters(PreparedStatement ps, ShipMovementEntity entity) throws Exception {
protected void setUpdateParameters(PreparedStatement ps, PortCallsEntity entity) throws Exception {
}
@Override
protected RowMapper<ShipMovementEntity> getRowMapper() {
protected RowMapper<PortCallsEntity> getRowMapper() {
return new ShipMovementRowMapper();
}
@Override
public void saveAll(List<ShipMovementEntity> entities) {
public void saveAll(List<PortCallsEntity> entities) {
if (entities == null || entities.isEmpty()) return;
log.info("ShipMovement 저장 시작 = {}건", entities.size());
@ -205,10 +208,10 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository<ShipMovementE
/**
* ShipDetailEntity RowMapper
*/
private static class ShipMovementRowMapper implements RowMapper<ShipMovementEntity> {
private static class ShipMovementRowMapper implements RowMapper<PortCallsEntity> {
@Override
public ShipMovementEntity mapRow(ResultSet rs, int rowNum) throws SQLException {
ShipMovementEntity entity = ShipMovementEntity.builder()
public PortCallsEntity mapRow(ResultSet rs, int rowNum) throws SQLException {
PortCallsEntity entity = PortCallsEntity.builder()
.id(rs.getLong("id"))
.imolRorIHSNumber(rs.getString("imolRorIHSNumber"))
.portCallId(rs.getObject("portCallId", Integer.class))

파일 보기

@ -0,0 +1,38 @@
package com.snp.batch.jobs.shipMovementPortCalls.batch.writer;
import com.snp.batch.common.batch.writer.BaseWriter;
import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity;
import com.snp.batch.jobs.shipMovementPortCalls.batch.repository.PortCallsRepository;
import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailRepository;
import com.snp.batch.jobs.shipdetail.batch.repository.ShipHashRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Writer that persists port-call entities via {@link PortCallsRepository}.
 */
@Slf4j
@Component
public class PortCallsWriter extends BaseWriter<PortCallsEntity> {

    // Renamed from "shipMovementRepository": the type is PortCallsRepository
    // and the old name was copy-paste residue from the ship-movement writer.
    private final PortCallsRepository portCallsRepository;

    public PortCallsWriter(PortCallsRepository portCallsRepository) {
        super("ShipPortCalls");
        this.portCallsRepository = portCallsRepository;
    }

    /**
     * Saves one chunk of entities; a no-op for an empty chunk.
     *
     * @param items entities produced by the processor for this chunk
     */
    @Override
    protected void writeItems(List<PortCallsEntity> items) throws Exception {
        if (items.isEmpty()) {
            return;
        }
        portCallsRepository.saveAll(items);
        log.info("PortCalls 데이터 저장 완료: {} 건", items.size());
    }
}

파일 보기

@ -0,0 +1,117 @@
package com.snp.batch.jobs.shipMovementStsOperations.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader;
import com.snp.batch.jobs.shipMovementStsOperations.batch.dto.StsOperationDto;
import com.snp.batch.jobs.shipMovementStsOperations.batch.entity.StsOperationEntity;
import com.snp.batch.jobs.shipMovementStsOperations.batch.processor.StsOperationProcessor;
import com.snp.batch.jobs.shipMovementStsOperations.batch.reader.StsOperationRangeReader;
import com.snp.batch.jobs.shipMovementStsOperations.batch.writer.StsOperationWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * STS-operation range import job configuration.
 *
 * Data flow:
 *   StsOperationRangeReader (date-range call to the Maritime /Movements/StsOperations API)
 *     -> StsOperationDto
 *   StsOperationProcessor
 *     -> StsOperationEntity
 *   StsOperationWriter
 *     -> t_stsoperation table
 *
 * NOTE(review): this config both constructor-injects StsOperationRangeReader
 * and declares a same-named @Bean factory below — verify Spring resolves this
 * wiring without a circular-dependency error at startup.
 */
@Slf4j
@Configuration
public class StsOperationRangeJobConfig extends BaseJobConfig<StsOperationDto, StsOperationEntity> {

    private final StsOperationProcessor stsOperationProcessor;
    private final StsOperationWriter stsOperationWriter;
    private final StsOperationRangeReader stsOperationRangeReader;
    private final JdbcTemplate jdbcTemplate;
    private final WebClient maritimeApiWebClient;

    public StsOperationRangeJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            StsOperationProcessor stsOperationProcessor,
            StsOperationWriter stsOperationWriter, StsOperationRangeReader stsOperationRangeReader, JdbcTemplate jdbcTemplate,
            @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // WebClient targeting the maritime service API
        super(jobRepository, transactionManager);
        this.stsOperationProcessor = stsOperationProcessor;
        this.stsOperationWriter = stsOperationWriter;
        this.stsOperationRangeReader = stsOperationRangeReader;
        this.jdbcTemplate = jdbcTemplate;
        this.maritimeApiWebClient = maritimeApiWebClient;
    }

    @Override
    protected String getJobName() {
        return "STSOperationRangeImportJob";
    }

    @Override
    protected String getStepName() {
        return "STSOperationRangeImportStep";
    }

    @Override
    protected ItemReader<StsOperationDto> createReader() {
        // Returns the injected @StepScope reader bean defined below.
        return stsOperationRangeReader;
    }

    @Bean
    @StepScope
    public StsOperationRangeReader stsOperationRangeReader(
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        // jobParameters may be absent (null); the reader falls back to its
        // default date range (yesterday).
        return new StsOperationRangeReader(maritimeApiWebClient, startDate, stopDate);
    }

    @Override
    protected ItemProcessor<StsOperationDto, StsOperationEntity> createProcessor() {
        return stsOperationProcessor;
    }

    @Override
    protected ItemWriter<StsOperationEntity> createWriter() {
        return stsOperationWriter;
    }

    @Override
    protected int getChunkSize() {
        return 5000; // matches the reader's batchSize of 5000 items per fetch
    }

    @Bean(name = "STSOperationRangeImportJob")
    public Job STSOperationRangeImportJob() {
        return job();
    }

    @Bean(name = "STSOperationRangeImportStep")
    public Step STSOperationRangeImportStep() {
        return step();
    }
}

파일 보기

@ -0,0 +1,164 @@
package com.snp.batch.jobs.shipMovementStsOperations.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementStsOperations.batch.dto.StsOperationDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * STS-operation range reader (v2.0 - chunk based).
 *
 * Flow:
 *  1. On the first {@code fetchNextBatch()} call, queries
 *     {@code /Movements/StsOperations} once for the whole [startDate, stopDate) range.
 *  2. Slices the full result into batches of {@code batchSize} (5000) and hands
 *     Spring Batch one batch per subsequent {@code fetchNextBatch()} call.
 *  3. {@code read()} then returns items one at a time to the processor/writer.
 *
 * NOTE(review): the entire date range is held in memory after the single API
 * call; only the downstream processing is chunked.
 */
@Slf4j
@StepScope
public class StsOperationRangeReader extends BaseApiReader<StsOperationDto> {

    /** Full API result for the requested range; loaded lazily on first fetch. */
    private List<StsOperationDto> allData;
    /** Index of the next unread element in {@link #allData}. */
    private int currentBatchIndex = 0;
    /** Number of items handed to Spring Batch per fetchNextBatch() call. */
    private final int batchSize = 5000;

    private String startDate;
    private String stopDate;

    public StsOperationRangeReader(WebClient webClient,
                                   @Value("#{jobParameters['startDate']}") String startDate,
                                   @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // Default to "yesterday" (one full day) when no date range is supplied.
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode(); // activate chunk mode in BaseApiReader
    }

    @Override
    protected String getReaderName() {
        return "StsOperationReader";
    }

    @Override
    protected void resetCustomState() {
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/StsOperations";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    /** Logs the effective date range before the first fetch. */
    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Chunk-mode core method: returns the next batch of results.
     *
     * Spring Batch re-invokes this after it has drained the previous batch
     * through {@code read()}.
     *
     * @return the next batch, or {@code null} when no data remains
     */
    @Override
    protected List<StsOperationDto> fetchNextBatch() throws Exception {
        // 1) Lazy-load the whole date range on the first invocation.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) All data consumed -> signal end of input.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Compute the exclusive end index of this batch.
        int endIndex = Math.min(currentBatchIndex + batchSize, allData.size());
        // 4) Slice out the current batch (view over allData; not modified downstream).
        List<StsOperationDto> batch = allData.subList(currentBatchIndex, endIndex);
        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());
        currentBatchIndex = endIndex;
        updateApiCallStats(totalBatches, currentBatchNumber);
        return batch;
    }

    /**
     * Calls the API with the date range as query parameters.
     *
     * @param startDate inclusive range start (ISO date-time)
     * @param stopDate  exclusive range end (ISO date-time)
     * @return the decoded response list (may be null on an empty body)
     */
    private List<StsOperationDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(StsOperationDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<StsOperationDto> data) {
        // data == null signals end of input. Guard against allData being null:
        // the initial API call may have returned nothing at all (fixes an NPE
        // on allData.size()).
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -30,7 +30,8 @@ public class StsOperationRepositoryImpl extends BaseJdbcRepository<StsOperationE
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_stsoperation";
// return "snp_data.t_stsoperation";
return "new_snp.t_stsoperation";
}
@Override
@ -45,8 +46,10 @@ public class StsOperationRepositoryImpl extends BaseJdbcRepository<StsOperationE
@Override
public String getInsertSql() {
// return """
// INSERT INTO snp_data.t_stsoperation(
return """
INSERT INTO snp_data.t_stsoperation(
INSERT INTO new_snp.t_stsoperation(
imo,
mvmn_type,
mvmn_dt,
@ -67,7 +70,7 @@ public class StsOperationRepositoryImpl extends BaseJdbcRepository<StsOperationE
evt_start_dt,
lcinfo
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (imo, mvmn_type, mvmn_dt)
ON CONFLICT (imo, mvmn_type, mvmn_dt, fclty_id)
DO UPDATE SET
mvmn_type = EXCLUDED.mvmn_type,
mvmn_dt = EXCLUDED.mvmn_dt,

파일 보기

@ -0,0 +1,117 @@
package com.snp.batch.jobs.shipMovementTerminalCalls.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader;
import com.snp.batch.jobs.shipMovementTerminalCalls.batch.dto.TerminalCallsDto;
import com.snp.batch.jobs.shipMovementTerminalCalls.batch.entity.TerminalCallsEntity;
import com.snp.batch.jobs.shipMovementTerminalCalls.batch.processor.TerminalCallsProcessor;
import com.snp.batch.jobs.shipMovementTerminalCalls.batch.reader.TerminalCallsRangeReader;
import com.snp.batch.jobs.shipMovementTerminalCalls.batch.reader.TerminalCallsReader;
import com.snp.batch.jobs.shipMovementTerminalCalls.batch.writer.TerminalCallsWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Terminal-call range import job configuration.
 *
 * Data flow:
 *   TerminalCallsRangeReader (date-range call to the Maritime /Movements/TerminalCalls API)
 *     -> TerminalCallsDto
 *   TerminalCallsProcessor
 *     -> TerminalCallsEntity
 *   TerminalCallsWriter
 *     -> t_terminalcall table
 *
 * NOTE(review): this config both constructor-injects TerminalCallsRangeReader
 * and declares a same-named @Bean factory below — verify Spring resolves this
 * wiring without a circular-dependency error at startup.
 */
@Slf4j
@Configuration
public class TerminalCallsRangeJobConfig extends BaseJobConfig<TerminalCallsDto, TerminalCallsEntity> {

    private final TerminalCallsProcessor terminalCallsProcessor;
    private final TerminalCallsWriter terminalCallsWriter;
    private final TerminalCallsRangeReader terminalCallsRangeReader;
    private final JdbcTemplate jdbcTemplate;
    private final WebClient maritimeApiWebClient;

    public TerminalCallsRangeJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            TerminalCallsProcessor terminalCallsProcessor,
            TerminalCallsWriter terminalCallsWriter, TerminalCallsRangeReader terminalCallsRangeReader, JdbcTemplate jdbcTemplate,
            @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // WebClient targeting the maritime service API
        super(jobRepository, transactionManager);
        this.terminalCallsProcessor = terminalCallsProcessor;
        this.terminalCallsWriter = terminalCallsWriter;
        this.terminalCallsRangeReader = terminalCallsRangeReader;
        this.jdbcTemplate = jdbcTemplate;
        this.maritimeApiWebClient = maritimeApiWebClient;
    }

    @Override
    protected String getJobName() {
        return "TerminalCallsRangeImportJob";
    }

    @Override
    protected String getStepName() {
        return "TerminalCallsRangeImportStep";
    }

    @Override
    protected ItemReader<TerminalCallsDto> createReader() {
        // Returns the injected @StepScope reader bean defined below.
        return terminalCallsRangeReader;
    }

    @Bean
    @StepScope
    public TerminalCallsRangeReader terminalCallsRangeReader(
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        // jobParameters may be absent (null); the reader falls back to its
        // default date range (yesterday).
        return new TerminalCallsRangeReader(maritimeApiWebClient, startDate, stopDate);
    }

    @Override
    protected ItemProcessor<TerminalCallsDto, TerminalCallsEntity> createProcessor() {
        return terminalCallsProcessor;
    }

    @Override
    protected ItemWriter<TerminalCallsEntity> createWriter() {
        return terminalCallsWriter;
    }

    @Override
    protected int getChunkSize() {
        return 1000; // matches the reader's batchSize of 1000 items per fetch
    }

    @Bean(name = "TerminalCallsRangeImportJob")
    public Job terminalCallsRangeImportJob() {
        return job();
    }

    @Bean(name = "TerminalCallsRangeImportStep")
    public Step terminalCallsRangeImportStep() {
        return step();
    }
}

파일 보기

@ -0,0 +1,162 @@
package com.snp.batch.jobs.shipMovementTerminalCalls.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto;
import com.snp.batch.jobs.shipMovementTerminalCalls.batch.dto.TerminalCallsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Terminal-call range reader (v2.0 - chunk based).
 *
 * Flow:
 *  1. On the first {@code fetchNextBatch()} call, queries
 *     {@code /Movements/TerminalCalls} once for the whole [startDate, stopDate) range.
 *  2. Slices the full result into batches of {@code batchSize} (1000) and hands
 *     Spring Batch one batch per subsequent {@code fetchNextBatch()} call.
 *  3. {@code read()} then returns items one at a time to the processor/writer.
 *
 * NOTE(review): the entire date range is held in memory after the single API
 * call; only the downstream processing is chunked.
 */
@Slf4j
@StepScope
public class TerminalCallsRangeReader extends BaseApiReader<TerminalCallsDto> {

    /** Full API result for the requested range; loaded lazily on first fetch. */
    private List<TerminalCallsDto> allData;
    /** Index of the next unread element in {@link #allData}. */
    private int currentBatchIndex = 0;
    /** Number of items handed to Spring Batch per fetchNextBatch() call. */
    private final int batchSize = 1000;

    private String startDate;
    private String stopDate;

    public TerminalCallsRangeReader(WebClient webClient,
                                    @Value("#{jobParameters['startDate']}") String startDate,
                                    @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // Default to "yesterday" (one full day) when no date range is supplied.
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode(); // activate chunk mode in BaseApiReader
    }

    @Override
    protected String getReaderName() {
        return "TerminalCalls";
    }

    @Override
    protected void resetCustomState() {
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/TerminalCalls";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    /** Logs the effective date range before the first fetch. */
    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Chunk-mode core method: returns the next batch of results.
     *
     * Spring Batch re-invokes this after it has drained the previous batch
     * through {@code read()}.
     *
     * @return the next batch, or {@code null} when no data remains
     */
    @Override
    protected List<TerminalCallsDto> fetchNextBatch() throws Exception {
        // 1) Lazy-load the whole date range on the first invocation.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) All data consumed -> signal end of input.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Compute the exclusive end index of this batch.
        int endIndex = Math.min(currentBatchIndex + batchSize, allData.size());
        // 4) Slice out the current batch (view over allData; not modified downstream).
        List<TerminalCallsDto> batch = allData.subList(currentBatchIndex, endIndex);
        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());
        currentBatchIndex = endIndex;
        updateApiCallStats(totalBatches, currentBatchNumber);
        return batch;
    }

    /**
     * Calls the API with the date range as query parameters.
     *
     * @param startDate inclusive range start (ISO date-time)
     * @param stopDate  exclusive range end (ISO date-time)
     * @return the decoded response list (may be null on an empty body)
     */
    private List<TerminalCallsDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(TerminalCallsDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<TerminalCallsDto> data) {
        // data == null signals end of input. Guard against allData being null:
        // the initial API call may have returned nothing at all (fixes an NPE
        // on allData.size()).
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -30,7 +30,8 @@ public class TerminalCallsRepositoryImpl extends BaseJdbcRepository<TerminalCall
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_terminalcall";
// return "snp_data.t_terminalcall";
return "new_snp.t_terminalcall";
}
@Override
@ -45,8 +46,10 @@ public class TerminalCallsRepositoryImpl extends BaseJdbcRepository<TerminalCall
@Override
public String getInsertSql() {
// return """
// INSERT INTO snp_data.t_terminalcall(
return """
INSERT INTO snp_data.t_terminalcall(
INSERT INTO new_snp.t_terminalcall(
imo,
mvmn_type,
mvmn_dt,

파일 보기

@ -0,0 +1,115 @@
package com.snp.batch.jobs.shipMovementTransits.batch.config;
import com.snp.batch.common.batch.config.BaseJobConfig;
import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader;
import com.snp.batch.jobs.shipMovementTransits.batch.dto.TransitsDto;
import com.snp.batch.jobs.shipMovementTransits.batch.entity.TransitsEntity;
import com.snp.batch.jobs.shipMovementTransits.batch.processor.TransitsProcessor;
import com.snp.batch.jobs.shipMovementTransits.batch.reader.TransitsRangeReader;
import com.snp.batch.jobs.shipMovementTransits.batch.reader.TransitsReader;
import com.snp.batch.jobs.shipMovementTransits.batch.writer.TransitsWriter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Ship-movement Transits import job configuration (date-range variant).
 *
 * Data flow:
 *   TransitsRangeReader (Maritime API /Movements/Transits, queried by date range)
 *     -> TransitsDto
 *   TransitsProcessor
 *     -> TransitsEntity
 *   TransitsWriter
 *     -> t_transit table
 *
 * Job parameters:
 *   startDate / stopDate — ISO date-time strings; when absent, the reader
 *   defaults to the previous full day (handled inside TransitsRangeReader).
 */
@Slf4j
@Configuration
public class TransitsRangeJobConfig extends BaseJobConfig<TransitsDto, TransitsEntity> {
    private final TransitsProcessor transitsProcessor;
    private final TransitsWriter transitsWriter;
    private final TransitsRangeReader transitsRangeReader;
    private final WebClient maritimeApiWebClient;
    public TransitsRangeJobConfig(
            JobRepository jobRepository,
            PlatformTransactionManager transactionManager,
            TransitsProcessor transitsProcessor,
            TransitsWriter transitsWriter, TransitsRangeReader transitsRangeReader,
            @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) {
        super(jobRepository, transactionManager);
        this.transitsProcessor = transitsProcessor;
        this.transitsWriter = transitsWriter;
        this.transitsRangeReader = transitsRangeReader;
        this.maritimeApiWebClient = maritimeApiWebClient;
    }
    @Override
    protected String getJobName() {
        return "TransitsRangeImportJob";
    }
    @Override
    protected String getStepName() {
        return "TransitsRangeImportStep";
    }
    @Override
    protected ItemReader<TransitsDto> createReader() {
        // Step-scoped proxy injected via the bean factory method below.
        return transitsRangeReader;
    }
    /**
     * Step-scoped reader bean so that the startDate/stopDate job parameters
     * are resolved per job execution. Null parameters are allowed: the
     * reader falls back to its own default date range.
     */
    @Bean
    @StepScope
    public TransitsRangeReader transitsRangeReader(
            @Value("#{jobParameters['startDate']}") String startDate,
            @Value("#{jobParameters['stopDate']}") String stopDate
    ) {
        return new TransitsRangeReader(maritimeApiWebClient, startDate, stopDate);
    }
    @Override
    protected ItemProcessor<TransitsDto, TransitsEntity> createProcessor() {
        return transitsProcessor;
    }
    @Override
    protected ItemWriter<TransitsEntity> createWriter() {
        return transitsWriter;
    }
    @Override
    protected int getChunkSize() {
        // The reader slices the fetched result into 1000-item batches,
        // so the chunk size matches the reader's batch size.
        return 1000;
    }
    @Bean(name = "TransitsRangeImportJob")
    public Job transitsRangeImportJob() {
        return job();
    }
    @Bean(name = "TransitsRangeImportStep")
    public Step transitsRangeImportStep() {
        return step();
    }
}

파일 보기

@ -0,0 +1,159 @@
package com.snp.batch.jobs.shipMovementTransits.batch.reader;
import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementTransits.batch.dto.TransitsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.reactive.function.client.WebClient;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
/**
 * Chunk-based reader for ship-movement transits (date-range query).
 *
 * Behaviour:
 *  1. On the first fetchNextBatch() call, fetches ALL transits for the
 *     [startDate, stopDate) range from the Maritime API in one request.
 *  2. Hands the cached result back to Spring Batch in slices of
 *     {@code batchSize} (1000) items per chunk until exhausted.
 *
 * Job parameters {@code startDate} / {@code stopDate} are ISO date-time
 * strings with a trailing 'Z'. When either is missing or blank, the reader
 * defaults to the previous full day (yesterday 00:00 to today 00:00).
 */
@Slf4j
@StepScope
public class TransitsRangeReader extends BaseApiReader<TransitsDto> {
    // Full API result; loaded lazily on the first fetch, null until then.
    private List<TransitsDto> allData;
    // Index of the next element of allData to hand out.
    private int currentBatchIndex = 0;
    // Number of items returned to Spring Batch per fetchNextBatch() call.
    private final int batchSize = 1000;
    private String startDate;
    private String stopDate;
    public TransitsRangeReader(WebClient webClient,
                               @Value("#{jobParameters['startDate']}") String startDate,
                               @Value("#{jobParameters['stopDate']}") String stopDate) {
        super(webClient);
        // No usable range supplied -> default to the previous full day.
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }
        enableChunkMode(); // hand data to Spring Batch chunk by chunk
    }
    @Override
    protected String getReaderName() {
        return "Transits";
    }
    @Override
    protected void resetCustomState() {
        // Allow the reader to be re-run from scratch on a new step execution.
        this.currentBatchIndex = 0;
        this.allData = null;
    }
    @Override
    protected String getApiPath() {
        return "/Movements/Transits";
    }
    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }
    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }
    /**
     * Core chunk method: returns the next slice of up to {@code batchSize}
     * (1000) items. The full result set is fetched once on the first call
     * and then served from memory.
     *
     * @return next slice, or null when the data is exhausted or the API
     *         returned nothing
     */
    @Override
    protected List<TransitsDto> fetchNextBatch() throws Exception {
        // 1) Lazy one-time fetch of the whole date range.
        if (allData == null ) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);
            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }
            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }
        // 2) All slices already served -> signal end of data.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }
        // 3) Compute the exclusive end index of this slice (capped at size).
        int endIndex = Math.min(currentBatchIndex + batchSize, allData.size());
        List<TransitsDto> batch = allData.subList(currentBatchIndex, endIndex);
        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());
        currentBatchIndex = endIndex;
        updateApiCallStats(totalBatches, currentBatchNumber);
        return batch;
    }
    /**
     * Calls the Transits endpoint with the date range as query parameters
     * and blocks for the full response list.
     *
     * @param startDate range start (inclusive), ISO date-time with 'Z'
     * @param stopDate  range end (exclusive), ISO date-time with 'Z'
     * @return the API response list (may be null or empty)
     */
    private List<TransitsDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(TransitsDto.class)
                .collectList()
                .block();
    }
    @Override
    protected void afterFetch(List<TransitsDto> data) {
        // Guard allData != null: if the very first API call yielded nothing,
        // allData is still null here and allData.size() would throw an NPE.
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}

파일 보기

@ -18,16 +18,17 @@ import java.util.List;
*/
@Slf4j
@Repository("TransitsRepository")
public class TransitlsRepositoryImpl extends BaseJdbcRepository<TransitsEntity, String>
public class TransitsRepositoryImpl extends BaseJdbcRepository<TransitsEntity, String>
implements TransitsRepository {
public TransitlsRepositoryImpl(JdbcTemplate jdbcTemplate) {
public TransitsRepositoryImpl(JdbcTemplate jdbcTemplate) {
super(jdbcTemplate);
}
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
protected String getTableName() {
return "snp_data.t_transit";
// return "snp_data.t_transit";
return "new_snp.t_transit";
}
@Override
@ -42,8 +43,10 @@ public class TransitlsRepositoryImpl extends BaseJdbcRepository<TransitsEntity,
@Override
public String getInsertSql() {
// return """
// INSERT INTO snp_data.t_transit(
return """
INSERT INTO snp_data.t_transit(
INSERT INTO new_snp.t_transit(
imo,
mvmn_type,
mvmn_dt,

파일 보기

@ -55,7 +55,7 @@ spring:
# Server Configuration
server:
port: 8041
port: 8081
servlet:
context-path: /snp-api