diff --git a/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java index d0c82e3..3bf752a 100644 --- a/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java +++ b/src/main/java/com/snp/batch/common/batch/repository/BaseJdbcRepository.java @@ -1,6 +1,5 @@ package com.snp.batch.common.batch.repository; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.jdbc.core.JdbcTemplate; diff --git a/src/main/java/com/snp/batch/global/config/SwaggerConfig.java b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java index 8d4e911..f03338d 100644 --- a/src/main/java/com/snp/batch/global/config/SwaggerConfig.java +++ b/src/main/java/com/snp/batch/global/config/SwaggerConfig.java @@ -88,4 +88,4 @@ public class SwaggerConfig { .name("Apache 2.0") .url("https://www.apache.org/licenses/LICENSE-2.0")); } -} +} \ No newline at end of file diff --git a/src/main/java/com/snp/batch/jobs/event/batch/config/EventImportJobConfig.java b/src/main/java/com/snp/batch/jobs/event/batch/config/EventImportJobConfig.java index 40c38be..9da156e 100644 --- a/src/main/java/com/snp/batch/jobs/event/batch/config/EventImportJobConfig.java +++ b/src/main/java/com/snp/batch/jobs/event/batch/config/EventImportJobConfig.java @@ -1,8 +1,8 @@ package com.snp.batch.jobs.event.batch.config; import com.snp.batch.common.batch.config.BaseJobConfig; -import com.snp.batch.jobs.event.batch.dto.EventDto; -import com.snp.batch.jobs.event.batch.entity.EventEntity; +import com.snp.batch.jobs.event.batch.dto.EventDetailDto; +import com.snp.batch.jobs.event.batch.entity.EventDetailEntity; import com.snp.batch.jobs.event.batch.processor.EventDataProcessor; import com.snp.batch.jobs.event.batch.reader.EventDataReader; import com.snp.batch.jobs.event.batch.writer.EventDataWriter; @@ 
-23,7 +23,7 @@ import org.springframework.web.reactive.function.client.WebClient; @Slf4j @Configuration -public class EventImportJobConfig extends BaseJobConfig { +public class EventImportJobConfig extends BaseJobConfig { private final JdbcTemplate jdbcTemplate; private final WebClient maritimeApiWebClient; @@ -34,7 +34,7 @@ public class EventImportJobConfig extends BaseJobConfig { @Override protected int getChunkSize() { - return 5000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 + return 10; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 } public EventImportJobConfig( JobRepository jobRepository, @@ -63,17 +63,17 @@ public class EventImportJobConfig extends BaseJobConfig { } @Override - protected ItemReader createReader() { + protected ItemReader createReader() { return new EventDataReader(maritimeApiWebClient, jdbcTemplate, batchDateService); } @Override - protected ItemProcessor createProcessor() { + protected ItemProcessor createProcessor() { return eventDataProcessor; } @Override - protected ItemWriter createWriter() { return eventDataWriter; } + protected ItemWriter createWriter() { return eventDataWriter; } @Bean(name = "eventImportJob") public Job eventImportJob() { diff --git a/src/main/java/com/snp/batch/jobs/event/batch/dto/CargoDto.java b/src/main/java/com/snp/batch/jobs/event/batch/dto/CargoDto.java new file mode 100644 index 0000000..8a91382 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/dto/CargoDto.java @@ -0,0 +1,50 @@ +package com.snp.batch.jobs.event.batch.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.snp.batch.jobs.event.batch.entity.CargoEntity; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class CargoDto { + @JsonProperty("EventID") + private Integer eventID; + @JsonProperty("Sequence") + private String sequence; + @JsonProperty("IHSLRorIMOShipNo") + private String 
ihslrOrImoShipNo; + @JsonProperty("Type") + private String type; + @JsonProperty("Quantity") + private Integer quantity; + @JsonProperty("UnitShort") + private String unitShort; + @JsonProperty("Unit") + private String unit; + @JsonProperty("Text") + private String text; + @JsonProperty("CargoDamage") + private String cargoDamage; + @JsonProperty("Dangerous") + private String dangerous; + + public CargoEntity toEntity() { + return CargoEntity.builder() + .eventID(this.eventID) + .sequence(this.sequence) + .ihslrOrImoShipNo(this.ihslrOrImoShipNo) + .type(this.type) + .unit(this.unit) + .quantity(this.quantity) + .unitShort(this.unitShort) + .text(this.text) + .cargoDamage(this.cargoDamage) + .dangerous(this.dangerous) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/dto/EventDetailDto.java b/src/main/java/com/snp/batch/jobs/event/batch/dto/EventDetailDto.java new file mode 100644 index 0000000..38a2fd6 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/dto/EventDetailDto.java @@ -0,0 +1,109 @@ +package com.snp.batch.jobs.event.batch.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.util.List; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class EventDetailDto { + @JsonProperty("IncidentID") + private Integer incidentID; + @JsonProperty("EventID") + private Long eventID; + @JsonProperty("EventTypeID") + private Integer eventTypeID; + @JsonProperty("EventType") + private String eventType; + @JsonProperty("Significance") + private String significance; + @JsonProperty("Headline") + private String headline; + @JsonProperty("IHSLRorIMOShipNo") + private String ihslrOrImoShipNo; + @JsonProperty("VesselName") + private String vesselName; + @JsonProperty("VesselType") + private String vesselType; + 
@JsonProperty("VesselTypeDecode") + private String vesselTypeDecode; + @JsonProperty("VesselFlag") + private String vesselFlagCode; + @JsonProperty("Flag") + private String vesselFlagDecode; + @JsonProperty("CargoLoadingStatusCode") + private String cargoLoadingStatusCode; + @JsonProperty("VesselDWT") + private Integer vesselDWT; + @JsonProperty("VesselGT") + private Integer vesselGT; + @JsonProperty("LDTAtTime") + private Integer ldtAtTime; + @JsonProperty("DateOfBuild") + private Integer dateOfBuild; + @JsonProperty("RegisteredOwnerCodeAtTime") + private String registeredOwnerCodeAtTime; + @JsonProperty("RegisteredOwnerAtTime") + private String registeredOwnerAtTime; + @JsonProperty("RegisteredOwnerCoDAtTime") + private String registeredOwnerCountryCodeAtTime; + @JsonProperty("RegisteredOwnerCountryAtTime") + private String registeredOwnerCountryAtTime; + @JsonProperty("Weather") + private String weather; + @JsonProperty("EventTypeDetail") + private String eventTypeDetail; + @JsonProperty("EventTypeDetailID") + private Integer eventTypeDetailID; + @JsonProperty("CasualtyAction") + private String casualtyAction; + @JsonProperty("LocationName") + private String locationName; + @JsonProperty("TownName") + private String townName; + @JsonProperty("MarsdenGridReference") + private Integer marsdenGridReference; + @JsonProperty("EnvironmentLocation") + private String environmentLocation; + @JsonProperty("CasualtyZone") + private String casualtyZone; + @JsonProperty("CasualtyZoneCode") + private String casualtyZoneCode; + @JsonProperty("CountryCode") + private String countryCode; + @JsonProperty("AttemptedBoarding") + private String attemptedBoarding; + @JsonProperty("Description") + private String description; + @JsonProperty("Pollutant") + private String pollutant; + @JsonProperty("PollutantUnit") + private String pollutantUnit; + @JsonProperty("PollutantQuantity") + private Double pollutantQuantity; + @JsonProperty("PublishedDate") + private String publishedDate; + 
@JsonProperty("Component2") + private String component2; + @JsonProperty("FiredUpon") + private String firedUpon; + private String eventStartDate; + private String eventEndDate; + + @JsonProperty("Cargoes") + private List cargoes; + + @JsonProperty("HumanCasualties") + private List humanCasualties; + + @JsonProperty("Relationships") + private List relationships; +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/dto/EventDetailResponse.java b/src/main/java/com/snp/batch/jobs/event/batch/dto/EventDetailResponse.java new file mode 100644 index 0000000..fe67261 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/dto/EventDetailResponse.java @@ -0,0 +1,18 @@ +package com.snp.batch.jobs.event.batch.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.List; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class EventDetailResponse { + @JsonProperty("MaritimeEvent") + private EventDetailDto eventDetailDto; +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/dto/EventPeriod.java b/src/main/java/com/snp/batch/jobs/event/batch/dto/EventPeriod.java new file mode 100644 index 0000000..05e4703 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/dto/EventPeriod.java @@ -0,0 +1,12 @@ +package com.snp.batch.jobs.event.batch.dto; + +import lombok.AllArgsConstructor; +import lombok.Data; + +import java.time.LocalDateTime; + +@Data +@AllArgsConstructor +public class EventPeriod { + private String eventStartDate; + private String eventEndDate;} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/dto/HumanCasualtyDto.java b/src/main/java/com/snp/batch/jobs/event/batch/dto/HumanCasualtyDto.java new file mode 100644 index 0000000..09ed21c --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/dto/HumanCasualtyDto.java @@ -0,0 +1,35 @@ +package 
com.snp.batch.jobs.event.batch.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.snp.batch.jobs.event.batch.entity.HumanCasualtyEntity; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class HumanCasualtyDto { + @JsonProperty("EventID") + private Integer eventID; + @JsonProperty("Scope") + private String scope; + @JsonProperty("Type") + private String type; + @JsonProperty("Qualifier") + private String qualifier; + @JsonProperty("Count") + private Integer count; + + public HumanCasualtyEntity toEntity() { + return HumanCasualtyEntity.builder() + .eventID(this.eventID) + .scope(this.scope) + .type(this.type) + .qualifier(this.qualifier) + .count(this.count) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/dto/RelationshipDto.java b/src/main/java/com/snp/batch/jobs/event/batch/dto/RelationshipDto.java new file mode 100644 index 0000000..1d1beae --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/dto/RelationshipDto.java @@ -0,0 +1,41 @@ +package com.snp.batch.jobs.event.batch.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.snp.batch.jobs.event.batch.entity.RelationshipEntity; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RelationshipDto { + @JsonProperty("IncidentID") + private String incidentID; + @JsonProperty("EventID") + private Integer eventID; + @JsonProperty("RelationshipType") + private String relationshipType; + @JsonProperty("RelationshipTypeCode") + private String relationshipTypeCode; + @JsonProperty("EventID2") + private Integer eventID2; + @JsonProperty("EventType") + private String eventType; + @JsonProperty("EventTypeCode") + private String eventTypeCode; + + public RelationshipEntity 
toEntity() { + return RelationshipEntity.builder() + .incidentID(this.incidentID) + .eventID(this.eventID) + .relationshipType(this.relationshipType) + .relationshipTypeCode(this.relationshipTypeCode) + .eventID2(this.eventID2) + .eventType(this.eventType) + .eventTypeCode(this.eventTypeCode) + .build(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/entity/CargoEntity.java b/src/main/java/com/snp/batch/jobs/event/batch/entity/CargoEntity.java new file mode 100644 index 0000000..8e2e95d --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/entity/CargoEntity.java @@ -0,0 +1,26 @@ +package com.snp.batch.jobs.event.batch.entity; + +import com.snp.batch.common.batch.entity.BaseEntity; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class CargoEntity extends BaseEntity { + private Integer eventID; + private String sequence; + private String ihslrOrImoShipNo; + private String type; + private Integer quantity; + private String unitShort; + private String unit; + private String text; + private String cargoDamage; + private String dangerous; +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/entity/EventDetailEntity.java b/src/main/java/com/snp/batch/jobs/event/batch/entity/EventDetailEntity.java new file mode 100644 index 0000000..58b4bda --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/entity/EventDetailEntity.java @@ -0,0 +1,67 @@ +package com.snp.batch.jobs.event.batch.entity; + +import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateDeserializer; +import com.snp.batch.common.batch.entity.BaseEntity; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +import 
java.time.LocalDateTime; +import java.util.List; + +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class EventDetailEntity extends BaseEntity { + private Integer incidentID; + private Long eventID; + private Integer eventTypeID; + private String eventType; + private String significance; + private String headline; + private String ihslrOrImoShipNo; + private String vesselName; + private String vesselType; + private String vesselTypeDecode; + private String vesselFlagCode; + private String vesselFlagDecode; + private String cargoLoadingStatusCode; + private Integer vesselDWT; + private Integer vesselGT; + private Integer ldtAtTime; + private Integer dateOfBuild; + private String registeredOwnerCodeAtTime; + private String registeredOwnerAtTime; + private String registeredOwnerCountryCodeAtTime; + private String registeredOwnerCountryAtTime; + private String weather; + private String eventTypeDetail; + private Integer eventTypeDetailID; + private String casualtyAction; + private String locationName; + private String townName; + private Integer marsdenGridReference; + private String environmentLocation; + private String casualtyZone; + private String casualtyZoneCode; + private String countryCode; + private String attemptedBoarding; + private String description; + private String pollutant; + private String pollutantUnit; + private Double pollutantQuantity; + private String publishedDate; + private String component2; + private String firedUpon; + + private String eventStartDate; + private String eventEndDate; + + private List cargoes; + private List humanCasualties; + private List relationships; +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/entity/EventEntity.java b/src/main/java/com/snp/batch/jobs/event/batch/entity/EventEntity.java index 19352c4..6a55161 100644 --- a/src/main/java/com/snp/batch/jobs/event/batch/entity/EventEntity.java +++ 
b/src/main/java/com/snp/batch/jobs/event/batch/entity/EventEntity.java @@ -1,7 +1,6 @@ package com.snp.batch.jobs.event.batch.entity; import com.snp.batch.common.batch.entity.BaseEntity; -import jakarta.persistence.Embedded; import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; diff --git a/src/main/java/com/snp/batch/jobs/event/batch/entity/HumanCasualtyEntity.java b/src/main/java/com/snp/batch/jobs/event/batch/entity/HumanCasualtyEntity.java new file mode 100644 index 0000000..d52bd9e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/entity/HumanCasualtyEntity.java @@ -0,0 +1,21 @@ +package com.snp.batch.jobs.event.batch.entity; + +import com.snp.batch.common.batch.entity.BaseEntity; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class HumanCasualtyEntity extends BaseEntity { + private Integer eventID; + private String scope; + private String type; + private String qualifier; + private Integer count; +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/entity/RelationshipEntity.java b/src/main/java/com/snp/batch/jobs/event/batch/entity/RelationshipEntity.java new file mode 100644 index 0000000..e3c5ea7 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/entity/RelationshipEntity.java @@ -0,0 +1,23 @@ +package com.snp.batch.jobs.event.batch.entity; + +import com.snp.batch.common.batch.entity.BaseEntity; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class RelationshipEntity extends BaseEntity { + private String incidentID; + private Integer 
eventID; + private String relationshipType; + private String relationshipTypeCode; + private Integer eventID2; + private String eventType; + private String eventTypeCode; +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/processor/EventDataProcessor.java b/src/main/java/com/snp/batch/jobs/event/batch/processor/EventDataProcessor.java index 329ab54..74dd115 100644 --- a/src/main/java/com/snp/batch/jobs/event/batch/processor/EventDataProcessor.java +++ b/src/main/java/com/snp/batch/jobs/event/batch/processor/EventDataProcessor.java @@ -1,34 +1,75 @@ package com.snp.batch.jobs.event.batch.processor; import com.snp.batch.common.batch.processor.BaseProcessor; -import com.snp.batch.jobs.event.batch.dto.EventDto; -import com.snp.batch.jobs.event.batch.entity.EventEntity; +import com.snp.batch.jobs.event.batch.dto.CargoDto; +import com.snp.batch.jobs.event.batch.dto.EventDetailDto; +import com.snp.batch.jobs.event.batch.dto.HumanCasualtyDto; +import com.snp.batch.jobs.event.batch.dto.RelationshipDto; +import com.snp.batch.jobs.event.batch.entity.EventDetailEntity; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; +import java.util.stream.Collectors; + @Slf4j @Component -public class EventDataProcessor extends BaseProcessor { +public class EventDataProcessor extends BaseProcessor { @Override - protected EventEntity processItem(EventDto dto) throws Exception { - log.debug("Event 데이터 처리 시작: Event ID = {}", dto.getEventId()); + protected EventDetailEntity processItem(EventDetailDto dto) throws Exception { + log.debug("Event 데이터 처리 시작: Event ID = {}", dto.getEventID()); - EventEntity entity = EventEntity.builder() - .incidentId(dto.getIncidentId()) - .eventId(dto.getEventId()) - .startDate(dto.getStartDate()) + EventDetailEntity entity = EventDetailEntity.builder() + .eventID(dto.getEventID()) + .incidentID(dto.getIncidentID()) + .eventTypeID(dto.getEventTypeID()) .eventType(dto.getEventType()) .significance(dto.getSignificance()) 
.headline(dto.getHeadline()) - .endDate(dto.getEndDate()) - .ihslRorImoShipNo(dto.getIhslRorImoShipNo()) + .ihslrOrImoShipNo(dto.getIhslrOrImoShipNo()) .vesselName(dto.getVesselName()) .vesselType(dto.getVesselType()) + .vesselTypeDecode(dto.getVesselTypeDecode()) + .vesselFlagCode(dto.getVesselFlagCode()) + .vesselFlagDecode(dto.getVesselFlagDecode()) + .cargoLoadingStatusCode(dto.getCargoLoadingStatusCode()) + .vesselDWT(dto.getVesselDWT()) + .vesselGT(dto.getVesselGT()) + .ldtAtTime(dto.getLdtAtTime()) + .dateOfBuild(dto.getDateOfBuild()) + .registeredOwnerCodeAtTime(dto.getRegisteredOwnerCodeAtTime()) + .registeredOwnerAtTime(dto.getRegisteredOwnerAtTime()) + .registeredOwnerCountryCodeAtTime(dto.getRegisteredOwnerCountryCodeAtTime()) + .registeredOwnerCountryAtTime(dto.getRegisteredOwnerCountryAtTime()) + .weather(dto.getWeather()) + .eventTypeDetail(dto.getEventTypeDetail()) + .eventTypeDetailID(dto.getEventTypeDetailID()) + .casualtyAction(dto.getCasualtyAction()) .locationName(dto.getLocationName()) + .townName(dto.getTownName()) + .marsdenGridReference(dto.getMarsdenGridReference()) + .environmentLocation(dto.getEnvironmentLocation()) + .casualtyZone(dto.getCasualtyZone()) + .casualtyZoneCode(dto.getCasualtyZoneCode()) + .countryCode(dto.getCountryCode()) + .attemptedBoarding(dto.getAttemptedBoarding()) + .description(dto.getDescription()) + .pollutant(dto.getPollutant()) + .pollutantUnit(dto.getPollutantUnit()) + .pollutantQuantity(dto.getPollutantQuantity()) .publishedDate(dto.getPublishedDate()) + .component2(dto.getComponent2()) + .firedUpon(dto.getFiredUpon()) + .eventStartDate(dto.getEventStartDate()) + .eventEndDate(dto.getEventEndDate()) + .cargoes(dto.getCargoes() != null ? + dto.getCargoes().stream().map(CargoDto::toEntity).collect(Collectors.toList()) : null) + .humanCasualties(dto.getHumanCasualties() != null ? 
+ dto.getHumanCasualties().stream().map(HumanCasualtyDto::toEntity).collect(Collectors.toList()) : null) + .relationships(dto.getRelationships() != null ? + dto.getRelationships().stream().map(RelationshipDto::toEntity).collect(Collectors.toList()) : null) .build(); - log.debug("Event 데이터 처리 완료: Event ID = {}", dto.getEventId()); + log.debug("Event 데이터 처리 완료: Event ID = {}", dto.getEventID()); return entity; } diff --git a/src/main/java/com/snp/batch/jobs/event/batch/reader/EventDataReader.java b/src/main/java/com/snp/batch/jobs/event/batch/reader/EventDataReader.java index e50ef36..aae9195 100644 --- a/src/main/java/com/snp/batch/jobs/event/batch/reader/EventDataReader.java +++ b/src/main/java/com/snp/batch/jobs/event/batch/reader/EventDataReader.java @@ -1,20 +1,24 @@ package com.snp.batch.jobs.event.batch.reader; import com.snp.batch.common.batch.reader.BaseApiReader; -import com.snp.batch.jobs.event.batch.dto.EventDto; -import com.snp.batch.jobs.event.batch.dto.EventResponse; +import com.snp.batch.jobs.event.batch.dto.*; +import com.snp.batch.jobs.event.batch.dto.EventDetailDto; +import com.snp.batch.jobs.event.batch.entity.EventDetailEntity; +import com.snp.batch.jobs.shipdetail.batch.dto.ShipDetailComparisonData; import com.snp.batch.service.BatchDateService; import lombok.extern.slf4j.Slf4j; -import org.springframework.core.ParameterizedTypeReference; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.web.reactive.function.client.WebClient; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.util.*; +import java.util.stream.Collectors; @Slf4j -public class EventDataReader extends BaseApiReader { +public class EventDataReader extends BaseApiReader { + + private Map eventPeriodMap; private final JdbcTemplate jdbcTemplate; private final BatchDateService batchDateService; // ✨ BatchDateService 필드 추가 @@ -22,6 +26,7 @@ public class 
EventDataReader extends BaseApiReader { super(webClient); this.jdbcTemplate = jdbcTemplate; this.batchDateService = batchDateService; + enableChunkMode(); // ✨ Chunk 모드 활성화 } @Override @@ -33,27 +38,163 @@ public class EventDataReader extends BaseApiReader { protected String getApiPath() { return "/MaritimeWCF/MaritimeAndTradeEventsService.svc/RESTFul/GetEventListByEventChangeDateRange"; } - protected String getApiKey() {return "EVENT_IMPORT_JOB";} + + protected String getEventDetailApiPath() { + return "/MaritimeWCF/MaritimeAndTradeEventsService.svc/RESTFul/GetEventDataByEventID"; + } + + protected String getApiKey() { + return "EVENT_IMPORT_JOB"; + } + + // 배치 처리 상태 + private List eventIds; + // DB 해시값을 저장할 맵 + private int currentBatchIndex = 0; + private final int batchSize = 1; @Override - protected List fetchDataFromApi() { - try { - log.info("Event API 호출 시작"); - EventResponse response = callEventApiWithBatch(); + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.eventIds = null; + this.eventPeriodMap = new HashMap<>(); + } + + @Override + protected void beforeFetch() { + // 1. 기간내 기록된 Event List 조회 (API 요청) + log.info("Event API 호출"); + EventResponse response = callEventApiWithBatch(); + // 2-1. Event List 에서 EventID List 추출 + // TODO: 2-2. Event List 에서 Map> 추출 + eventIds = extractEventIdList(response); + log.info("EvnetId List 추출 완료 : {} 개", eventIds.size()); + + eventPeriodMap = response.getMaritimeEvents().stream() + .filter(e -> e.getEventId() != null) + .collect(Collectors.toMap( + EventDto::getEventId, + e -> new EventPeriod( + e.getStartDate(), + e.getEndDate() + ) + )); + + updateApiCallStats(eventIds.size(), 0); + } + + @Override + protected List fetchNextBatch() throws Exception { + // 3. 
EventID List 로 Event Detail 조회 (API요청) : 청크단위 실행 + // 모든 배치 처리 완료 확인 + if (eventIds == null || currentBatchIndex >= eventIds.size()) { + return null; // Job 종료 + } + + // 현재 배치의 시작/끝 인덱스 계산 + int startIndex = currentBatchIndex; + int endIndex = Math.min(currentBatchIndex + batchSize, eventIds.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List currentBatch = eventIds.subList(startIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) eventIds.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중 (Event ID : {} 개)...", + getReaderName(), currentBatchNumber, totalBatches, currentBatch.size()); + + try { + // API 호출 + EventDetailResponse response = callEventDetailApiWithBatch(currentBatch.get(0)); + // 다음 배치로 인덱스 이동 + currentBatchIndex = endIndex; + + List eventDetailList = new ArrayList<>(); + + // 응답 처리 + if (response != null && response.getEventDetailDto() != null) { + + // TODO: getEventDetailDto에 Map> 데이터 세팅 + EventDetailDto detailDto = response.getEventDetailDto(); + Long eventId = detailDto.getEventID(); + EventPeriod period = eventPeriodMap.get(eventId); + + if (period != null) { + detailDto.setEventStartDate(period.getEventStartDate()); + detailDto.setEventEndDate(period.getEventEndDate()); + } + + eventDetailList.add(response.getEventDetailDto()); + log.info("[{}] 배치 {}/{} 완료: {} 건 조회", + getReaderName(), currentBatchNumber, totalBatches, eventDetailList.size()); + + // API 호출 통계 업데이트 + updateApiCallStats(totalBatches, currentBatchNumber); + + // API 과부하 방지 (다음 배치 전 0.5초 대기) + if (currentBatchIndex < eventIds.size()) { + Thread.sleep(500); + } + + return eventDetailList; - if (response != null && response.getMaritimeEvents() != null) { - log.info("API 응답 성공: 총 {} 개의 Event 데이터 수신", response.getEventCount()); - return response.getMaritimeEvents(); } else { - log.warn("API 응답이 null이거나 Event 데이터가 없습니다"); - return new ArrayList<>(); + log.warn("[{}] 배치 {}/{} 응답 없음", + getReaderName(), 
currentBatchNumber, totalBatches); + + // API 호출 통계 업데이트 (실패도 카운트) + updateApiCallStats(totalBatches, currentBatchNumber); + + return Collections.emptyList(); } } catch (Exception e) { - log.error("Event API 호출 실패", e); - log.error("에러 메시지: {}", e.getMessage()); - return new ArrayList<>(); + log.error("[{}] 배치 {}/{} 처리 중 오류: {}", + getReaderName(), currentBatchNumber, totalBatches, e.getMessage(), e); + + // 오류 발생 시에도 다음 배치로 이동 (부분 실패 허용) + currentBatchIndex = endIndex; + + // 빈 리스트 반환 (Job 계속 진행) + return Collections.emptyList(); } + + + } + + @Override + protected void afterFetch(List data) { + int totalBatches = (int) Math.ceil((double) eventIds.size() / batchSize); + try { + if (data == null) { + // 3. ✨ 배치 성공 시 상태 업데이트 (트랜잭션 커밋 직전에 실행) + LocalDate successDate = LocalDate.now(); // 현재 배치 실행 시점의 날짜 (Reader의 toDay와 동일한 값) + batchDateService.updateLastSuccessDate(getApiKey(), successDate); + log.info("batch_last_execution update 완료 : {}", getApiKey()); + + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + log.info("[{}] 총 {} 개의 Event ID에 대한 API 호출 종료", + getReaderName(), eventIds.size()); + } + } catch (Exception e) { + log.info("[{}] 전체 {} 개 배치 처리 실패", getReaderName(), totalBatches); + log.info("[{}] 총 {} 개의 Event ID에 대한 API 호출 종료", + getReaderName(), eventIds.size()); + } + } + + private List extractEventIdList(EventResponse response) { + if (response.getMaritimeEvents() == null) { + return Collections.emptyList(); + } + return response.getMaritimeEvents().stream() + // ShipDto 객체에서 imoNumber 필드 (String 타입)를 추출 + .map(EventDto::getEventId) + // IMO 번호가 null이 아닌 경우만 필터링 (선택 사항이지만 안전성을 위해) + .filter(eventId -> eventId != null) + // 추출된 String imoNumber들을 List으로 수집 + .collect(Collectors.toList()); } private EventResponse callEventApiWithBatch() { @@ -77,4 +218,25 @@ public class EventDataReader extends BaseApiReader { .block(); } + private EventDetailResponse callEventDetailApiWithBatch(Long eventId) { + String url = getEventDetailApiPath(); + 
log.info("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url, uriBuilder -> uriBuilder + // 맵에서 파라미터 값을 동적으로 가져와 세팅 + .queryParam("eventID", eventId) + .build()) + .retrieve() + .bodyToMono(EventDetailResponse.class) + .block(); + } + + private LocalDateTime parseToLocalDate(String value) { + if (value == null || value.isBlank()) { + return null; + } + return LocalDateTime.parse(value); + } + } diff --git a/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepository.java b/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepository.java index 2448130..da65ba6 100644 --- a/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepository.java +++ b/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepository.java @@ -1,9 +1,15 @@ package com.snp.batch.jobs.event.batch.repository; -import com.snp.batch.jobs.event.batch.entity.EventEntity; +import com.snp.batch.jobs.event.batch.entity.CargoEntity; +import com.snp.batch.jobs.event.batch.entity.EventDetailEntity; +import com.snp.batch.jobs.event.batch.entity.HumanCasualtyEntity; +import com.snp.batch.jobs.event.batch.entity.RelationshipEntity; import java.util.List; public interface EventRepository { - void saveEventAll(List items); + void saveEventAll(List items); + void saveCargoAll(List items); + void saveHumanCasualtyAll(List items); + void saveRelationshipAll(List items); } diff --git a/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepositoryImpl.java index a45c451..5cbe1e0 100644 --- a/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/event/batch/repository/EventRepositoryImpl.java @@ -1,7 +1,12 @@ package com.snp.batch.jobs.event.batch.repository; import com.snp.batch.common.batch.repository.BaseJdbcRepository; -import com.snp.batch.jobs.event.batch.entity.EventEntity; +import 
com.snp.batch.jobs.event.batch.entity.CargoEntity; +import com.snp.batch.jobs.event.batch.entity.EventDetailEntity; +import com.snp.batch.jobs.event.batch.entity.HumanCasualtyEntity; +import com.snp.batch.jobs.event.batch.entity.RelationshipEntity; +import com.snp.batch.jobs.shipdetail.batch.entity.GroupBeneficialOwnerHistoryEntity; +import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailSql; import lombok.extern.slf4j.Slf4j; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; @@ -13,7 +18,7 @@ import java.util.List; @Slf4j @Repository("EventRepository") -public class EventRepositoryImpl extends BaseJdbcRepository implements EventRepository { +public class EventRepositoryImpl extends BaseJdbcRepository implements EventRepository { public EventRepositoryImpl(JdbcTemplate jdbcTemplate) { super(jdbcTemplate); @@ -25,12 +30,12 @@ public class EventRepositoryImpl extends BaseJdbcRepository i } @Override - protected RowMapper getRowMapper() { + protected RowMapper getRowMapper() { return null; } @Override - protected Long extractId(EventEntity entity) { + protected Long extractId(EventDetailEntity entity) { return null; } @@ -42,7 +47,7 @@ public class EventRepositoryImpl extends BaseJdbcRepository i @Override protected String getUpdateSql() { return """ - INSERT INTO snp_data.event ( + INSERT INTO snp_data.event_detail ( Event_ID, Incident_ID, IHSLRorIMOShipNo, Vessel_Name, Vessel_Type, Event_Type, Significance, Headline, Location_Name, Published_Date, Event_Start_Date, Event_End_Date, batch_flag @@ -69,48 +74,170 @@ public class EventRepositoryImpl extends BaseJdbcRepository i } @Override - protected void setInsertParameters(PreparedStatement ps, EventEntity entity) throws Exception { + protected void setInsertParameters(PreparedStatement ps, EventDetailEntity entity) throws Exception { } - @Override - protected void setUpdateParameters(PreparedStatement ps, EventEntity entity) throws Exception { - int idx = 1; - 
ps.setLong(idx++, entity.getEventId()); - ps.setLong(idx++, entity.getIncidentId()); - ps.setString(idx++, entity.getIhslRorImoShipNo()); - ps.setString(idx++, entity.getVesselName()); - ps.setString(idx++, entity.getVesselType()); - ps.setString(idx++, entity.getEventType()); - ps.setString(idx++, entity.getSignificance()); - ps.setString(idx++, entity.getHeadline()); - ps.setString(idx++, entity.getLocationName()); - ps.setString(idx++, entity.getPublishedDate()); - ps.setString(idx++, entity.getStartDate()); - ps.setString(idx++, entity.getEndDate()); - } - @Override protected String getEntityName() { - return "EventEntity"; + return "EventDetailEntity"; } @Override - public void saveEventAll(List items) { - if (items == null || items.isEmpty()) { - return; - } - jdbcTemplate.batchUpdate(getUpdateSql(), items, items.size(), + public void saveEventAll(List items) { + String entityName = "EventDetailEntity"; + String sql = EventSql.getEventDetailUpdateSql(); + + jdbcTemplate.batchUpdate(sql, items, items.size(), (ps, entity) -> { try { - setUpdateParameters(ps, entity); + setUpdateParameters(ps, (EventDetailEntity) entity); } catch (Exception e) { log.error("배치 수정 파라미터 설정 실패", e); throw new RuntimeException(e); } }); - log.info("{} 전체 저장 완료: 수정={} 건", getEntityName(), items.size()); + log.info("{} 배치 삽입 완료: {} 건", entityName, items.size()); + } + + @Override + public void saveCargoAll(List items) { + String entityName = "CargoEntity"; + String sql = EventSql.getEventCargoSql(); + + jdbcTemplate.batchUpdate(sql, items, items.size(), + (ps, entity) -> { + try { + setCargoInsertParameters(ps, (CargoEntity) entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패 - " + entityName, e); + throw new RuntimeException(e); + } + }); + + log.info("{} 배치 삽입 완료: {} 건", entityName, items.size()); + } + + @Override + public void saveHumanCasualtyAll(List items) { + String entityName = "HumanCasualtyEntity"; + String sql = EventSql.getEventHumanCasualtySql(); + + 
jdbcTemplate.batchUpdate(sql, items, items.size(), + (ps, entity) -> { + try { + setHumanCasualtyInsertParameters(ps, (HumanCasualtyEntity) entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패 - " + entityName, e); + throw new RuntimeException(e); + } + }); + + log.info("{} 배치 삽입 완료: {} 건", entityName, items.size()); + } + + @Override + public void saveRelationshipAll(List items) { + String entityName = "RelationshipEntity"; + String sql = EventSql.getEventRelationshipSql(); + + jdbcTemplate.batchUpdate(sql, items, items.size(), + (ps, entity) -> { + try { + setRelationshipInsertParameters(ps, (RelationshipEntity) entity); + } catch (Exception e) { + log.error("배치 삽입 파라미터 설정 실패 - " + entityName, e); + throw new RuntimeException(e); + } + }); + + log.info("{} 배치 삽입 완료: {} 건", entityName, items.size()); + } + + @Override + protected void setUpdateParameters(PreparedStatement ps, EventDetailEntity entity) throws Exception { + int idx = 1; + ps.setObject(idx++, entity.getEventID()); // event_id + ps.setObject(idx++, entity.getIncidentID()); // incident_id (누락됨) + ps.setObject(idx++, entity.getIhslrOrImoShipNo()); // ihslrorimoshipno (누락됨) + ps.setObject(idx++, entity.getPublishedDate()); // published_date (누락됨) + ps.setObject(idx++, entity.getEventStartDate()); // event_start_date + ps.setObject(idx++, entity.getEventEndDate()); // event_end_date + ps.setString(idx++, entity.getAttemptedBoarding()); // attempted_boarding + ps.setString(idx++, entity.getCargoLoadingStatusCode());// cargo_loading_status_code + ps.setString(idx++, entity.getCasualtyAction()); // casualty_action + ps.setString(idx++, entity.getCasualtyZone()); // casualty_zone + + // 11~20 + ps.setString(idx++, entity.getCasualtyZoneCode()); // casualty_zone_code + ps.setString(idx++, entity.getComponent2()); // component2 + ps.setString(idx++, entity.getCountryCode()); // country_code + ps.setObject(idx++, entity.getDateOfBuild()); // date_of_build (Integer) + ps.setString(idx++, 
entity.getDescription()); // description + ps.setString(idx++, entity.getEnvironmentLocation()); // environment_location + ps.setString(idx++, entity.getLocationName()); // location_name (누락됨) + ps.setObject(idx++, entity.getMarsdenGridReference()); // marsden_grid_reference (Integer) + ps.setString(idx++, entity.getTownName()); // town_name + ps.setString(idx++, entity.getEventType()); // event_type (누락됨) + + // 21~30 + ps.setString(idx++, entity.getEventTypeDetail()); // event_type_detail + ps.setObject(idx++, entity.getEventTypeDetailID()); // event_type_detail_id (Integer) + ps.setObject(idx++, entity.getEventTypeID()); // event_type_id (Integer) + ps.setString(idx++, entity.getFiredUpon()); // fired_upon + ps.setString(idx++, entity.getHeadline()); // headline (누락됨) + ps.setObject(idx++, entity.getLdtAtTime()); // ldt_at_time (Integer) + ps.setString(idx++, entity.getSignificance()); // significance (누락됨) + ps.setString(idx++, entity.getWeather()); // weather + ps.setString(idx++, entity.getPollutant()); // pollutant + ps.setObject(idx++, entity.getPollutantQuantity()); // pollutant_quantity (Double) + + // 31~42 + ps.setString(idx++, entity.getPollutantUnit()); // pollutant_unit + ps.setString(idx++, entity.getRegisteredOwnerCodeAtTime()); // registered_owner_code_at_time + ps.setString(idx++, entity.getRegisteredOwnerAtTime()); // registered_owner_at_time + ps.setString(idx++, entity.getRegisteredOwnerCountryCodeAtTime()); // registered_owner_country_code_at_time + ps.setString(idx++, entity.getRegisteredOwnerCountryAtTime()); // registered_owner_country_at_time + ps.setObject(idx++, entity.getVesselDWT()); // vessel_dwt (Integer) + ps.setString(idx++, entity.getVesselFlagCode()); // vessel_flag_code + ps.setString(idx++, entity.getVesselFlagDecode()); // vessel_flag_decode (누락됨) + ps.setObject(idx++, entity.getVesselGT()); // vessel_gt (Integer) + ps.setString(idx++, entity.getVesselName()); // vessel_name (누락됨) + ps.setString(idx++, 
entity.getVesselType()); // vessel_type (누락됨) + ps.setString(idx++, entity.getVesselTypeDecode()); // vessel_type_decode + } + private void setCargoInsertParameters(PreparedStatement ps, CargoEntity entity)throws Exception{ + int idx = 1; + // INSERT 필드 + ps.setObject(idx++, entity.getEventID()); + ps.setString(idx++, entity.getSequence()); + ps.setString(idx++, entity.getIhslrOrImoShipNo()); + ps.setString(idx++, entity.getType()); + ps.setObject(idx++, entity.getQuantity()); // quantity 필드 (Entity에 없을 경우 null 처리) + ps.setString(idx++, entity.getUnitShort()); // unit_short 필드 + ps.setString(idx++, entity.getUnit()); + ps.setString(idx++, entity.getCargoDamage()); + ps.setString(idx++, entity.getDangerous()); + ps.setString(idx++, entity.getText()); + } + private void setHumanCasualtyInsertParameters(PreparedStatement ps, HumanCasualtyEntity entity)throws Exception{ + int idx = 1; + ps.setObject(idx++, entity.getEventID()); + ps.setString(idx++, entity.getScope()); + ps.setString(idx++, entity.getType()); + ps.setString(idx++, entity.getQualifier()); + ps.setObject(idx++, entity.getCount()); + } + private void setRelationshipInsertParameters(PreparedStatement ps, RelationshipEntity entity)throws Exception{ + int idx = 1; + ps.setString(idx++, entity.getIncidentID()); + ps.setObject(idx++, entity.getEventID()); + ps.setString(idx++, entity.getRelationshipType()); + ps.setString(idx++, entity.getRelationshipTypeCode()); + ps.setObject(idx++, entity.getEventID2()); + ps.setString(idx++, entity.getEventType()); + ps.setString(idx++, entity.getEventTypeCode()); } private static void setStringOrNull(PreparedStatement ps, int index, String value) throws Exception { diff --git a/src/main/java/com/snp/batch/jobs/event/batch/repository/EventSql.java b/src/main/java/com/snp/batch/jobs/event/batch/repository/EventSql.java new file mode 100644 index 0000000..321dc9d --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/event/batch/repository/EventSql.java @@ -0,0 +1,126 @@ 
+package com.snp.batch.jobs.event.batch.repository; + +public class EventSql { + public static String getEventDetailUpdateSql(){ + return """ + INSERT INTO snp_data.event_detail ( + event_id, incident_id, ihslrorimoshipno, published_date, event_start_date, event_end_date, + attempted_boarding, cargo_loading_status_code, casualty_action, + casualty_zone, casualty_zone_code, component2, country_code, + date_of_build, description, environment_location, location_name, + marsden_grid_reference, town_name, event_type, event_type_detail, + event_type_detail_id, event_type_id, fired_upon, headline, + ldt_at_time, significance, weather, pollutant, pollutant_quantity, + pollutant_unit, registered_owner_code_at_time, registered_owner_at_time, + registered_owner_country_code_at_time, registered_owner_country_at_time, + vessel_dwt, vessel_flag_code, vessel_flag_decode, vessel_gt, + vessel_name, vessel_type, vessel_type_decode + ) + VALUES ( + ?, ?, ?, ?::timestamptz,?::timestamptz,?::timestamptz, ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, + ?, ?, ?, ?, ?, ?, ?, ?, ?, ? 
+ ) + ON CONFLICT (event_id) + DO UPDATE SET + incident_id = EXCLUDED.incident_id, + ihslrorimoshipno = EXCLUDED.ihslrorimoshipno, + published_date = EXCLUDED.published_date, + event_start_date = EXCLUDED.event_start_date, + event_end_date = EXCLUDED.event_end_date, + attempted_boarding = EXCLUDED.attempted_boarding, + cargo_loading_status_code = EXCLUDED.cargo_loading_status_code, + casualty_action = EXCLUDED.casualty_action, + casualty_zone = EXCLUDED.casualty_zone, + casualty_zone_code = EXCLUDED.casualty_zone_code, + component2 = EXCLUDED.component2, + country_code = EXCLUDED.country_code, + date_of_build = EXCLUDED.date_of_build, + description = EXCLUDED.description, + environment_location = EXCLUDED.environment_location, + location_name = EXCLUDED.location_name, + marsden_grid_reference = EXCLUDED.marsden_grid_reference, + town_name = EXCLUDED.town_name, + event_type = EXCLUDED.event_type, + event_type_detail = EXCLUDED.event_type_detail, + event_type_detail_id = EXCLUDED.event_type_detail_id, + event_type_id = EXCLUDED.event_type_id, + fired_upon = EXCLUDED.fired_upon, + headline = EXCLUDED.headline, + ldt_at_time = EXCLUDED.ldt_at_time, + significance = EXCLUDED.significance, + weather = EXCLUDED.weather, + pollutant = EXCLUDED.pollutant, + pollutant_quantity = EXCLUDED.pollutant_quantity, + pollutant_unit = EXCLUDED.pollutant_unit, + registered_owner_code_at_time = EXCLUDED.registered_owner_code_at_time, + registered_owner_at_time = EXCLUDED.registered_owner_at_time, + registered_owner_country_code_at_time = EXCLUDED.registered_owner_country_code_at_time, + registered_owner_country_at_time = EXCLUDED.registered_owner_country_at_time, + vessel_dwt = EXCLUDED.vessel_dwt, + vessel_flag_code = EXCLUDED.vessel_flag_code, + vessel_flag_decode = EXCLUDED.vessel_flag_decode, + vessel_gt = EXCLUDED.vessel_gt, + vessel_name = EXCLUDED.vessel_name, + vessel_type = EXCLUDED.vessel_type, + vessel_type_decode = EXCLUDED.vessel_type_decode, + batch_flag = 'N'; + """; + } 
+ + public static String getEventCargoSql(){ + return """ + INSERT INTO snp_data.event_cargo ( + event_id, "sequence", ihslrorimoshipno, "type", quantity, + unit_short, unit, cargo_damage, dangerous, "text" + ) + VALUES ( + ?, ?, ?, ?, ?, + ?, ?, ?, ?, ? + ) + ON CONFLICT (event_id, ihslrorimoshipno, "type", "sequence") + DO UPDATE SET + quantity = EXCLUDED.quantity, + unit_short = EXCLUDED.unit_short, + unit = EXCLUDED.unit, + cargo_damage = EXCLUDED.cargo_damage, + dangerous = EXCLUDED.dangerous, + "text" = EXCLUDED."text", + batch_flag = 'N'; + """; + } + + public static String getEventRelationshipSql(){ + return """ + INSERT INTO snp_data.event_relationship ( + incident_id, event_id, relationship_type, relationship_type_code, + event_id_2, event_type, event_type_code + ) + VALUES ( + ?, ?, ?, ?, + ?, ?, ? + ) + ON CONFLICT (incident_id, event_id, event_id_2, event_type_code, relationship_type_code) + DO UPDATE SET + relationship_type = EXCLUDED.relationship_type, + event_type = EXCLUDED.event_type, + batch_flag = 'N'; + """; + } + + public static String getEventHumanCasualtySql(){ + return """ + INSERT INTO snp_data.event_humancasualty ( + event_id, "scope", "type", qualifier, "count" + ) + VALUES ( + ?, ?, ?, ?, ? 
+ ) + ON CONFLICT (event_id, "scope", "type", qualifier) + DO UPDATE SET + "count" = EXCLUDED."count", + batch_flag = 'N'; + """; + } +} diff --git a/src/main/java/com/snp/batch/jobs/event/batch/writer/EventDataWriter.java b/src/main/java/com/snp/batch/jobs/event/batch/writer/EventDataWriter.java index 9a61e6a..936ce48 100644 --- a/src/main/java/com/snp/batch/jobs/event/batch/writer/EventDataWriter.java +++ b/src/main/java/com/snp/batch/jobs/event/batch/writer/EventDataWriter.java @@ -1,35 +1,49 @@ package com.snp.batch.jobs.event.batch.writer; import com.snp.batch.common.batch.writer.BaseWriter; -import com.snp.batch.jobs.event.batch.entity.EventEntity; +import com.snp.batch.jobs.event.batch.entity.EventDetailEntity; import com.snp.batch.jobs.event.batch.repository.EventRepository; -import com.snp.batch.service.BatchDateService; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; +import org.springframework.util.CollectionUtils; -import java.time.LocalDate; import java.util.List; @Slf4j @Component -public class EventDataWriter extends BaseWriter { +public class EventDataWriter extends BaseWriter { private final EventRepository eventRepository; - private final BatchDateService batchDateService; // ✨ BatchDateService 필드 추가 protected String getApiKey() {return "EVENT_IMPORT_JOB";} - public EventDataWriter(EventRepository eventRepository, BatchDateService batchDateService) { + public EventDataWriter(EventRepository eventRepository) { super("EventRepository"); this.eventRepository = eventRepository; - this.batchDateService = batchDateService; } @Override - protected void writeItems(List items) throws Exception { - eventRepository.saveEventAll(items); - log.info("Event 저장 완료: 수정={} 건", items.size()); + protected void writeItems(List items) throws Exception { - // ✨ 배치 성공 시 상태 업데이트 (트랜잭션 커밋 직전에 실행) - LocalDate successDate = LocalDate.now(); - batchDateService.updateLastSuccessDate(getApiKey(), successDate); - log.info("batch_last_execution 
update 완료 : {}", getApiKey()); + if (CollectionUtils.isEmpty(items)) { + return; + } + + // 1. EventDetail 메인 데이터 저장 + eventRepository.saveEventAll(items); + + for (EventDetailEntity event : items) { + // 2. CargoEntityList Save + if (!CollectionUtils.isEmpty(event.getCargoes())) { + eventRepository.saveCargoAll(event.getCargoes()); + } + // 3. HumanCasualtyEntityList Save + if (!CollectionUtils.isEmpty(event.getHumanCasualties())) { + eventRepository.saveHumanCasualtyAll(event.getHumanCasualties()); + } + // 4. RelationshipEntityList Save + if (!CollectionUtils.isEmpty(event.getRelationships())) { + eventRepository.saveRelationshipAll(event.getRelationships()); + } + } + + log.info("Batch Write 완료: {} 건의 Event 처리됨", items.size()); } } diff --git a/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java b/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java index 3cf87f0..b753867 100644 --- a/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java +++ b/src/main/java/com/snp/batch/jobs/pscInspection/batch/config/PscInspectionJobConfig.java @@ -80,8 +80,8 @@ public class PscInspectionJobConfig extends BaseJobConfig { - //private final JdbcTemplate jdbcTemplate; - - private final String fromDate; - private final String toDate; -// private List allImoNumbers; + private final String startDate; + private final String stopDate; private List allData; private int currentBatchIndex = 0; - private final int batchSize = 10; + private final int batchSize = 1000; public PscApiReader(@Qualifier("maritimeApiWebClient") WebClient webClient, - @Value("#{jobParameters['fromDate']}") String fromDate, - @Value("#{jobParameters['toDate']}") String toDate) { + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { super(webClient); - //this.jdbcTemplate = jdbcTemplate; - this.fromDate = fromDate; - this.toDate = toDate; + + // 
날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || + stopDate == null || stopDate.isBlank()) { + + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + enableChunkMode(); } @@ -45,7 +55,7 @@ public class PscApiReader extends BaseApiReader { @Override protected void resetCustomState() { this.currentBatchIndex = 0; -// this.allImoNumbers = null; + this.allData = null; } @Override @@ -53,37 +63,18 @@ public class PscApiReader extends BaseApiReader { return "/MaritimeWCF/PSCService.svc/RESTFul/GetPSCDataByLastUpdateDateRange"; } - private static final String GET_ALL_IMO_QUERY = - "SELECT imo_number FROM ship_data ORDER BY id"; -// "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_berthcalls) ORDER BY imo_number"; - @Override protected void beforeFetch() { - // 전처리 과정 - // Step 1. 
IMO 전체 번호 조회 - /*log.info("[{}] ship_data 테이블에서 IMO 번호 조회 시작...", getReaderName()); - - allImoNumbers = jdbcTemplate.queryForList(GET_ALL_IMO_QUERY, String.class); - int totalBatches = (int) Math.ceil((double) allImoNumbers.size() / batchSize); - - log.info("[{}] 총 {} 개의 IMO 번호 조회 완료", getReaderName(), allImoNumbers.size()); - log.info("[{}] {}개씩 배치로 분할하여 API 호출 예정", getReaderName(), batchSize); - log.info("[{}] 예상 배치 수: {} 개", getReaderName(), totalBatches); - - // API 통계 초기화 - updateApiCallStats(totalBatches, 0);*/ - log.info("[PSC] 요청 날짜 범위: {} → {}", fromDate, toDate); + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); } - @Override protected List fetchNextBatch() { - // 1) 처음 호출이면 API 한 번 호출해서 전체 데이터를 가져온다 if (allData == null) { - log.info("[PSC] 최초 API 조회 실행: {} ~ {}", fromDate, toDate); - allData = callApiWithBatch(fromDate, toDate); + log.info("[PSC] 최초 API 조회 실행: {} ~ {}", startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); if (allData == null || allData.isEmpty()) { log.warn("[PSC] 조회된 데이터 없음 → 종료"); @@ -116,20 +107,19 @@ public class PscApiReader extends BaseApiReader { return batch; } - // private List callApiWithBatch(String lrno) { - private List callApiWithBatch(String from, String to) { + private List callApiWithBatch(String startDate, String stopDate) { - String[] f = from.split("-"); - String[] t = to.split("-"); + LocalDateTime fromDay = parseToDateTime(startDate, true); + LocalDateTime toDay = parseToDateTime(stopDate, false); String url = getApiPath() + "?shipsCategory=0" - + "&fromYear=" + f[0] - + "&fromMonth=" + f[1] - + "&fromDay=" + f[2] - + "&toYear=" + t[0] - + "&toMonth=" + t[1] - + "&toDay=" + t[2]; + + "&fromYear=" + fromDay.getYear() + + "&fromMonth=" + fromDay.getMonthValue() + + "&fromDay=" + fromDay.getDayOfMonth() + + "&toYear=" + toDay.getYear() + + "&toMonth=" + toDay.getMonthValue() + + "&toDay=" + toDay.getDayOfMonth(); log.info("[PSC] API 호출 URL = {}", url); @@ -170,4 +160,18 @@ 
public class PscApiReader extends BaseApiReader { getReaderName(), allData.size()); } } + + private LocalDateTime parseToDateTime(String value, boolean isStart) { + + // yyyy-MM-dd 인 경우 + if (value.length() == 10) { + LocalDate date = LocalDate.parse(value); + return isStart + ? date.atStartOfDay() + : date.plusDays(1).atStartOfDay(); + } + + // yyyy-MM-ddTHH:mm:ssZ 인 경우 + return OffsetDateTime.parse(value).toLocalDateTime(); + } } diff --git a/src/main/java/com/snp/batch/jobs/risk/batch/config/RiskImportRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/risk/batch/config/RiskImportRangeJobConfig.java new file mode 100644 index 0000000..1ff38d5 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/risk/batch/config/RiskImportRangeJobConfig.java @@ -0,0 +1,94 @@ +package com.snp.batch.jobs.risk.batch.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.risk.batch.dto.RiskDto; +import com.snp.batch.jobs.risk.batch.entity.RiskEntity; +import com.snp.batch.jobs.risk.batch.processor.RiskDataProcessor; +import com.snp.batch.jobs.risk.batch.reader.RiskDataRangeReader; +import com.snp.batch.jobs.risk.batch.reader.RiskDataReader; +import com.snp.batch.jobs.risk.batch.writer.RiskDataWriter; +import com.snp.batch.jobs.sanction.batch.reader.ComplianceDataRangeReader; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import 
org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +@Slf4j +@Configuration +public class RiskImportRangeJobConfig extends BaseJobConfig { + private final WebClient maritimeServiceApiWebClient; + private final RiskDataProcessor riskDataProcessor; + private final RiskDataWriter riskDataWriter; + private final RiskDataRangeReader riskDataRangeReader; + + @Override + protected int getChunkSize() { + return 5000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 + } + public RiskImportRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + RiskDataProcessor riskDataProcessor, + RiskDataWriter riskDataWriter, + JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient")WebClient maritimeServiceApiWebClient, RiskDataRangeReader riskDataRangeReader) { + super(jobRepository, transactionManager); + this.maritimeServiceApiWebClient = maritimeServiceApiWebClient; + this.riskDataProcessor = riskDataProcessor; + this.riskDataWriter = riskDataWriter; + this.riskDataRangeReader = riskDataRangeReader; + } + + @Override + protected String getJobName() { + return "RiskRangeImportJob"; + } + + @Override + protected String getStepName() { + return "RiskRangeImportStep"; + } + + @Override + protected ItemReader createReader() { + return riskDataRangeReader; + } + @Bean + @StepScope + public RiskDataRangeReader riskDataRangeReader( + @Value("#{jobParameters['fromDate']}") String startDate, + @Value("#{jobParameters['toDate']}") String stopDate + ) { + return new RiskDataRangeReader(maritimeServiceApiWebClient, startDate, stopDate); + } + + @Override + protected ItemProcessor createProcessor() { + return riskDataProcessor; + } + + @Override + protected ItemWriter createWriter() { return riskDataWriter; } + + @Bean(name = "RiskRangeImportJob") + public Job riskRangeImportJob() { + return job(); + } + + @Bean(name = 
"RiskRangeImportStep") + public Step riskRangeImportStep() { + return step(); + } + +} diff --git a/src/main/java/com/snp/batch/jobs/risk/batch/reader/RiskDataRangeReader.java b/src/main/java/com/snp/batch/jobs/risk/batch/reader/RiskDataRangeReader.java new file mode 100644 index 0000000..a29dd09 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/risk/batch/reader/RiskDataRangeReader.java @@ -0,0 +1,120 @@ +package com.snp.batch.jobs.risk.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.risk.batch.dto.RiskDto; +import com.snp.batch.jobs.sanction.batch.dto.ComplianceDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.ParameterizedTypeReference; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.Collections; +import java.util.List; + +@Slf4j +public class RiskDataRangeReader extends BaseApiReader { + + //TODO : + // 1. Core20 IMO_NUMBER 전체 조회 + // 2. IMO번호에 대한 마지막 AIS 신호 요청 (1회 최대 5000개 : Chunk 단위로 반복) + // 3. 
Response Data -> Core20에 업데이트 (Chunk 단위로 반복) + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 100; + private String fromDate; + private String toDate; + public RiskDataRangeReader(WebClient webClient, + @Value("#{jobParameters['fromDate']}") String fromDate, + @Value("#{jobParameters['toDate']}") String toDate) { + super(webClient); + + // 날짜가 없으면 전날 하루 기준 + if (fromDate == null || fromDate.isBlank() || + toDate == null || toDate.isBlank()) { + + LocalDate yesterday = LocalDate.now().minusDays(1); + this.fromDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.toDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.fromDate = fromDate; + this.toDate = toDate; + } + + enableChunkMode(); + } + + @Override + protected String getReaderName() { + return "riskDataRangeReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/RiskAndCompliance/UpdatedRiskList"; + } + + @Override + protected void beforeFetch(){ + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), fromDate, toDate); + } + + @Override + protected List fetchNextBatch() throws Exception { + // 모든 배치 처리 완료 확인 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), fromDate, toDate); + allData = callApiWithBatch(fromDate, toDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int end = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 4) 현재 batch 리스트 잘라서 반환 + List batch = allData.subList(currentBatchIndex, end); + + int batchNum = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size()); + + // 다음 batch 인덱스 이동 + currentBatchIndex = end; + updateApiCallStats(totalBatches, batchNum); + + return batch; + } + + private List callApiWithBatch(String fromDate, String stopDate) { + String url = getApiPath() + "?fromDate=" + fromDate +"&stopDate=" + stopDate; + log.debug("[{}] API 호출: {}", getReaderName(), url); + return webClient.get() + .uri(url) + .retrieve() + .bodyToMono(new ParameterizedTypeReference>() {}) + .block(); + } + +} diff --git a/src/main/java/com/snp/batch/jobs/risk/batch/repository/RiskRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/risk/batch/repository/RiskRepositoryImpl.java index a5c8695..a6a07ff 100644 --- a/src/main/java/com/snp/batch/jobs/risk/batch/repository/RiskRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/risk/batch/repository/RiskRepositoryImpl.java @@ -41,7 +41,7 @@ public class RiskRepositoryImpl extends BaseJdbcRepository imp @Override protected String getUpdateSql() { return """ - INSERT INTO snp_data.risk ( + INSERT INTO new_snp.risk ( lrno, lastupdated, riskdatamaintained, dayssincelastseenonais, dayssincelastseenonaisnarrative, daysunderais, daysunderaisnarrative, imocorrectonais, imocorrectonaisnarrative, sailingundername, sailingundernamenarrative, anomalousmessagesfrommmsi, anomalousmessagesfrommmsinarrative, diff --git 
a/src/main/java/com/snp/batch/jobs/sanction/batch/config/SanctionUpdateRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/sanction/batch/config/SanctionUpdateRangeJobConfig.java new file mode 100644 index 0000000..4da073f --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/sanction/batch/config/SanctionUpdateRangeJobConfig.java @@ -0,0 +1,98 @@ +package com.snp.batch.jobs.sanction.batch.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.sanction.batch.dto.ComplianceDto; +import com.snp.batch.jobs.sanction.batch.entity.ComplianceEntity; +import com.snp.batch.jobs.sanction.batch.processor.ComplianceDataProcessor; +import com.snp.batch.jobs.sanction.batch.reader.ComplianceDataRangeReader; +import com.snp.batch.jobs.sanction.batch.reader.ComplianceDataReader; +import com.snp.batch.jobs.sanction.batch.writer.ComplianceDataWriter; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader.AnchorageCallsRangeReader; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +@Slf4j +@Configuration +public class SanctionUpdateRangeJobConfig extends BaseJobConfig { + private final JdbcTemplate jdbcTemplate; + private final WebClient 
maritimeServiceApiWebClient; + private final ComplianceDataProcessor complianceDataProcessor; + private final ComplianceDataWriter complianceDataWriter; + private final ComplianceDataRangeReader complianceDataRangeReader; + + @Override + protected int getChunkSize() { + return 5000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 + } + public SanctionUpdateRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + ComplianceDataProcessor complianceDataProcessor, + ComplianceDataWriter complianceDataWriter, + JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient")WebClient maritimeServiceApiWebClient, ComplianceDataRangeReader complianceDataRangeReader) { + super(jobRepository, transactionManager); + this.jdbcTemplate = jdbcTemplate; + this.maritimeServiceApiWebClient = maritimeServiceApiWebClient; + this.complianceDataProcessor = complianceDataProcessor; + this.complianceDataWriter = complianceDataWriter; + this.complianceDataRangeReader = complianceDataRangeReader; + } + + @Override + protected String getJobName() { + return "SanctionRangeUpdateJob"; + } + + @Override + protected String getStepName() { + return "SanctionRangeUpdateStep"; + } + + @Override + protected ItemReader createReader() { + return complianceDataRangeReader; + } + + @Bean + @StepScope + public ComplianceDataRangeReader complianceDataRangeReader( + @Value("#{jobParameters['fromDate']}") String startDate, + @Value("#{jobParameters['toDate']}") String stopDate + ) { + return new ComplianceDataRangeReader(maritimeServiceApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return complianceDataProcessor; + } + + @Override + protected ItemWriter createWriter() { + return complianceDataWriter; + } + + @Bean(name = "SanctionRangeUpdateJob") + public Job sanctionRangeUpdateJob() { + return job(); + } + + @Bean(name = "SanctionRangeUpdateStep") + public Step sanctionRangeUpdateStep() { + return step(); + } + +} 
package com.snp.batch.jobs.sanction.batch.reader;

import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.sanction.batch.dto.ComplianceDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.web.reactive.function.client.WebClient;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;

/**
 * Reads updated compliance records from the Risk &amp; Compliance API for a date range.
 *
 * Flow:
 *  - first {@link #fetchNextBatch()} call issues a single API request for
 *    [fromDate, toDate) and caches the full response in memory
 *  - subsequent calls return sub-lists of {@code batchSize} items
 *  - returns {@code null} once everything has been consumed (end of step)
 *
 * When no explicit range is supplied via job parameters, the range defaults to
 * the whole of yesterday (UTC-naive local date; "Z" suffix appended — verify the
 * API actually expects UTC).
 */
@Slf4j
public class ComplianceDataRangeReader extends BaseApiReader<ComplianceDto> {

    /** Full API response, fetched once on the first fetchNextBatch() and then sliced. */
    private List<ComplianceDto> allData;
    /** Index of the first element of the next slice into {@link #allData}. */
    private int currentBatchIndex = 0;
    /** Number of items handed to the framework per slice. */
    private final int batchSize = 100;
    private final String fromDate;
    private final String toDate;

    /**
     * @param webClient maritime service API client
     * @param fromDate  inclusive range start (ISO date-time with "Z"); may be null/blank
     * @param toDate    exclusive range end (ISO date-time with "Z"); may be null/blank
     */
    public ComplianceDataRangeReader(WebClient webClient, String fromDate, String toDate) {
        super(webClient);

        // Default to "yesterday" (one full day) when either bound is missing.
        if (fromDate == null || fromDate.isBlank() ||
            toDate == null || toDate.isBlank()) {

            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.fromDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.toDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.fromDate = fromDate;
            this.toDate = toDate;
        }

        enableChunkMode();
    }

    @Override
    protected String getReaderName() {
        return "ComplianceDataReader";
    }

    @Override
    protected void resetCustomState() {
        // Forget the cached response so a restarted step re-queries the API.
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/RiskAndCompliance/UpdatedComplianceList";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), fromDate, toDate);
    }

    /**
     * Returns the next slice of the cached response, or {@code null} when exhausted.
     */
    @Override
    protected List<ComplianceDto> fetchNextBatch() throws Exception {
        // First call: fetch the whole date range in one request and cache it.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), fromDate, toDate);
            allData = callApiWithBatch(fromDate, toDate);

            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }

            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }

        // All slices consumed → signal end of input.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }

        int end = Math.min(currentBatchIndex + batchSize, allData.size());
        List<ComplianceDto> batch = allData.subList(currentBatchIndex, end);

        int batchNum = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size());

        currentBatchIndex = end;
        updateApiCallStats(totalBatches, batchNum);

        return batch;
    }

    /**
     * Single GET call covering the whole range; the response is fully buffered.
     * May return {@code null} when the remote returns an empty body.
     */
    private List<ComplianceDto> callApiWithBatch(String fromDate, String stopDate) {
        String url = getApiPath() + "?fromDate=" + fromDate + "&stopDate=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);
        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToMono(new ParameterizedTypeReference<List<ComplianceDto>>() {})
                .block();
    }
}
com.snp.batch.jobs.shipMovement.batch.writer; - -import com.snp.batch.common.batch.writer.BaseWriter; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; -import com.snp.batch.jobs.shipMovement.batch.repository.ShipMovementRepository; -import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailRepository; -import com.snp.batch.jobs.shipdetail.batch.repository.ShipHashRepository; -import lombok.extern.slf4j.Slf4j; -import org.springframework.stereotype.Component; - -import java.util.List; - -/** - * 선박 상세 정보 Writer - */ -@Slf4j -@Component -public class ShipMovementWriter extends BaseWriter { - - private final ShipMovementRepository shipMovementRepository; - - - public ShipMovementWriter(ShipDetailRepository shipDetailRepository, ShipHashRepository shipHashRepository, ShipMovementRepository shipMovementRepositoryy) { - super("ShipMovement"); - this.shipMovementRepository = shipMovementRepositoryy; - } - - @Override - protected void writeItems(List items) throws Exception { - - if (items.isEmpty()) { return; } - - log.info("선박 상세 정보 데이터 저장: {} 건", items.size()); - - shipMovementRepository.saveAll(items); - log.info("선박 상세 정보 및 해시 데이터 저장 완료: {} 건", items.size()); - - } - -} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/config/AnchorageCallsRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/config/AnchorageCallsRangeJobConfig.java new file mode 100644 index 0000000..764e26b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/config/AnchorageCallsRangeJobConfig.java @@ -0,0 +1,114 @@ +package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity; +import 
com.snp.batch.jobs.shipMovementAnchorageCalls.batch.processor.AnchorageCallsProcessor; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader.AnchorageCallsRangeReader; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.writer.AnchorageCallsWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * AnchorageCallsReader (ship_data → Maritime API) + * ↓ (AnchorageCallsDto) + * AnchorageCallsProcessor + * ↓ (AnchorageCallsEntity) + * AnchorageCallsWriter + * ↓ (t_anchoragecall 테이블) + */ + +@Slf4j +@Configuration +public class AnchorageCallsRangeJobConfig extends BaseJobConfig { + + private final AnchorageCallsProcessor anchorageCallsProcessor; + private final AnchorageCallsWriter anchorageCallsWriter; + private final AnchorageCallsRangeReader anchorageCallsRangeReader; + + public AnchorageCallsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + 
AnchorageCallsProcessor anchorageCallsProcessor, + AnchorageCallsWriter anchorageCallsWriter, + AnchorageCallsRangeReader anchorageCallsRangeReader + ) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.anchorageCallsProcessor = anchorageCallsProcessor; + this.anchorageCallsWriter = anchorageCallsWriter; + this.anchorageCallsRangeReader = anchorageCallsRangeReader; + } + + @Override + protected String getJobName() { + return "AnchorageCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "AnchorageCallsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return anchorageCallsRangeReader; + } + + @Bean + @StepScope + public AnchorageCallsRangeReader anchorageCallsReader( + @Qualifier("maritimeServiceApiWebClient") WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + return new AnchorageCallsRangeReader(webClient, startDate, stopDate); + } + + @Override + protected ItemProcessor createProcessor() { + return anchorageCallsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return anchorageCallsWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "AnchorageCallsRangeImportJob") + public Job anchorageCallsRangeImportJob() { + return job(); + } + + @Bean(name = "AnchorageCallsRangeImportStep") + public Step anchorageCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/reader/AnchorageCallsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/reader/AnchorageCallsRangeReader.java new file mode 100644 index 0000000..66f9021 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementAnchorageCalls/batch/reader/AnchorageCallsRangeReader.java @@ -0,0 +1,153 @@ +package 
package com.snp.batch.jobs.shipMovementAnchorageCalls.batch.reader;

import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.web.reactive.function.client.WebClient;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;

/**
 * Reads anchorage calls from the Maritime API for a date range.
 *
 * Chunk flow:
 *  - first fetchNextBatch() → single GET /Movements/AnchorageCalls for
 *    [startDate, stopDate), response fully buffered in memory
 *  - subsequent calls → sub-lists of {@code batchSize} (5000) items
 *  - null return → end of step
 *
 * When job parameters are absent the range defaults to the whole of yesterday.
 */
@Slf4j
@StepScope
public class AnchorageCallsRangeReader extends BaseApiReader<AnchorageCallsDto> {

    /** Full API response; null until the first fetch (and stays null when the API returns nothing). */
    private List<AnchorageCallsDto> allData;
    /** Index of the first element of the next slice. */
    private int currentBatchIndex = 0;
    /** Items handed to the framework per slice. */
    private final int batchSize = 5000;
    private final String startDate;
    private final String stopDate;

    public AnchorageCallsRangeReader(WebClient webClient, String startDate, String stopDate) {
        super(webClient);

        // Default to "yesterday" (one full day) when either bound is missing.
        if (startDate == null || startDate.isBlank() ||
            stopDate == null || stopDate.isBlank()) {

            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }

        enableChunkMode();
    }

    @Override
    protected String getReaderName() {
        return "AnchorageCallsReader";
    }

    @Override
    protected void resetCustomState() {
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/AnchorageCalls";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Returns the next in-memory slice, or {@code null} when exhausted.
     */
    @Override
    protected List<AnchorageCallsDto> fetchNextBatch() throws Exception {
        // 1) First call: fetch the entire range once and cache it.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);

            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }

            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }

        // 2) All slices consumed → end of input.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }

        // 3) Slice [currentBatchIndex, end) off the cached response.
        int end = Math.min(currentBatchIndex + batchSize, allData.size());
        List<AnchorageCallsDto> batch = allData.subList(currentBatchIndex, end);

        int batchNum = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size());

        currentBatchIndex = end;
        updateApiCallStats(totalBatches, batchNum);

        return batch;
    }

    /**
     * Single GET covering the whole range; may return {@code null} on an empty body.
     */
    private List<AnchorageCallsDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate;
        log.info("[{}] API 호출: {}", getReaderName(), url);

        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(AnchorageCallsDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<AnchorageCallsDto> data) {
        // data == null marks end-of-input; allData can still be null when the
        // very first fetch returned nothing, so guard against the NPE.
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}
com.snp.batch.jobs.shipMovementAnchorageCalls.batch.entity.AnchorageCallsEntity; import lombok.extern.slf4j.Slf4j; import org.springframework.jdbc.core.JdbcTemplate; @@ -32,7 +30,8 @@ public class AnchorageCallsRepositoryImpl extends BaseJdbcRepository { + + private final BerthCallsProcessor berthCallsProcessor; + private final BerthCallsWriter berthCallsWriter; + private final BerthCallsRangeReader berthCallsRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + private final ObjectMapper objectMapper; // ObjectMapper 주입 추가 + + public BerthCallsRangJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + BerthCallsProcessor berthCallsProcessor, + BerthCallsWriter berthCallsWriter, BerthCallsRangeReader berthCallsRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, + ObjectMapper objectMapper) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.berthCallsProcessor = berthCallsProcessor; + this.berthCallsWriter = berthCallsWriter; + this.berthCallsRangeReader = berthCallsRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + this.objectMapper = objectMapper; // ObjectMapper 초기화 + } + + @Override + protected String getJobName() { + return "BerthCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "BerthCallsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return berthCallsRangeReader; + } + @Bean + @StepScope + public BerthCallsRangeReader berthCallsRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + return new BerthCallsRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return berthCallsProcessor; + } + + @Override + protected 
ItemWriter createWriter() { + return berthCallsWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 + } + + @Bean(name = "BerthCallsRangeImportJob") + public Job berthCallsRangeImportJob() { + return job(); + } + + @Bean(name = "BerthCallsRangeImportStep") + public Step berthCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/reader/BerthCallsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/reader/BerthCallsRangeReader.java new file mode 100644 index 0000000..5ebfdf2 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/reader/BerthCallsRangeReader.java @@ -0,0 +1,154 @@ +package com.snp.batch.jobs.shipMovementBerthCalls.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementBerthCalls.batch.dto.BerthCallsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... 
(1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class BerthCallsRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + + public BerthCallsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + + super(webClient); + + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); + } + + @Override + protected String getReaderName() { + return "BerthCallsRangeReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/BerthCalls"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + @Override + protected List fetchNextBatch() throws Exception { + // 1) 처음 호출이면 API 한 번 호출해서 전체 데이터를 가져온다 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int end = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 4) 현재 batch 리스트 잘라서 반환 + List batch = allData.subList(currentBatchIndex, end); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + // 다음 batch 인덱스 이동 + currentBatchIndex = end; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate +"&stopDate=" + stopDate; +// "&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(BerthCallsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java index db5d696..6cf0f61 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementBerthCalls/batch/repository/BerthCallsRepositoryImpl.java @@ -32,7 +32,8 @@ public class BerthCallsRepositoryImpl extends BaseJdbcRepository { + + private final CurrentlyAtProcessor 
currentlyAtProcessor; + private final CurrentlyAtWriter currentlyAtWriter; + private final CurrentlyAtRangeReader currentlyAtRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public CurrentlyAtRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + CurrentlyAtProcessor currentlyAtProcessor, + CurrentlyAtWriter currentlyAtWriter, CurrentlyAtRangeReader currentlyAtRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.currentlyAtProcessor = currentlyAtProcessor; + this.currentlyAtWriter = currentlyAtWriter; + this.currentlyAtRangeReader = currentlyAtRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "CurrentlyAtRangeImportJob"; + } + + @Override + protected String getStepName() { + return "currentlyAtRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return currentlyAtRangeReader; + } + @Bean + @StepScope + public CurrentlyAtRangeReader currentlyAtReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new CurrentlyAtRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return currentlyAtProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return currentlyAtWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "CurrentlyAtRangeImportJob") + public Job currentlyAtRangeImportJob() { + return job(); + } + + @Bean(name = "CurrentlyAtRangeImportStep") + public Step currentlyAtRangeImportStep() 
package com.snp.batch.jobs.shipMovementCurrentlyAt.batch.reader;

import com.snp.batch.common.batch.reader.BaseApiReader;
import com.snp.batch.jobs.shipMovementCurrentlyAt.batch.dto.CurrentlyAtDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.web.reactive.function.client.WebClient;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;

/**
 * Reads "currently at" positions from the Maritime API for a created/updated
 * date range.
 *
 * Chunk flow:
 *  - first fetchNextBatch() → single GET /Movements/CurrentlyAt for
 *    [startDate, stopDate), response fully buffered in memory
 *  - subsequent calls → sub-lists of {@code batchSize} (5000) items
 *  - null return → end of step
 *
 * When job parameters are absent the range defaults to the whole of yesterday.
 */
@Slf4j
@StepScope
public class CurrentlyAtRangeReader extends BaseApiReader<CurrentlyAtDto> {

    /** Full API response; null until the first fetch (and stays null when the API returns nothing). */
    private List<CurrentlyAtDto> allData;
    /** Index of the first element of the next slice. */
    private int currentBatchIndex = 0;
    /** Items handed to the framework per slice. */
    private final int batchSize = 5000;
    private final String startDate;
    private final String stopDate;

    public CurrentlyAtRangeReader(WebClient webClient, String startDate, String stopDate) {
        super(webClient);

        // Default to "yesterday" (one full day) when either bound is missing.
        if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) {
            LocalDate yesterday = LocalDate.now().minusDays(1);
            this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
            this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z";
        } else {
            this.startDate = startDate;
            this.stopDate = stopDate;
        }

        enableChunkMode();
    }

    @Override
    protected String getReaderName() {
        return "CurrentlyAtReader";
    }

    @Override
    protected void resetCustomState() {
        this.currentBatchIndex = 0;
        this.allData = null;
    }

    @Override
    protected String getApiPath() {
        return "/Movements/CurrentlyAt";
    }

    @Override
    protected String getApiBaseUrl() {
        return "https://webservices.maritime.spglobal.com";
    }

    @Override
    protected void beforeFetch() {
        log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate);
    }

    /**
     * Returns the next in-memory slice, or {@code null} when exhausted.
     */
    @Override
    protected List<CurrentlyAtDto> fetchNextBatch() throws Exception {
        // 1) First call: fetch the entire range once and cache it.
        if (allData == null) {
            log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate);
            allData = callApiWithBatch(startDate, stopDate);

            if (allData == null || allData.isEmpty()) {
                log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName());
                return null;
            }

            log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize);
        }

        // 2) All slices consumed → end of input.
        if (currentBatchIndex >= allData.size()) {
            log.info("[{}] 모든 배치 처리 완료", getReaderName());
            return null;
        }

        // 3) Slice [currentBatchIndex, endIndex) off the cached response.
        int endIndex = Math.min(currentBatchIndex + batchSize, allData.size());
        List<CurrentlyAtDto> batch = allData.subList(currentBatchIndex, endIndex);

        int currentBatchNumber = (currentBatchIndex / batchSize) + 1;
        int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
        log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size());

        currentBatchIndex = endIndex;
        updateApiCallStats(totalBatches, currentBatchNumber);

        return batch;
    }

    /**
     * Single GET covering the whole created/updated range; may return
     * {@code null} on an empty body.
     */
    private List<CurrentlyAtDto> callApiWithBatch(String startDate, String stopDate) {
        String url = getApiPath() + "?dateCreatedUpdatedStart=" + startDate + "&dateCreatedUpdatedStop=" + stopDate;
        log.debug("[{}] API 호출: {}", getReaderName(), url);

        return webClient.get()
                .uri(url)
                .retrieve()
                .bodyToFlux(CurrentlyAtDto.class)
                .collectList()
                .block();
    }

    @Override
    protected void afterFetch(List<CurrentlyAtDto> data) {
        // data == null marks end-of-input; allData can still be null when the
        // very first fetch returned nothing, so guard against the NPE.
        if (data == null && allData != null) {
            int totalBatches = (int) Math.ceil((double) allData.size() / batchSize);
            log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches);
        }
    }
}
ShipMovementEntity mapRow(ResultSet rs, int rowNum) throws SQLException { - ShipMovementEntity entity = ShipMovementEntity.builder() - .id(rs.getLong("id")) - .imolRorIHSNumber(rs.getString("imolRorIHSNumber")) - .portCallId(rs.getObject("portCallId", Integer.class)) - .facilityId(rs.getObject("facilityId", Integer.class)) - .facilityName(rs.getString("facilityName")) - .facilityType(rs.getString("facilityType")) - .subFacilityId(rs.getObject("subFacilityId", Integer.class)) - .subFacilityName(rs.getString("subFacilityName")) - .subFacilityType(rs.getString("subFacilityType")) - .parentFacilityId(rs.getObject("parentFacilityId", Integer.class)) - .parentFacilityName(rs.getString("parentFacilityName")) - .parentFacilityType(rs.getString("parentFacilityType")) - .countryCode(rs.getString("countryCode")) - .countryName(rs.getString("countryName")) - .draught(rs.getObject("draught", Double.class)) - .latitude(rs.getObject("latitude", Double.class)) - .longitude(rs.getObject("longitude", Double.class)) - .destination(rs.getString("destination")) - .iso2(rs.getString("iso2")) - .position(parseJson(rs.getString("position"))) - .schemaType(rs.getString("schemaType")) - .build(); - - Timestamp movementDate = rs.getTimestamp("movementDate"); - if (movementDate != null) { - entity.setMovementDate(movementDate.toLocalDateTime()); - } - - return entity; - } - - private JsonNode parseJson(String json) { - try { - if (json == null) return null; - return new ObjectMapper().readTree(json); - } catch (Exception e) { - throw new RuntimeException("JSON 파싱 오류: " + json); - } - } - }*/ } diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/config/DarkActivityRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/config/DarkActivityRangeJobConfig.java new file mode 100644 index 0000000..1a7d521 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/config/DarkActivityRangeJobConfig.java @@ -0,0 +1,119 @@ 
+package com.snp.batch.jobs.shipMovementDarkActivity.batch.config; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.entity.DarkActivityEntity; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.processor.DarkActivityProcessor; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.writer.DarkActivityWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * DarkActivityReader (ship_data → Maritime API) + * ↓ (DarkActivityDto) + * DarkActivityProcessor + * ↓ (DarkActivityEntity) + * DarkActivityWriter + * ↓ (t_darkactivity 테이블) + */ + +@Slf4j +@Configuration +public class DarkActivityRangeJobConfig extends BaseJobConfig { + 
+ private final DarkActivityProcessor darkActivityProcessor; + private final DarkActivityWriter darkActivityWriter; + private final DarkActivityRangeReader darkActivityRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public DarkActivityRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + DarkActivityProcessor darkActivityProcessor, + DarkActivityWriter darkActivityWriter, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, + ObjectMapper objectMapper, DarkActivityRangeReader darkActivityRangeReader) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.darkActivityProcessor = darkActivityProcessor; + this.darkActivityWriter = darkActivityWriter; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + this.darkActivityRangeReader = darkActivityRangeReader; + } + + @Override + protected String getJobName() { + return "DarkActivityRangeImportJob"; + } + + @Override + protected String getStepName() { + return "DarkActivityRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + // Reader 생성자 수정: ObjectMapper를 전달합니다. 
+ return darkActivityRangeReader; + } + @Bean + @StepScope + public DarkActivityRangeReader darkActivityReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new DarkActivityRangeReader(maritimeApiWebClient, startDate, stopDate); + } + + @Override + protected ItemProcessor createProcessor() { + return darkActivityProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return darkActivityWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "DarkActivityRangeImportJob") + public Job darkActivityRangeImportJob() { + return job(); + } + + @Bean(name = "DarkActivityRangeImportStep") + public Step darkActivityRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java index 9cb7b81..2c05582 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityDto.java @@ -24,7 +24,7 @@ public class DarkActivityDto { private Double latitude; private Double longitude; - private AnchorageCallsPositionDto position; + private DarkActivityPositionDto position; private String eventStartDate; } diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityPositionDto.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityPositionDto.java new file mode 100644 index 0000000..d67d7db --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/dto/DarkActivityPositionDto.java @@ -0,0 +1,17 @@ +package com.snp.batch.jobs.shipMovementDarkActivity.batch.dto; + +import 
com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Data; + +@Data +public class DarkActivityPositionDto { + private boolean isNull; + private int stSrid; + private double lat; + @JsonProperty("long") + private double lon; + private double z; + private double m; + private boolean hasZ; + private boolean hasM; +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/reader/DarkActivityRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/reader/DarkActivityRangeReader.java new file mode 100644 index 0000000..2c72717 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/reader/DarkActivityRangeReader.java @@ -0,0 +1,182 @@ +package com.snp.batch.jobs.shipMovementDarkActivity.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... 
(1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class DarkActivityRangeReader extends BaseApiReader { + + + private List allData; + // DB 해시값을 저장할 맵 + private int currentBatchIndex = 0; + private final int batchSize = 5000; + + // @Value("#{jobParameters['startDate']}") + private String startDate; +// private String startDate = "2025-01-01"; + + // @Value("#{jobParameters['stopDate']}") + private String stopDate; +// private String stopDate = "2025-12-31"; + + /*public DarkActivityRangeReader(WebClient webClient) { + super(webClient); + enableChunkMode(); // ✨ Chunk 모드 활성화 + }*/ + public DarkActivityRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "DarkActivityReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/DarkActivity"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + private static final String GET_ALL_IMO_QUERY = + "SELECT imo_number FROM ship_data ORDER BY id"; +// "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_darkactivity) ORDER BY 
imo_number"; + + /** + * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 + */ + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + * + * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + + } + + /** + * Query Parameter를 사용한 API 호출 + * + * @param startDate,stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate){ + String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate; +// +"&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(DarkActivityDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { 
+ if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + /* log.info("[{}] 총 {} 개의 IMO 번호에 대한 API 호출 종료", + getReaderName(), allData.size());*/ + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java index 2055651..12ceb9e 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementDarkActivity/batch/repository/DarkActivityRepositoryImpl.java @@ -32,7 +32,8 @@ public class DarkActivityRepositoryImpl extends BaseJdbcRepository { + + private final DestinationProcessor DestinationProcessor; + private final DestinationWriter DestinationWriter; + private final DestinationRangeReader destinationRangeReader; + private final WebClient maritimeApiWebClient; + + public DestinationsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + DestinationProcessor DestinationProcessor, + DestinationWriter DestinationWriter, DestinationRangeReader destinationRangeReader, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.DestinationProcessor = DestinationProcessor; + this.DestinationWriter = DestinationWriter; + this.destinationRangeReader = destinationRangeReader; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "DestinationsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "DestinationsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return destinationRangeReader; + } + @Bean + @StepScope + public 
DestinationRangeReader destinationRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new DestinationRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return DestinationProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return DestinationWriter; + } + + @Override + protected int getChunkSize() { + return 1000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "DestinationsRangeImportJob") + public Job destinationsRangeImportJob() { + return job(); + } + + @Bean(name = "DestinationsRangeImportStep") + public Step destinationsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/reader/DestinationRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/reader/DestinationRangeReader.java new file mode 100644 index 0000000..7ce34c4 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/reader/DestinationRangeReader.java @@ -0,0 +1,161 @@ +package com.snp.batch.jobs.shipMovementDestination.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementDestination.batch.dto.DestinationDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + *

+ * 기능: + * 1. Destinations API에서 날짜 범위(startDate~stopDate)로 전체 데이터 조회 (최초 1회) + * 2. 조회 결과를 1000건씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 1000건씩 반환 + * 4. Spring Batch가 1000건씩 Process → Write 수행 + *

+ * Chunk 처리 흐름: + * - beforeFetch() → 요청 날짜 범위 로그 출력 (1회) + * - fetchNextBatch() → 최초 1회 API 전체 조회 후 1000건씩 분할 반환 + * - read() → 1건씩 반환 (배치당 최대 1000번) + * - Processor/Writer → 1000건 처리 + * - 반복... (전체 배치 소진 시까지) + *

+ * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class DestinationRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 1000; + private String startDate; + private String stopDate; + + public DestinationRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 + 한달 기간 도착예정지 정보 update + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate today = LocalDate.now(); + this.startDate = today + .atStartOfDay() + .format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + + this.stopDate = today + .plusDays(15) + .atStartOfDay() + .format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "DestinationsRange"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/Destinations"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + /** + * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 + */ + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + // 모든 배치 처리 완료 확인 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + 
log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; +// +"&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(DestinationDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java index bea7875..7147469 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementDestination/batch/repository/DestinationRepositoryImpl.java @@ -27,12 +27,13 @@ public class DestinationRepositoryImpl extends BaseJdbcRepository 
entities) { if (entities == null || entities.isEmpty()) return; - log.info("Destinations 저장 시작 = {}건", entities.size()); batchInsert(entities); diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/config/ShipMovementJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsJobConfig.java similarity index 56% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/config/ShipMovementJobConfig.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsJobConfig.java index c840630..7971e39 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/config/ShipMovementJobConfig.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsJobConfig.java @@ -1,12 +1,12 @@ -package com.snp.batch.jobs.shipMovement.batch.config; +package com.snp.batch.jobs.shipMovementPortCalls.batch.config; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.config.BaseJobConfig; -import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; -import com.snp.batch.jobs.shipMovement.batch.processor.ShipMovementProcessor; -import com.snp.batch.jobs.shipMovement.batch.reader.ShipMovementReader; -import com.snp.batch.jobs.shipMovement.batch.writer.ShipMovementWriter; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.processor.PortCallsProcessor; +import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsReader; +import com.snp.batch.jobs.shipMovementPortCalls.batch.writer.PortCallsWriter; import lombok.extern.slf4j.Slf4j; import org.springframework.batch.core.Job; import org.springframework.batch.core.Step; @@ -37,34 +37,34 @@ import java.time.format.DateTimeFormatter; * - 선박 상세 정보를 
ship_detail 테이블에 저장 (UPSERT) * * 데이터 흐름: - * ShipMovementReader (ship_data → Maritime API) + * PortCallsReader (ship_data → Maritime API) * ↓ (PortCallDto) - * ShipMovementProcessor - * ↓ (ShipMovementEntity) + * PortCallsProcessor + * ↓ (PortCallsEntity) * ShipDetailDataWriter * ↓ (ship_movement 테이블) */ @Slf4j @Configuration -public class ShipMovementJobConfig extends BaseJobConfig { +public class ShipPortCallsJobConfig extends BaseJobConfig { - private final ShipMovementProcessor shipMovementProcessor; - private final ShipMovementWriter shipMovementWriter; + private final PortCallsProcessor portCallsProcessor; + private final PortCallsWriter portCallsWriter; private final JdbcTemplate jdbcTemplate; private final WebClient maritimeApiWebClient; private final ObjectMapper objectMapper; // ObjectMapper 주입 추가 - public ShipMovementJobConfig( + public ShipPortCallsJobConfig( JobRepository jobRepository, PlatformTransactionManager transactionManager, - ShipMovementProcessor shipMovementProcessor, - ShipMovementWriter shipMovementWriter, JdbcTemplate jdbcTemplate, + PortCallsProcessor portCallsProcessor, + PortCallsWriter portCallsWriter, JdbcTemplate jdbcTemplate, @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, ObjectMapper objectMapper) { // ObjectMapper 주입 추가 super(jobRepository, transactionManager); - this.shipMovementProcessor = shipMovementProcessor; - this.shipMovementWriter = shipMovementWriter; + this.portCallsProcessor = portCallsProcessor; + this.portCallsWriter = portCallsWriter; this.jdbcTemplate = jdbcTemplate; this.maritimeApiWebClient = maritimeApiWebClient; this.objectMapper = objectMapper; // ObjectMapper 초기화 @@ -72,30 +72,28 @@ public class ShipMovementJobConfig extends BaseJobConfig createReader() { // 타입 변경 // Reader 생성자 수정: ObjectMapper를 전달합니다. 
- return shipMovementReader(null, null); - //return new ShipMovementReader(maritimeApiWebClient, jdbcTemplate, objectMapper); + return portCallsReader( null, null); + //return new PortCallsReader(maritimeApiWebClient, jdbcTemplate, objectMapper); } @Override - protected ItemProcessor createProcessor() { - return shipMovementProcessor; + protected ItemProcessor createProcessor() { + return portCallsProcessor; } @Override - protected ItemWriter createWriter() { // 타입 변경 - return shipMovementWriter; + protected ItemWriter createWriter() { // 타입 변경 + return portCallsWriter; } @Override protected int getChunkSize() { - return 50; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + return 1000; // API에서 5000개씩 가져오므로 chunk도 5000으로 설정 } - @Bean(name = "shipMovementJob") - public Job shipMovementJob() { + @Bean(name = "PortCallsImportJob") + public Job portCallsImportJob() { return job(); } - @Bean(name = "shipMovementStep") - public Step shipMovementStep() { + @Bean(name = "PortCallsImportStep") + public Step portCallsImportStep() { return step(); } } diff --git a/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsRangeJobConfig.java new file mode 100644 index 0000000..702412e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/config/ShipPortCallsRangeJobConfig.java @@ -0,0 +1,114 @@ +package com.snp.batch.jobs.shipMovementPortCalls.batch.config; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.processor.PortCallsProcessor; +import com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsRangeReader; +import 
com.snp.batch.jobs.shipMovementPortCalls.batch.reader.PortCallsReader; +import com.snp.batch.jobs.shipMovementPortCalls.batch.writer.PortCallsWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * PortCallsReader (ship_data → Maritime API) + * ↓ (PortCallDto) + * PortCallsProcessor + * ↓ (PortCallsEntity) + * ShipDetailDataWriter + * ↓ (ship_movement 테이블) + */ + +@Slf4j +@Configuration +public class ShipPortCallsRangeJobConfig extends BaseJobConfig { + + private final PortCallsProcessor portCallsProcessor; + private final PortCallsWriter portCallsWriter; + private final PortCallsRangeReader portCallsRangeReader; + public ShipPortCallsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + PortCallsProcessor portCallsProcessor, + PortCallsWriter portCallsWriter, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient, + 
ObjectMapper objectMapper, PortCallsRangeReader portCallsRangeReader) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.portCallsProcessor = portCallsProcessor; + this.portCallsWriter = portCallsWriter; + this.portCallsRangeReader = portCallsRangeReader; + } + + @Override + protected String getJobName() { + return "PortCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "PortCallsRangeImportStep"; + } + + @Bean + @StepScope + public PortCallsRangeReader portCallsRangeReader( + @Qualifier("maritimeServiceApiWebClient") WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + return new PortCallsRangeReader(webClient, startDate, stopDate); + } + @Override + protected ItemReader createReader() { // 타입 변경 + return portCallsRangeReader; + } + + @Override + protected ItemProcessor createProcessor() { + return portCallsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return portCallsWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 5000개 가져오므로 chunk도 5000개씩 설정 + } + + @Bean(name = "PortCallsRangeImportJob") + public Job portCallsRangeImportJob() { + return job(); + } + + @Bean(name = "PortCallsRangeImportStep") + public Step portCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsDto.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsDto.java similarity index 92% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsDto.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsDto.java index c97db50..272626d 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsDto.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsDto.java @@ -1,4 +1,4 @@ -package 
com.snp.batch.jobs.shipMovement.batch.dto; +package com.snp.batch.jobs.shipMovementPortCalls.batch.dto; import lombok.Data; diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsPositionDto.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsPositionDto.java similarity index 85% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsPositionDto.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsPositionDto.java index 8906ba0..a960e8c 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/PortCallsPositionDto.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/PortCallsPositionDto.java @@ -1,4 +1,4 @@ -package com.snp.batch.jobs.shipMovement.batch.dto; +package com.snp.batch.jobs.shipMovementPortCalls.batch.dto; import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Data; diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/ShipMovementApiResponse.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/ShipMovementApiResponse.java similarity index 78% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/ShipMovementApiResponse.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/ShipMovementApiResponse.java index eb8fae8..f32b864 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/dto/ShipMovementApiResponse.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/dto/ShipMovementApiResponse.java @@ -1,4 +1,4 @@ -package com.snp.batch.jobs.shipMovement.batch.dto; +package com.snp.batch.jobs.shipMovementPortCalls.batch.dto; import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Data; diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/entity/ShipMovementEntity.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/entity/PortCallsEntity.java similarity index 92% 
rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/entity/ShipMovementEntity.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/entity/PortCallsEntity.java index 50bee84..d519d63 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/entity/ShipMovementEntity.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/entity/PortCallsEntity.java @@ -1,4 +1,4 @@ -package com.snp.batch.jobs.shipMovement.batch.entity; +package com.snp.batch.jobs.shipMovementPortCalls.batch.entity; import com.fasterxml.jackson.databind.JsonNode; import jakarta.persistence.GeneratedValue; @@ -7,7 +7,6 @@ import jakarta.persistence.Id; import jakarta.persistence.SequenceGenerator; import lombok.AllArgsConstructor; import lombok.Data; -import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; import lombok.experimental.SuperBuilder; @@ -17,7 +16,7 @@ import java.time.LocalDateTime; @SuperBuilder @NoArgsConstructor @AllArgsConstructor -public class ShipMovementEntity { +public class PortCallsEntity { @Id @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "ship_movement_id_seq") @SequenceGenerator(name = "ship_movement_id_seq", sequenceName = "ship_movement_id_seq", allocationSize = 1) diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/processor/ShipMovementProcessor.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/processor/PortCallsProcessor.java similarity index 82% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/processor/ShipMovementProcessor.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/processor/PortCallsProcessor.java index 102e404..4df08aa 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/processor/ShipMovementProcessor.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/processor/PortCallsProcessor.java @@ -1,10 +1,10 @@ -package com.snp.batch.jobs.shipMovement.batch.processor; +package 
com.snp.batch.jobs.shipMovementPortCalls.batch.processor; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.processor.BaseProcessor; -import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; @@ -22,16 +22,16 @@ import java.time.LocalDateTime; */ @Slf4j @Component -public class ShipMovementProcessor extends BaseProcessor { +public class PortCallsProcessor extends BaseProcessor { private final ObjectMapper objectMapper; - public ShipMovementProcessor(ObjectMapper objectMapper) { + public PortCallsProcessor(ObjectMapper objectMapper) { this.objectMapper = objectMapper; } @Override - protected ShipMovementEntity processItem(PortCallsDto dto) throws Exception { + protected PortCallsEntity processItem(PortCallsDto dto) throws Exception { log.debug("선박 상세 정보 처리 시작: imoNumber={}, facilityName={}", dto.getImolRorIHSNumber(), dto.getFacilityName()); @@ -41,7 +41,7 @@ public class ShipMovementProcessor extends BaseProcessor { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + public PortCallsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || + stopDate == null || stopDate.isBlank()) { + + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = 
yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); + } + + @Override + protected String getReaderName() { + return "PortCallsRangeReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/PortCalls"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 배치를 조회하여 반환 + * + * Spring Batch가 batchsize만큼 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. 
batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int end = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 4) 현재 batch 리스트 잘라서 반환 + List batch = allData.subList(currentBatchIndex, end); + + int batchNum = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), batchNum, totalBatches, batch.size()); + + // 다음 batch 인덱스 이동 + currentBatchIndex = end; + updateApiCallStats(totalBatches, batchNum); + + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; + log.info("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(PortCallsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/reader/ShipMovementReader.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/reader/PortCallsReader.java similarity index 86% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/reader/ShipMovementReader.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/reader/PortCallsReader.java index 1277732..086a902 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/reader/ShipMovementReader.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/reader/PortCallsReader.java @@ -1,9 +1,10 @@ -package 
com.snp.batch.jobs.shipMovement.batch.reader; +package com.snp.batch.jobs.shipMovementPortCalls.batch.reader; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.reader.BaseApiReader; -import com.snp.batch.jobs.shipMovement.batch.dto.PortCallsDto; -import com.snp.batch.jobs.shipMovement.batch.dto.ShipMovementApiResponse; +import com.snp.batch.jobs.shipMovementAnchorageCalls.batch.dto.AnchorageCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.PortCallsDto; +import com.snp.batch.jobs.shipMovementPortCalls.batch.dto.ShipMovementApiResponse; import lombok.extern.slf4j.Slf4j; import org.springframework.batch.core.configuration.annotation.StepScope; import org.springframework.beans.factory.annotation.Value; @@ -34,7 +35,7 @@ import java.util.*; */ @Slf4j @StepScope -public class ShipMovementReader extends BaseApiReader { +public class PortCallsReader extends BaseApiReader { private final JdbcTemplate jdbcTemplate; private final ObjectMapper objectMapper; @@ -46,16 +47,16 @@ public class ShipMovementReader extends BaseApiReader { private int currentBatchIndex = 0; private final int batchSize = 10; - @Value("#{jobParameters['startDate']}") - private String startDate; -// private String startDate = "2024-01-01"; + // @Value("#{jobParameters['startDate']}") +// private String startDate; + private String startDate = "2025-01-01"; - @Value("#{jobParameters['stopDate']}") - private String stopDate; - // private String stopDate = "2024-12-31"; + // @Value("#{jobParameters['stopDate']}") +// private String stopDate; + private String stopDate = "2025-12-31"; public void setStartDate(String startDate) {this.startDate = startDate;} public void setStopDate(String stopDate){this.stopDate=stopDate;} - public ShipMovementReader(WebClient webClient, JdbcTemplate jdbcTemplate, ObjectMapper objectMapper) { + public PortCallsReader(WebClient webClient, JdbcTemplate jdbcTemplate, ObjectMapper objectMapper) { super(webClient); 
this.jdbcTemplate = jdbcTemplate; this.objectMapper = objectMapper; @@ -76,7 +77,7 @@ public class ShipMovementReader extends BaseApiReader { @Override protected String getApiPath() { - return "/Movements"; + return "/Movements/PortCalls"; } @Override @@ -88,9 +89,6 @@ public class ShipMovementReader extends BaseApiReader { "SELECT imo_number FROM ship_data ORDER BY id"; // "SELECT imo_number FROM snp_data.ship_data where imo_number > (select max(imo) from snp_data.t_ship_stpov_info) ORDER BY imo_number"; - private static final String FETCH_ALL_HASHES_QUERY = - "SELECT imo_number, ship_detail_hash FROM ship_detail_hash_json ORDER BY imo_number"; - /** * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 */ @@ -144,15 +142,16 @@ public class ShipMovementReader extends BaseApiReader { String imoParam = String.join(",", currentBatch); // API 호출 - ShipMovementApiResponse response = callApiWithBatch(imoParam); +// ShipMovementApiResponse response = callApiWithBatch(imoParam); + List response= callApiWithBatch(imoParam); // 다음 배치로 인덱스 이동 currentBatchIndex = endIndex; // 응답 처리 - if (response != null && response.getPortCallList() != null) { - List portCalls = response.getPortCallList(); + if (response != null) { + List portCalls = response; log.info("[{}] 배치 {}/{} 완료: {} 건 조회", getReaderName(), currentBatchNumber, totalBatches, portCalls.size()); @@ -194,7 +193,7 @@ public class ShipMovementReader extends BaseApiReader { * @param lrno 쉼표로 연결된 IMO 번호 (예: "1000019,1000021,...") * @return API 응답 */ - private ShipMovementApiResponse callApiWithBatch(String lrno) { + private List callApiWithBatch(String lrno) { String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate+"&lrno=" + lrno; log.debug("[{}] API 호출: {}", getReaderName(), url); @@ -202,7 +201,8 @@ public class ShipMovementReader extends BaseApiReader { return webClient.get() .uri(url) .retrieve() - .bodyToMono(ShipMovementApiResponse.class) + .bodyToFlux(PortCallsDto.class) + .collectList() .block(); } diff 
--git a/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepository.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepository.java new file mode 100644 index 0000000..bb45152 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepository.java @@ -0,0 +1,16 @@ +package com.snp.batch.jobs.shipMovementPortCalls.batch.repository; + +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; + +import java.util.List; + +/** + * 선박 상세 정보 Repository 인터페이스 + */ + +public interface PortCallsRepository { + + void saveAll(List entities); + + boolean existsByPortCallId(Integer portCallId); +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepositoryImpl.java similarity index 88% rename from src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepositoryImpl.java rename to src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepositoryImpl.java index 13a3ac0..eb3f6e6 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovement/batch/repository/ShipMovementRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/repository/PortCallsRepositoryImpl.java @@ -1,9 +1,9 @@ -package com.snp.batch.jobs.shipMovement.batch.repository; +package com.snp.batch.jobs.shipMovementPortCalls.batch.repository; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.snp.batch.common.batch.repository.BaseJdbcRepository; -import com.snp.batch.jobs.shipMovement.batch.entity.ShipMovementEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; import lombok.extern.slf4j.Slf4j; import org.springframework.jdbc.core.JdbcTemplate; import 
org.springframework.jdbc.core.RowMapper; @@ -21,16 +21,17 @@ import java.util.List; */ @Slf4j @Repository("ShipMovementRepository") -public class ShipMovementRepositoryImpl extends BaseJdbcRepository - implements ShipMovementRepository { +public class PortCallsRepositoryImpl extends BaseJdbcRepository + implements PortCallsRepository { - public ShipMovementRepositoryImpl(JdbcTemplate jdbcTemplate) { + public PortCallsRepositoryImpl(JdbcTemplate jdbcTemplate) { super(jdbcTemplate); } private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @Override protected String getTableName() { - return "snp_data.t_ship_stpov_info"; +// return "snp_data.t_ship_stpov_info"; + return "new_snp.t_ship_stpov_info"; } @Override @@ -39,14 +40,16 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository getRowMapper() { + protected RowMapper getRowMapper() { return new ShipMovementRowMapper(); } @Override - public void saveAll(List entities) { + public void saveAll(List entities) { if (entities == null || entities.isEmpty()) return; log.info("ShipMovement 저장 시작 = {}건", entities.size()); @@ -205,10 +208,10 @@ public class ShipMovementRepositoryImpl extends BaseJdbcRepository { + private static class ShipMovementRowMapper implements RowMapper { @Override - public ShipMovementEntity mapRow(ResultSet rs, int rowNum) throws SQLException { - ShipMovementEntity entity = ShipMovementEntity.builder() + public PortCallsEntity mapRow(ResultSet rs, int rowNum) throws SQLException { + PortCallsEntity entity = PortCallsEntity.builder() .id(rs.getLong("id")) .imolRorIHSNumber(rs.getString("imolRorIHSNumber")) .portCallId(rs.getObject("portCallId", Integer.class)) diff --git a/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/writer/PortCallsWriter.java b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/writer/PortCallsWriter.java new file mode 100644 index 0000000..20fe890 --- /dev/null +++ 
b/src/main/java/com/snp/batch/jobs/shipMovementPortCalls/batch/writer/PortCallsWriter.java @@ -0,0 +1,38 @@ +package com.snp.batch.jobs.shipMovementPortCalls.batch.writer; + +import com.snp.batch.common.batch.writer.BaseWriter; +import com.snp.batch.jobs.shipMovementPortCalls.batch.entity.PortCallsEntity; +import com.snp.batch.jobs.shipMovementPortCalls.batch.repository.PortCallsRepository; +import com.snp.batch.jobs.shipdetail.batch.repository.ShipDetailRepository; +import com.snp.batch.jobs.shipdetail.batch.repository.ShipHashRepository; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Component; + +import java.util.List; + +/** + * 선박 상세 정보 Writer + */ +@Slf4j +@Component +public class PortCallsWriter extends BaseWriter { + + private final PortCallsRepository shipMovementRepository; + + + public PortCallsWriter(PortCallsRepository shipMovementRepository) { + super("ShipPortCalls"); + this.shipMovementRepository = shipMovementRepository; + } + + @Override + protected void writeItems(List items) throws Exception { + + if (items.isEmpty()) { return; } + + shipMovementRepository.saveAll(items); + log.info("PortCalls 데이터 저장 완료: {} 건", items.size()); + + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/config/StsOperationRangeJobConfig.java b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/config/StsOperationRangeJobConfig.java new file mode 100644 index 0000000..d348e3b --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/config/StsOperationRangeJobConfig.java @@ -0,0 +1,117 @@ +package com.snp.batch.jobs.shipMovementStsOperations.batch.config; + +import com.snp.batch.common.batch.config.BaseJobConfig; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.reader.DarkActivityRangeReader; +import com.snp.batch.jobs.shipMovementStsOperations.batch.dto.StsOperationDto; +import com.snp.batch.jobs.shipMovementStsOperations.batch.entity.StsOperationEntity; 
+import com.snp.batch.jobs.shipMovementStsOperations.batch.processor.StsOperationProcessor; +import com.snp.batch.jobs.shipMovementStsOperations.batch.reader.StsOperationRangeReader; +import com.snp.batch.jobs.shipMovementStsOperations.batch.writer.StsOperationWriter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.web.reactive.function.client.WebClient; + +/** + * 선박 상세 정보 Import Job Config + * + * 특징: + * - ship_data 테이블에서 IMO 번호 조회 + * - IMO 번호를 100개씩 배치로 분할 + * - Maritime API GetShipsByIHSLRorIMONumbers 호출 + * TODO : GetShipsByIHSLRorIMONumbersAll 호출로 변경 + * - 선박 상세 정보를 ship_detail 테이블에 저장 (UPSERT) + * + * 데이터 흐름: + * StsOperationReader (ship_data → Maritime API) + * ↓ (StsOperationDto) + * StsOperationProcessor + * ↓ (StsOperationEntity) + * StsOperationWriter + * ↓ (t_stsoperation 테이블) + */ + +@Slf4j +@Configuration +public class StsOperationRangeJobConfig extends BaseJobConfig { + + private final StsOperationProcessor stsOperationProcessor; + private final StsOperationWriter stsOperationWriter; + private final StsOperationRangeReader stsOperationRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public StsOperationRangeJobConfig( + JobRepository 
jobRepository, + PlatformTransactionManager transactionManager, + StsOperationProcessor stsOperationProcessor, + StsOperationWriter stsOperationWriter, StsOperationRangeReader stsOperationRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.stsOperationProcessor = stsOperationProcessor; + this.stsOperationWriter = stsOperationWriter; + this.stsOperationRangeReader = stsOperationRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "STSOperationRangeImportJob"; + } + + @Override + protected String getStepName() { + return "STSOperationRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + // Reader 생성자 수정: ObjectMapper를 전달합니다. + return stsOperationRangeReader; + } + @Bean + @StepScope + public StsOperationRangeReader stsOperationRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new StsOperationRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return stsOperationProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return stsOperationWriter; + } + + @Override + protected int getChunkSize() { + return 5000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "STSOperationRangeImportJob") + public Job STSOperationRangeImportJob() { + return job(); + } + + @Bean(name = "STSOperationRangeImportStep") + public Step STSOperationRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/reader/StsOperationRangeReader.java 
b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/reader/StsOperationRangeReader.java new file mode 100644 index 0000000..0b2c56e --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/reader/StsOperationRangeReader.java @@ -0,0 +1,164 @@ +package com.snp.batch.jobs.shipMovementStsOperations.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementStsOperations.batch.dto.StsOperationDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... 
(1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class StsOperationRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 5000; + private String startDate; + private String stopDate; + + public StsOperationRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "StsOperationReader"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/StsOperations"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + /** + * 최초 1회만 실행: ship_data 테이블에서 IMO 번호 전체 조회 + */ + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + * + * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = 
callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * + * @param startDate,stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(StsOperationDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + /*log.info("[{}] 총 {} 개의 IMO 번호에 대한 API 호출 종료", + getReaderName(), allImoNumbers.size());*/ + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java index 4cebb94..8dabe87 100644 --- 
a/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementStsOperations/batch/repository/StsOperationRepositoryImpl.java @@ -30,7 +30,8 @@ public class StsOperationRepositoryImpl extends BaseJdbcRepository { + + private final TerminalCallsProcessor terminalCallsProcessor; + private final TerminalCallsWriter terminalCallsWriter; + private final TerminalCallsRangeReader terminalCallsRangeReader; + private final JdbcTemplate jdbcTemplate; + private final WebClient maritimeApiWebClient; + + public TerminalCallsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + TerminalCallsProcessor terminalCallsProcessor, + TerminalCallsWriter terminalCallsWriter, TerminalCallsRangeReader terminalCallsRangeReader, JdbcTemplate jdbcTemplate, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.terminalCallsProcessor = terminalCallsProcessor; + this.terminalCallsWriter = terminalCallsWriter; + this.terminalCallsRangeReader = terminalCallsRangeReader; + this.jdbcTemplate = jdbcTemplate; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "TerminalCallsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "TerminalCallsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return terminalCallsRangeReader; + } + @Bean + @StepScope + public TerminalCallsRangeReader terminalCallsRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new TerminalCallsRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { + return 
terminalCallsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return terminalCallsWriter; + } + + @Override + protected int getChunkSize() { + return 1000; // API에서 100개씩 가져오므로 chunk도 1000으로 설정 + } + + @Bean(name = "TerminalCallsRangeImportJob") + public Job terminalCallsRangeImportJob() { + return job(); + } + + @Bean(name = "TerminalCallsRangeImportStep") + public Step terminalCallsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/reader/TerminalCallsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/reader/TerminalCallsRangeReader.java new file mode 100644 index 0000000..f5bff28 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/reader/TerminalCallsRangeReader.java @@ -0,0 +1,162 @@ +package com.snp.batch.jobs.shipMovementTerminalCalls.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementDarkActivity.batch.dto.DarkActivityDto; +import com.snp.batch.jobs.shipMovementTerminalCalls.batch.dto.TerminalCallsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + *

+ * 기능: + * 1. startDate~stopDate 범위로 /Movements/TerminalCalls API 1회 호출 (최초 fetch 시) + * 2. 조회 결과를 batchSize(1000)건씩 분할하여 배치 단위로 반환 + * 3. fetchNextBatch() 호출 시마다 다음 1000건 배치 반환 + * 4. Spring Batch가 chunk 단위로 Process → Write 수행 + *

+ * Chunk 처리 흐름: + * - beforeFetch() → 요청 날짜 범위 로깅 + * - fetchNextBatch() → 최초 1회 전체 조회 후 1000건씩 subList 반환 + * - read() → 1건씩 반환 + * - Processor/Writer → chunk 단위 처리 + * - 모든 배치 소진 시 null 반환으로 종료 + *

+ * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class TerminalCallsRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 1000; + private String startDate; + private String stopDate; + + public TerminalCallsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "TerminalCalls"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/TerminalCalls"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + *

+ * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @param startDate, stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate + "&stopDate=" + stopDate; + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(TerminalCallsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git 
a/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java index 66366e1..2eb31f0 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementTerminalCalls/batch/repository/TerminalCallsRepositoryImpl.java @@ -30,7 +30,8 @@ public class TerminalCallsRepositoryImpl extends BaseJdbcRepository { + + private final TransitsProcessor transitsProcessor; + private final TransitsWriter transitsWriter; + private final TransitsRangeReader transitsRangeReader; + private final WebClient maritimeApiWebClient; + + public TransitsRangeJobConfig( + JobRepository jobRepository, + PlatformTransactionManager transactionManager, + TransitsProcessor TransitsProcessor, + TransitsWriter transitsWriter, TransitsRangeReader transitsRangeReader, + @Qualifier("maritimeServiceApiWebClient") WebClient maritimeApiWebClient) { // ObjectMapper 주입 추가 + super(jobRepository, transactionManager); + this.transitsProcessor = TransitsProcessor; + this.transitsWriter = transitsWriter; + this.transitsRangeReader = transitsRangeReader; + this.maritimeApiWebClient = maritimeApiWebClient; + } + + @Override + protected String getJobName() { + return "TransitsRangeImportJob"; + } + + @Override + protected String getStepName() { + return "TransitsRangeImportStep"; + } + + @Override + protected ItemReader createReader() { // 타입 변경 + return transitsRangeReader; + } + @Bean + @StepScope + public TransitsRangeReader transitsRangeReader( + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate + ) { + // jobParameters 없으면 null 넘어오고 Reader에서 default 처리 + return new TransitsRangeReader(maritimeApiWebClient, startDate, stopDate); + } + @Override + protected ItemProcessor createProcessor() { 
+ return transitsProcessor; + } + + @Override + protected ItemWriter createWriter() { // 타입 변경 + return transitsWriter; + } + + @Override + protected int getChunkSize() { + return 1000; // API에서 100개씩 가져오므로 chunk도 100으로 설정 + } + + @Bean(name = "TransitsRangeImportJob") + public Job transitsRangeImportJob() { + return job(); + } + + @Bean(name = "TransitsRangeImportStep") + public Step transitsRangeImportStep() { + return step(); + } +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/reader/TransitsRangeReader.java b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/reader/TransitsRangeReader.java new file mode 100644 index 0000000..23abbb0 --- /dev/null +++ b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/reader/TransitsRangeReader.java @@ -0,0 +1,159 @@ +package com.snp.batch.jobs.shipMovementTransits.batch.reader; + +import com.snp.batch.common.batch.reader.BaseApiReader; +import com.snp.batch.jobs.shipMovementTransits.batch.dto.TransitsDto; +import lombok.extern.slf4j.Slf4j; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.reactive.function.client.WebClient; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.List; + +/** + * 선박 상세 정보 Reader (v2.0 - Chunk 기반) + * + * 기능: + * 1. ship_data 테이블에서 IMO 번호 전체 조회 (최초 1회) + * 2. IMO 번호를 100개씩 분할하여 배치 단위로 처리 + * 3. fetchNextBatch() 호출 시마다 100개씩 API 호출 + * 4. Spring Batch가 100건씩 Process → Write 수행 + * + * Chunk 처리 흐름: + * - beforeFetch() → IMO 전체 조회 (1회) + * - fetchNextBatch() → 100개 IMO로 API 호출 (1,718회) + * - read() → 1건씩 반환 (100번) + * - Processor/Writer → 100건 처리 + * - 반복... 
(1,718번의 Chunk) + * + * 기존 방식과의 차이: + * - 기존: 17만건 전체 메모리 로드 → Process → Write + * - 신규: 100건씩 로드 → Process → Write (Chunk 1,718회) + */ +@Slf4j +@StepScope +public class TransitsRangeReader extends BaseApiReader { + + private List allData; + private int currentBatchIndex = 0; + private final int batchSize = 1000; + private String startDate; + private String stopDate; + + public TransitsRangeReader(WebClient webClient, + @Value("#{jobParameters['startDate']}") String startDate, + @Value("#{jobParameters['stopDate']}") String stopDate) { + super(webClient); + // 날짜가 없으면 전날 하루 기준 + if (startDate == null || startDate.isBlank() || stopDate == null || stopDate.isBlank()) { + LocalDate yesterday = LocalDate.now().minusDays(1); + this.startDate = yesterday.atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + this.stopDate = yesterday.plusDays(1).atStartOfDay().format(DateTimeFormatter.ISO_DATE_TIME) + "Z"; + } else { + this.startDate = startDate; + this.stopDate = stopDate; + } + + enableChunkMode(); // ✨ Chunk 모드 활성화 + } + + @Override + protected String getReaderName() { + return "Transits"; + } + + @Override + protected void resetCustomState() { + this.currentBatchIndex = 0; + this.allData = null; + } + + @Override + protected String getApiPath() { + return "/Movements/Transits"; + } + + @Override + protected String getApiBaseUrl() { + return "https://webservices.maritime.spglobal.com"; + } + + @Override + protected void beforeFetch() { + log.info("[{}] 요청 날짜 범위: {} → {}", getReaderName(), startDate, stopDate); + } + + /** + * ✨ Chunk 기반 핵심 메서드: 다음 100개 배치를 조회하여 반환 + * + * Spring Batch가 100건씩 read() 호출 완료 후 이 메서드 재호출 + * + * @return 다음 배치 100건 (더 이상 없으면 null) + */ + @Override + protected List fetchNextBatch() throws Exception { + + // 모든 배치 처리 완료 확인 + if (allData == null ) { + log.info("[{}] 최초 API 조회 실행: {} ~ {}", getReaderName(), startDate, stopDate); + allData = callApiWithBatch(startDate, stopDate); + + if (allData == null || allData.isEmpty()) { + 
log.warn("[{}] 조회된 데이터 없음 → 종료", getReaderName()); + return null; + } + + log.info("[{}] 총 {}건 데이터 조회됨. batchSize = {}", getReaderName(), allData.size(), batchSize); + } + + // 2) 이미 끝까지 읽었으면 종료 + if (currentBatchIndex >= allData.size()) { + log.info("[{}] 모든 배치 처리 완료", getReaderName()); + return null; + } + + // 3) 이번 배치의 end 계산 + int endIndex = Math.min(currentBatchIndex + batchSize, allData.size()); + + // 현재 배치의 IMO 번호 추출 (100개) + List batch = allData.subList(currentBatchIndex, endIndex); + + int currentBatchNumber = (currentBatchIndex / batchSize) + 1; + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + + log.info("[{}] 배치 {}/{} 처리 중: {}건", getReaderName(), currentBatchNumber, totalBatches, batch.size()); + + currentBatchIndex = endIndex; + updateApiCallStats(totalBatches, currentBatchNumber); + return batch; + } + + /** + * Query Parameter를 사용한 API 호출 + * @param startDate,stopDate + * @return API 응답 + */ + private List callApiWithBatch(String startDate, String stopDate) { + String url = getApiPath() + "?startDate=" + startDate +"&stopDate="+stopDate; +// +"&lrno=" + lrno; + + log.debug("[{}] API 호출: {}", getReaderName(), url); + + return webClient.get() + .uri(url) + .retrieve() + .bodyToFlux(TransitsDto.class) + .collectList() + .block(); + } + + @Override + protected void afterFetch(List data) { + if (data == null) { + int totalBatches = (int) Math.ceil((double) allData.size() / batchSize); + log.info("[{}] 전체 {} 개 배치 처리 완료", getReaderName(), totalBatches); + } + } + +} diff --git a/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitlsRepositoryImpl.java b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitsRepositoryImpl.java similarity index 90% rename from src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitlsRepositoryImpl.java rename to src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitsRepositoryImpl.java index 
af747c0..50f2802 100644 --- a/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitlsRepositoryImpl.java +++ b/src/main/java/com/snp/batch/jobs/shipMovementTransits/batch/repository/TransitsRepositoryImpl.java @@ -18,16 +18,17 @@ import java.util.List; */ @Slf4j @Repository("TransitsRepository") -public class TransitlsRepositoryImpl extends BaseJdbcRepository +public class TransitsRepositoryImpl extends BaseJdbcRepository implements TransitsRepository { - public TransitlsRepositoryImpl(JdbcTemplate jdbcTemplate) { + public TransitsRepositoryImpl(JdbcTemplate jdbcTemplate) { super(jdbcTemplate); } private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @Override protected String getTableName() { - return "snp_data.t_transit"; +// return "snp_data.t_transit"; + return "new_snp.t_transit"; } @Override @@ -42,8 +43,10 @@ public class TransitlsRepositoryImpl extends BaseJdbcRepository