shjung, 1 year ago
parent
commit
2e62769530
28 changed files with 1,120 additions and 22 deletions
  1. +1 -0     .idea/gradle.xml
  2. +7 -0     conf/evps-kafka-producer.yml
  3. +17 -4    evps-comm-server/src/main/java/com/evps/comm/server/kafka/KafkaProducerService.java
  4. +2 -0     evps-comm-server/src/main/java/com/evps/comm/server/xnet/server/process/response/EvpsNode.java
  5. +3 -2     evps-comm-server/src/main/java/com/evps/comm/server/xnet/server/process/response/EvpsServiceEnd.java
  6. +3 -0     evps-common/src/main/java/com/evps/common/kafka/dto/EvpsKafkaConst.java
  7. +4 -4     evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsEventDto.java
  8. +4 -0     evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsNodeDto.java
  9. +5 -0     evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsServiceDto.java
  10. +4 -0    evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsServiceEndDto.java
  11. +21 -2   evps-consumer/src/main/java/com/evps/consumer/EvpCommConsumerApplication.java
  12. +5 -5    evps-consumer/src/main/java/com/evps/consumer/service/KafkaConsumerService.java
  13. +10 -5   evps-consumer/src/main/java/com/evps/consumer/service/KafkaUticEvpsConsumerWorker.java
  14. +56 -0   evps-kafka-producer/build.gradle
  15. +7 -0    evps-kafka-producer/conf/evps-kafka-producer.yml
  16. +76 -0   evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/EvpsKafkaProducerApplication.java
  17. +33 -0   evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/config/ApplicationConfig.java
  18. +59 -0   evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/config/KafkaConfig.java
  19. +44 -0   evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/config/SchedulingConfig.java
  20. +126 -0  evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/kafka/KafkaProducerFactory.java
  21. +103 -0  evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/kafka/KafkaProducerService.java
  22. +37 -0   evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/scheduler/ApplicationScheduler.java
  23. +70 -0   evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/service/EvpsKafkaProducerManagerService.java
  24. +263 -0  evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/service/EvpsKafkaSimGen.java
  25. +67 -0   evps-kafka-producer/src/main/resources/application.yml
  26. +41 -0   evps-kafka-producer/src/main/resources/logback-spring-appender.xml
  27. +51 -0   evps-kafka-producer/src/main/resources/logback-spring.xml
  28. +1 -0    settings.gradle

+ 1 - 0
.idea/gradle.xml

@@ -11,6 +11,7 @@
             <option value="$PROJECT_DIR$/evps-comm-server" />
             <option value="$PROJECT_DIR$/evps-common" />
             <option value="$PROJECT_DIR$/evps-consumer" />
+            <option value="$PROJECT_DIR$/evps-kafka-producer" />
           </set>
         </option>
       </GradleProjectSettings>

+ 7 - 0
conf/evps-kafka-producer.yml

@@ -0,0 +1,7 @@
+spring:
+  profiles:
+    active: dev
+
+application:
+  process-id: evps-kafka-producer
+  region-id: 183

+ 17 - 4
evps-comm-server/src/main/java/com/evps/comm/server/kafka/KafkaProducerService.java

@@ -17,6 +17,7 @@ import javax.annotation.PostConstruct;
 public class KafkaProducerService {
 
     private final KafkaConfig config;
+    private final String topicName = EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME;
 
     private KafkaTemplate<String, KafkaEvpsData> uticEvpsProducer;
 
@@ -31,7 +32,7 @@ public class KafkaProducerService {
     public void sendEvpsServiceTopic(KafkaEvpsServiceDto data) {
         if (this.uticEvpsProducer != null) {
             try {
-                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME, EvpsKafkaConst.KAFKA_EVPS_SERVICE, data);
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_SERVICE, data);
                 log.info("sendEvpsServiceTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_SERVICE, data.getServiceId(), data);
             }
             catch (Exception e) {
@@ -40,10 +41,22 @@ public class KafkaProducerService {
         }
     }
 
+    public void sendEvpsServiceEndTopic(KafkaEvpsServiceEndDto data) {
+        if (this.uticEvpsProducer != null) {
+            try {
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_SERVICE_END, data);
+                log.info("sendEvpsServiceEndTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_SERVICE_END, data.getServiceId(), data);
+            }
+            catch (Exception e) {
+                log.error("sendEvpsServiceEndTopic: {}, {}: {}", EvpsKafkaConst.KAFKA_EVPS_SERVICE_END, data.getServiceId(), e.toString());
+            }
+        }
+    }
+
     public void sendEvpsNodeTopic(KafkaEvpsNodeDto data) {
         if (this.uticEvpsProducer != null) {
             try {
-                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME, EvpsKafkaConst.KAFKA_EVPS_NODE, data);
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_NODE, data);
                 log.info("sendEvpsNodeTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_NODE, data.getServiceId(), data);
             }
             catch (Exception e) {
@@ -55,7 +68,7 @@ public class KafkaProducerService {
     public void sendEvpsSignalTopic(KafkaEvpsSignalDto data) {
         if (this.uticEvpsProducer != null) {
             try {
-                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME, EvpsKafkaConst.KAFKA_EVPS_SIGNAL, data);
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_SIGNAL, data);
                 log.info("sendEvpsSignalTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_SIGNAL, data.getServiceId(), data);
             }
             catch (Exception e) {
@@ -67,7 +80,7 @@ public class KafkaProducerService {
     public void sendEvpsEventTopic(KafkaEvpsEventDto data) {
         if (this.uticEvpsProducer != null) {
             try {
-                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME, EvpsKafkaConst.KAFKA_EVPS_EVENT, data);
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_EVENT, data);
                 log.info("sendEvpsEventTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_EVENT, data.getServiceId(), data);
             }
             catch (Exception e) {

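Every send method in this class now publishes to the single topic held in topicName and relies on the record key (evps-service, evps-service-end, evps-node, evps-signal, evps-event) to distinguish record types. As a minimal illustration of the new service-end path, a hedged sketch of a caller; the service id and reason values are illustrative and the calling context is hypothetical, not part of this commit:

    // Hypothetical caller: build a service-end record and publish it on the shared EVPS topic.
    KafkaEvpsServiceEndDto end = KafkaEvpsServiceEndDto.builder()
            .serviceId("SVC-0001")             // illustrative service id
            .clctDt(EvpsUtils.getClctDt())     // collection time, as set in EvpsServiceEnd
            .reason(0)                         // illustrative end reason
            .build();
    kafkaProducerService.sendEvpsServiceEndTopic(end);   // sent with key "evps-service-end"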
+ 2 - 0
evps-comm-server/src/main/java/com/evps/comm/server/xnet/server/process/response/EvpsNode.java

@@ -13,6 +13,7 @@ import com.evps.common.protocol.EvpsProtocolConst;
 import com.evps.common.protocol.response.EvpsCommResponse;
 import com.evps.common.protocol.response.RecvPacketDto;
 import com.evps.common.utils.EvpsByteUtils;
+import com.evps.common.utils.EvpsUtils;
 import lombok.AllArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.slf4j.MDC;
@@ -100,6 +101,7 @@ public class EvpsNode implements EvpsCommResponse {
 
         KafkaEvpsNodeDto data = KafkaEvpsNodeDto.builder()
                 .serviceId(serviceId)
+                .clctDt(EvpsUtils.getClctDt())
                 .nodeList(new ArrayList<>())
                 .build();
 

+ 3 - 2
evps-comm-server/src/main/java/com/evps/comm/server/xnet/server/process/response/EvpsServiceEnd.java

@@ -65,6 +65,7 @@ public class EvpsServiceEnd implements EvpsCommResponse {
         int reason = ((buffer[idx++] & 0xFF) << 24) | ((buffer[idx++] & 0xFF) << 16) | ((buffer[idx++] & 0xFF) << 8) | (buffer[idx++] & 0xFF);
         return KafkaEvpsServiceEndDto.builder()
                 .serviceId(serviceId)
+                .clctDt(EvpsUtils.getClctDt())
                 .reason(reason)
                 .build();
     }
@@ -82,8 +83,8 @@ public class EvpsServiceEnd implements EvpsCommResponse {
 
         // Event info (service end)
         KafkaEvpsEventDto event = KafkaEvpsEventDto.builder()
-                .clctDt(EvpsUtils.getClctDt())
                 .serviceId(data.getServiceId())
+                .clctDt(EvpsUtils.getClctDt())
                 .eventCd(KafkaEvpsEventDto.EVPS_EVENT_SERVICE_END)
                 .build();
 
@@ -112,7 +113,7 @@ public class EvpsServiceEnd implements EvpsCommResponse {
         ApplicationRepository.delService(service.getServiceId());
 
         // kafka service 전송
-        this.kafkaProducerService.sendEvpsServiceTopic(service);
+        this.kafkaProducerService.sendEvpsServiceEndTopic(data);
 
         // kafka event-terminate 전송
         this.kafkaProducerService.sendEvpsEventTopic(event);

+ 3 - 0
evps-common/src/main/java/com/evps/common/kafka/dto/EvpsKafkaConst.java

@@ -5,10 +5,13 @@ public class EvpsKafkaConst {
     private EvpsKafkaConst() {}
 
     public static final String KAFKA_EVPS_TOPIC_NAME = "utic-evps";
+    public static final String KAFKA_EVPS_TEST_TOPIC_NAME = "utic-evps-test";
+    public static final String KAFKA_EVPS_SIMULATOR_TOPIC_NAME = "utic-evps-sim";
 
     public static final String KAFKA_EVPS_SERVICE = "evps-service";
     public static final String KAFKA_EVPS_NODE    = "evps-node";
     public static final String KAFKA_EVPS_SIGNAL  = "evps-signal";
     public static final String KAFKA_EVPS_EVENT   = "evps-event";
+    public static final String KAFKA_EVPS_SERVICE_END = "evps-service-end";
 
 }

+ 4 - 4
evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsEventDto.java

@@ -18,14 +18,14 @@ public class KafkaEvpsEventDto implements KafkaEvpsData {
     public static final int EVPS_EVENT_VEHICLE_MOVE = 1;
     public static final int EVPS_EVENT_SERVICE_END = 2;
 
+    /**
+     * Emergency vehicle service ID
+     */
+    private String serviceId;
     /**
     * Collection time
     */
     private String clctDt;
-    /**
-    * Emergency vehicle service ID
-    */
-    private String serviceId;
     /**
      * Emergency vehicle number (Not Used)
      */

+ 4 - 0
evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsNodeDto.java

@@ -17,6 +17,10 @@ public class KafkaEvpsNodeDto implements KafkaEvpsData{
    * Emergency vehicle service ID
     */
     private String serviceId;
+    /**
+     * Collection time
+     */
+    private String clctDt;
 
     @Builder.Default
     private List<EvpsNodeInfo> nodeList = new ArrayList<>();

+ 5 - 0
evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsServiceDto.java

@@ -1,5 +1,6 @@
 package com.evps.common.kafka.dto;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;
@@ -18,6 +19,7 @@ import java.util.List;
 public class KafkaEvpsServiceDto implements KafkaEvpsData {
 
     public static final int SERVICE_START = 1;
+    public static final int SERVICE_NORMAL_END = 2;
 
     /**
    * Emergency vehicle service ID
@@ -75,16 +77,19 @@ public class KafkaEvpsServiceDto implements KafkaEvpsData {
     /**
      * Current vehicle speed (used only in this DTO)
      */
+    @JsonIgnore
     private Integer curSpd;
 
     /**
     * Service status code (1: in progress - service running, 2: normal end - all intersection control and release completed, 3: cancelled - intersections not yet passed remain, 4: forced end by center - operator forcibly ended the service, 5: abnormal end - service does not exist, 6: service start failure - no intersections to control, 7: abnormal end - error in the app server, 8: abnormal end - no position/speed data from the app for a set time, 9: auto end - route deviation, 10: auto end - route entry time limit exceeded, 11: auto end - stop time limit exceeded, 12: cancelled - all intersection control and release completed, 13: failure - service control request failed, 14: failure - no serviceable intersections exist, 15: auto end - position data reception time limit exceeded)
     */
+    @JsonIgnore
     private Integer statusCd;
 
     @Builder.Default
     private List<EvpsRouteInfo> routeList = new ArrayList<>();
 
+    @JsonIgnore
     @Builder.Default
     private List<EvpsNodeInfo> nodeList = new ArrayList<>();
 

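The three @JsonIgnore markers mean curSpd, statusCd and nodeList are omitted when this DTO is serialized by Jackson (the JsonSerializer used by the producer templates), so those fields stay internal to the server. A minimal stand-alone sketch of that effect, assuming default ObjectMapper settings; the class name and field values are illustrative:

    import com.evps.common.kafka.dto.KafkaEvpsServiceDto;
    import com.fasterxml.jackson.databind.ObjectMapper;

    class JsonIgnoreCheck {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            KafkaEvpsServiceDto dto = KafkaEvpsServiceDto.builder()
                    .serviceId("SVC-0001")   // illustrative id
                    .curSpd(60)              // @JsonIgnore: dropped from the payload
                    .statusCd(1)             // @JsonIgnore: dropped from the payload
                    .build();
            // Prints JSON containing serviceId and the other mapped fields,
            // but no curSpd, statusCd or nodeList entries.
            System.out.println(mapper.writeValueAsString(dto));
        }
    }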
+ 4 - 0
evps-common/src/main/java/com/evps/common/kafka/dto/KafkaEvpsServiceEndDto.java

@@ -18,6 +18,10 @@ public class KafkaEvpsServiceEndDto implements KafkaEvpsData {
    * Emergency vehicle service ID
     */
     private String serviceId;
+    /**
+     * Collection time
+     */
+    private String clctDt;
 
     /**
    * Service status code (1: in progress - service running, 2: normal end - all intersection control and release completed, 3: cancelled - intersections not yet passed remain, 4: forced end by center - operator forcibly ended the service, 5: abnormal end - service does not exist, 6: service start failure - no intersections to control, 7: abnormal end - error in the app server, 8: abnormal end - no position/speed data from the app for a set time, 9: auto end - route deviation, 10: auto end - route entry time limit exceeded, 11: auto end - stop time limit exceeded, 12: cancelled - all intersection control and release completed, 13: failure - service control request failed, 14: failure - no serviceable intersections exist, 15: auto end - position data reception time limit exceeded)

+ 21 - 2
evps-consumer/src/main/java/com/evps/consumer/EvpCommConsumerApplication.java

@@ -1,5 +1,6 @@
 package com.evps.consumer;
 
+import com.evps.common.kafka.dto.EvpsKafkaConst;
 import com.evps.consumer.service.KafkaConsumerService;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.boot.ApplicationArguments;
@@ -22,6 +23,7 @@ import java.util.Set;
 public class EvpCommConsumerApplication implements ApplicationRunner, ApplicationListener<ContextClosedEvent> {
 
     private static String bootstrapServers = "172.24.0.30:9092,172.24.0.31:9093,172.24.0.32:9094";
+    private static String topicName = EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME;
     private static String consumerGroup = "evps-consumer";
 
     private KafkaConsumerService kafkaConsumerService = null;
@@ -54,16 +56,33 @@ public class EvpCommConsumerApplication implements ApplicationRunner, Applicatio
                 else if ("group".equals(optionName)) {
                     consumerGroup = optionValue;
                 }
+                else if ("topic".equals(optionName)) {
+                    if ("test".equals(optionValue)) {
+                        topicName = EvpsKafkaConst.KAFKA_EVPS_TEST_TOPIC_NAME;
+                    }
+                    else if ("sim".equals(optionValue)) {
+                        topicName = EvpsKafkaConst.KAFKA_EVPS_SIMULATOR_TOPIC_NAME;
+                    }
+                    else {
+                        topicName = "xxx";
+                    }
+                }
+            }
+            if (!EvpsKafkaConst.KAFKA_EVPS_TEST_TOPIC_NAME.equals(topicName) && !EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME.equals(topicName) && !EvpsKafkaConst.KAFKA_EVPS_SIMULATOR_TOPIC_NAME.equals(topicName)) {
+                log.error("topic name error: {}", topicName);
+                usage();
+                return;
             }
         }
 
-        kafkaConsumerService = new KafkaConsumerService(bootstrapServers, consumerGroup);
+        kafkaConsumerService = new KafkaConsumerService(topicName, bootstrapServers, consumerGroup);
         kafkaConsumerService.start();
     }
 
     private void usage() {
-        log.info("\r\n\n\n\n\nUsage: java -jar evps-consumer --servers=xxx.xxx.xxx.xxx:nnnn --group:evps-consumer\n" +
+        log.info("\r\n\n\n\n\nUsage: java -jar evps-consumer --servers=xxx.xxx.xxx.xxx:nnnn --topic=test --group:evps-consumer\n" +
                 "   --servers=kafka bootstrap server[optional, default=172.24.0.30:9092,172.24.0.31:9093,172.24.0.32:9094]\n" +
+                "   --topic=consumer topic name[optional[sim/test], default=utic-evps, test=utic-evps-test, sim=utic-evps-sim]\n" +
                 "   --group=kafka consumer group name[optional, default=evps-consumer]\n\n\n\n");
     }
 

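The new --topic switch maps test and sim onto the extra topic constants and flags anything else by way of the "xxx" sentinel plus the whitelist check after the loop. For clarity, a hedged sketch of an equivalent mapping; resolveTopic is a hypothetical helper, not part of this commit:

    // Hypothetical helper: map the --topic option value to a known topic, or null if unrecognised.
    private static String resolveTopic(String optionValue) {
        if ("test".equals(optionValue)) {
            return EvpsKafkaConst.KAFKA_EVPS_TEST_TOPIC_NAME;       // utic-evps-test
        }
        if ("sim".equals(optionValue)) {
            return EvpsKafkaConst.KAFKA_EVPS_SIMULATOR_TOPIC_NAME;  // utic-evps-sim
        }
        return null;  // caller would log the error and print usage(), as the sentinel check does now
    }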
+ 5 - 5
evps-consumer/src/main/java/com/evps/consumer/service/KafkaConsumerService.java

@@ -1,6 +1,5 @@
 package com.evps.consumer.service;
 
-import com.evps.common.kafka.dto.EvpsKafkaConst;
 import com.evps.common.kafka.dto.KafkaEvpsData;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
@@ -23,6 +22,7 @@ import java.util.Map;
 @RequiredArgsConstructor
 public class KafkaConsumerService {
 
+    private final String topicName;
     private final String bootstrapServers;
     private final String groupId;
 
@@ -31,13 +31,13 @@ public class KafkaConsumerService {
     private ConcurrentMessageListenerContainer<String, KafkaEvpsData> uticEvpsListenerContainer;
 
     public void start() {
-        log.info("Starting Kafka Consumer: bootstrapServers: {}, group: {}", this.bootstrapServers, this.groupId);
+        log.info("Starting Kafka Consumer: bootstrapServers: {}, topic: {}, group: {}", this.bootstrapServers, this.topicName, this.groupId);
 
-        ContainerProperties containerProperties = new ContainerProperties(EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME);
+        ContainerProperties containerProperties = new ContainerProperties(topicName);
         containerProperties.setGroupId(this.groupId+"Z");
         containerProperties.setPollTimeout(5000);
         //containerProperties.setAckMode(ContainerProperties.AckMode.MANUAL);
-        containerProperties.setMessageListener(new KafkaConsumerWorker());
+        containerProperties.setMessageListener(new KafkaUticEvpsConsumerWorker());
         containerProperties.setConsumerRebalanceListener(new ConsumerAwareRebalanceListener() {
             @Override
             public void onPartitionsRevokedBeforeCommit(Consumer<?, ?> consumer, Collection<TopicPartition> partitions) {
@@ -61,7 +61,7 @@ public class KafkaConsumerService {
         });
         this.kafkaListenerContainer.start();
 
-//        ContainerProperties containerProperties = new ContainerProperties(EvpsKafkaConst.KAFKA_EVPS_TOPIC_NAME);
+//        ContainerProperties containerProperties = new ContainerProperties(topicName);
 //        containerProperties.setGroupId(this.groupId);
 //        containerProperties.setPollTimeout(5000);
 //        containerProperties.setMessageListener(new KafkaUticEvpsConsumerWorker());

+ 10 - 5
evps-consumer/src/main/java/com/evps/consumer/service/KafkaUticEvpsConsumerWorker.java

@@ -12,27 +12,32 @@ import org.springframework.kafka.listener.MessageListener;
 @AllArgsConstructor
 public class KafkaUticEvpsConsumerWorker implements MessageListener<String, String> {
 
-    private static ObjectMapper mapper = new ObjectMapper();
+    private static final ObjectMapper mapper = new ObjectMapper();
 
     @Override
     public void onMessage(ConsumerRecord<String, String> record) {
 
+        log.info("onMessage: Key: {}, Data: {}", record.key(), record.value());
         try {
             if (EvpsKafkaConst.KAFKA_EVPS_EVENT.equals(record.key())) {
                 KafkaEvpsEventDto data = mapper.readValue(record.value(), KafkaEvpsEventDto.class);
-                log.info("EvpsEvent: {}", data);
+                log.info("---EvpsEvent: {}", data);
             }
             else if (EvpsKafkaConst.KAFKA_EVPS_SIGNAL.equals(record.key())) {
                 KafkaEvpsSignalDto data = mapper.readValue(record.value(), KafkaEvpsSignalDto.class);
-                log.info("EvpsSignal: {}", data);
+                log.info("--EvpsSignal: {}", data);
             }
             else if (EvpsKafkaConst.KAFKA_EVPS_NODE.equals(record.key())) {
                 KafkaEvpsNodeDto data = mapper.readValue(record.value(), KafkaEvpsNodeDto.class);
-                log.info("EvpsNode: {}", data);
+                log.info("----EvpsNode: {}", data);
             }
             else if (EvpsKafkaConst.KAFKA_EVPS_SERVICE.equals(record.key())) {
                 KafkaEvpsServiceDto data = mapper.readValue(record.value(), KafkaEvpsServiceDto.class);
-                log.info("EvpsService: {}", data);
+                log.info("-EvpsService: {}", data);
+            }
+            else if (EvpsKafkaConst.KAFKA_EVPS_SERVICE_END.equals(record.key())) {
+                KafkaEvpsServiceEndDto data = mapper.readValue(record.value(), KafkaEvpsServiceEndDto.class);
+                log.info("-EvpsServiceEnd: {}", data);
             }
             else {
                 log.error("Unknown Utic Evps Kafka Key: {}, {}", record.key(), record.value());

+ 56 - 0
evps-kafka-producer/build.gradle

@@ -0,0 +1,56 @@
+plugins {
+    id 'java'
+    id 'maven-publish' // uses maven
+}
+
+group = 'com.evps'
+version = '0.0.1'
+
+sourceCompatibility = '1.8'
+targetCompatibility = '1.8'
+compileJava.options.encoding = 'UTF-8'
+
+repositories {
+    mavenLocal()
+    mavenCentral()
+    flatDir(dir: 'C:\\java\\repository\\')
+}
+
+dependencies {
+    // lombok library dependencies: start
+    compileOnly 'org.projectlombok:lombok'
+    annotationProcessor 'org.projectlombok:lombok'
+    testCompileOnly 'org.projectlombok:lombok'
+    testAnnotationProcessor 'org.projectlombok:lombok'
+    // lombok library dependencies: end
+
+    implementation 'org.springframework.boot:spring-boot-starter-web'
+    implementation 'org.springframework.boot:spring-boot-starter-actuator'
+    implementation 'org.springframework.boot:spring-boot-starter-aop'
+
+    implementation 'org.springframework.kafka:spring-kafka'
+    implementation 'com.fasterxml.jackson.core:jackson-databind'
+
+    implementation 'com.evps:evps-common:0.0.1'
+    implementation 'com.its:its-common:0.0.1'
+    implementation 'com.its:its-network:0.0.1'
+    implementation 'com.its:its-spring:0.0.1'
+
+
+    testImplementation 'org.springframework.boot:spring-boot-starter-test'
+}
+
+test {
+    useJUnitPlatform()
+}
+
+jar {
+    enabled = false
+}
+
+compileJava.options.encoding = 'UTF-8'
+tasks.withType(JavaCompile).configureEach {
+    options.compilerArgs << '-Xlint:unchecked'
+    options.deprecation = true
+    options.encoding = 'UTF-8'
+}

+ 7 - 0
evps-kafka-producer/conf/evps-kafka-producer.yml

@@ -0,0 +1,7 @@
+spring:
+  profiles:
+    active: dev
+
+application:
+  process-id: evps-kafka-producer
+  region-id: 183

+ 76 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/EvpsKafkaProducerApplication.java

@@ -0,0 +1,76 @@
+package com.evps.simulator.kafka.producer;
+
+import com.evps.simulator.kafka.producer.service.EvpsKafkaProducerManagerService;
+import com.its.common.spring.SpringUtils;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.DisposableBean;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.boot.Banner;
+import org.springframework.boot.CommandLineRunner;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+import org.springframework.boot.context.ApplicationPidFileWriter;
+import org.springframework.context.ApplicationListener;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.event.ContextClosedEvent;
+import org.springframework.transaction.annotation.EnableTransactionManagement;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+@Slf4j
+@SpringBootApplication
+@EnableTransactionManagement
+@ComponentScan(basePackages = {"com.its.common.spring", "com.evps.simulator.kafka.producer", "com.evps.simulator.kafka.producer"})
+public class EvpsKafkaProducerApplication implements CommandLineRunner, ApplicationListener<ContextClosedEvent>, InitializingBean, DisposableBean {
+
+    private static final String APPLICATION_NAME = "evps-kafka-producer";
+
+    public static void main(String[] args) {
+        SpringApplication application = new SpringApplicationBuilder()
+                .sources(EvpsKafkaProducerApplication.class)
+                .listeners(new ApplicationPidFileWriter("./conf/" + APPLICATION_NAME + ".pid"))
+                .build();
+        application.setBannerMode(Banner.Mode.OFF);
+        application.run(args);
+    }
+
+    @Override
+    public void run(String... args) throws Exception {
+        SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        log.info("");
+        log.info("");
+        log.info("************************************************************************************");
+        log.info("**                                                                                **");
+        log.info("**                            UTIC Signal System                                  **");
+        log.info("**    UTIC Emergency Vehicle Preemption System Kafka Producer Simulator Program.  **");
+        log.info("**                                                                                **");
+        log.info("**                                                                   [ver.1.0]    **");
+        log.info("** startup: {}", sdfDate.format(new Date()));
+        log.info("************************************************************************************");
+
+        EvpsKafkaProducerManagerService evpsKafkaProducerManagerService = SpringUtils.getBean(EvpsKafkaProducerManagerService.class);
+        evpsKafkaProducerManagerService.run();
+    }
+
+    @Override
+    public void onApplicationEvent(ContextClosedEvent contextClosedEvent) {
+        SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+        log.error("************************************************************************************");
+        log.error("**    Application Terminated: {}, {}, {}",
+                sdfDate.format(new Date()), contextClosedEvent.getTimestamp(), contextClosedEvent);
+        log.error("************************************************************************************");
+    }
+
+    @Override
+    public void destroy() throws Exception {
+        log.error("Application destroy.");
+    }
+
+    @Override
+    public void afterPropertiesSet() throws Exception {
+        log.info("Application afterPropertiesSet.");
+    }
+}

+ 33 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/config/ApplicationConfig.java

@@ -0,0 +1,33 @@
+package com.evps.simulator.kafka.producer.config;
+
+import lombok.Getter;
+import lombok.Setter;
+import lombok.ToString;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+
+@Slf4j
+@Getter
+@Setter
+@ToString
+@Component
+@EnableAutoConfiguration(exclude={DataSourceAutoConfiguration.class})
+@ConfigurationProperties(prefix = "application")
+public class ApplicationConfig {
+
+    private String processId = "evps-kafka-producer";
+    private String regionId = "183";
+
+    @PostConstruct
+    private void init() {
+
+        log.info("[{}] -------------------------", this.getClass().getSimpleName());
+        log.info("{}", super.toString());
+    }
+
+}

+ 59 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/config/KafkaConfig.java

@@ -0,0 +1,59 @@
+package com.evps.simulator.kafka.producer.config;
+
+import com.its.common.utils.NetUtils;
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Slf4j
+@Data
+@Component
+@ConfigurationProperties(prefix = "application.kafka")
+public class KafkaConfig {
+
+    private String bootstrapServers;
+    private String groupId = "evps-kafka-producer";
+    private String pingTopic = "ping-topic";
+
+    private String consumerGroupId = "evps-comm-server";
+    private String consumerAckConfig = "1";
+
+    private boolean multiConnect = false;
+    private boolean enableNode = false;
+    private String nodeServers = "";
+    public List<Map<String, String>> props = new ArrayList<Map<String, String>>();
+
+    @PostConstruct
+    private void init() {
+        log.info("{}", this);
+    }
+
+    public String getGroupId() {
+        return this.consumerGroupId + "-" + NetUtils.getHostName();
+    }
+
+    public Map<String, Object> getConsumerPropertiesMap() {
+        Map<String, Object> properties = new HashMap<>();
+        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapServers);
+        properties.put(ConsumerConfig.GROUP_ID_CONFIG, getGroupId());
+        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
+        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1);
+        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "10000");
+        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
+        properties.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "100");
+        properties.put(ConsumerConfig.CHECK_CRCS_CONFIG, false);
+        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 1);
+        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringDeserializer.class);
+        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.LongDeserializer.class);
+
+        return properties;
+    }
+}

+ 44 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/config/SchedulingConfig.java

@@ -0,0 +1,44 @@
+package com.evps.simulator.kafka.producer.config;
+
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.SchedulingConfigurer;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
+import org.springframework.scheduling.config.ScheduledTaskRegistrar;
+
+import javax.annotation.PostConstruct;
+
+@Slf4j
+@Data
+@Configuration
+@ConfigurationProperties(prefix = "application.scheduling")
+public class SchedulingConfig implements SchedulingConfigurer {
+
+    private int poolCore = 0;
+
+    private final int scheduleThreadPoolSize = 10;
+
+    @PostConstruct
+    private void init() {
+        log.info("[{}] ------------", this.getClass().getSimpleName());
+        if (this.poolCore == 0) {
+            log.warn("[{}] poolCore size set as default: {} EA.", this.getClass().getSimpleName(), this.poolCore);
+            this.poolCore = 10;
+        }
+        log.info("[{}] poolCore: {} EA.", this.getClass().getSimpleName(), this.poolCore);
+    }
+
+    @Override
+    public void configureTasks(ScheduledTaskRegistrar scheduledTaskRegistrar) {
+
+        ThreadPoolTaskScheduler threadPoolTaskScheduler = new ThreadPoolTaskScheduler();
+
+        threadPoolTaskScheduler.setPoolSize(this.scheduleThreadPoolSize);
+        threadPoolTaskScheduler.setThreadNamePrefix("scheduler-");
+        threadPoolTaskScheduler.initialize();
+
+        scheduledTaskRegistrar.setTaskScheduler(threadPoolTaskScheduler);
+    }
+}

+ 126 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/kafka/KafkaProducerFactory.java

@@ -0,0 +1,126 @@
+package com.evps.simulator.kafka.producer.kafka;
+
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.kafka.core.DefaultKafkaProducerFactory;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.serializer.JsonSerializer;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+public class KafkaProducerFactory {
+
+    private KafkaProducerFactory() {
+        throw new IllegalStateException("KafkaProducerFactory class");
+    }
+
+    public static <K,V> KafkaTemplate<K, V> createJsonTemplate(String bootstrapServers, List<Map<String, String>> props) {
+        Map<String, Object> configs = new HashMap<String, Object>();
+        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
+        configs.put("enable.idempotence", false);
+        configs.put(ProducerConfig.ACKS_CONFIG, "0");
+        configs.put(ProducerConfig.RETRIES_CONFIG, 0);
+        configs.put(ProducerConfig.LINGER_MS_CONFIG, 1);
+        configs.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 3000);
+        configs.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, 4000);
+        //configs.put("queue.buffering.max.messages", 10000000);
+        //configs.put("queue.buffering.max.kbytes", 2147483647);
+        //configs.put("queue.buffering.max.ms", 0);
+        //configs.put("api.version.request", false);
+        configs.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 5000);
+        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
+        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
+
+        for (Map<String, String> prop : props) {
+            for (Map.Entry<String, String> elem : prop.entrySet()) {
+                String key = elem.getKey();
+                String val = elem.getValue();
+                if (val != null) {
+                    if (val.equals("true") || val.equals("false")) {
+                        configs.put(key, val.equals("true"));
+                    } else {
+                        configs.put(key, val);
+                    }
+                }
+            }
+        }
+
+        DefaultKafkaProducerFactory<K, V> defaultKafkaProducerFactory = new DefaultKafkaProducerFactory<>(configs);
+        return new KafkaTemplate<>(defaultKafkaProducerFactory);
+    }
+
+    public static <K,V> KafkaTemplate<K, V> createByteArrayTemplate(String bootstrapServers, List<Map<String, String>> props) {
+        Map<String, Object> configs = new HashMap<String, Object>();
+        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
+        configs.put("enable.idempotence", false);
+        configs.put(ProducerConfig.ACKS_CONFIG, "0");
+        configs.put(ProducerConfig.RETRIES_CONFIG, 0);
+        configs.put(ProducerConfig.LINGER_MS_CONFIG, 1);
+        configs.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 3000);
+        configs.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, 4000);
+        //configs.put("queue.buffering.max.messages", 10000000);
+        //configs.put("queue.buffering.max.kbytes", 2147483647);
+        //configs.put("queue.buffering.max.ms", 0);
+        //configs.put("api.version.request", false);
+        configs.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 5000);
+        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
+        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.ByteArraySerializer.class);
+
+        for (Map<String, String> prop : props) {
+            for (Map.Entry<String, String> elem : prop.entrySet()) {
+                String key = elem.getKey();
+                String val = elem.getValue();
+                if (val != null) {
+                    if (val.equals("true") || val.equals("false")) {
+                        configs.put(key, val.equals("true"));
+                    } else {
+                        configs.put(key, val);
+                    }
+                }
+            }
+        }
+
+        DefaultKafkaProducerFactory<K, V> defaultKafkaProducerFactory = new DefaultKafkaProducerFactory<K, V>(configs);
+        return new KafkaTemplate<>(defaultKafkaProducerFactory);
+    }
+
+    public static <K,V> KafkaTemplate<K, V> createProducerTemplate(Map<String, Object> props) {
+        DefaultKafkaProducerFactory<K, V> defaultKafkaProducerFactory = new DefaultKafkaProducerFactory<>(props);
+        return new KafkaTemplate<>(defaultKafkaProducerFactory);
+    }
+
+    public static Properties getProperties(String bootstrapServers, List<Map<String, String>> props) {
+        Properties properties  = new Properties();
+        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
+        properties.put("enable.idempotence", false);
+        properties.put(ProducerConfig.ACKS_CONFIG, "0");
+        properties.put(ProducerConfig.RETRIES_CONFIG, 0);
+        properties.put(ProducerConfig.LINGER_MS_CONFIG, 1);
+        properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 3000);
+        properties.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, 4000);
+        //properties.put("queue.buffering.max.messages", 10000000);
+        //properties.put("queue.buffering.max.kbytes", 2147483647);
+        //properties.put("queue.buffering.max.ms", 0);
+        //properties.put("api.version.request", false);
+        properties.put("transaction.timeout.ms", 5000);
+        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
+        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.ByteArraySerializer.class);
+        for (Map<String, String> prop : props) {
+            for (Map.Entry<String, String> elem : prop.entrySet()) {
+                String key = elem.getKey();
+                String val = elem.getValue();
+                if (val != null) {
+                    if (val.equals("true") || val.equals("false")) {
+                        properties.put(key, val.equals("true"));
+                    } else {
+                        properties.put(key, val);
+                    }
+                }
+            }
+        }
+        return properties ;
+    }
+}
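createJsonTemplate, createByteArrayTemplate and getProperties all build the same base producer settings and run the same props override loop; only the value serializer differs. A minimal refactoring sketch, assuming one wanted to share that base; baseConfigs is a hypothetical helper, not part of this commit, and the timeout and idempotence settings are elided for brevity:

    // Hypothetical shared base: common producer settings plus the YAML prop overrides.
    private static Map<String, Object> baseConfigs(String bootstrapServers, List<Map<String, String>> props) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configs.put(ProducerConfig.ACKS_CONFIG, "0");
        configs.put(ProducerConfig.RETRIES_CONFIG, 0);
        configs.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        for (Map<String, String> prop : props) {
            prop.forEach((key, val) -> {
                if (val != null) {
                    // booleans arrive as strings from YAML, so convert them as the existing loops do
                    configs.put(key, ("true".equals(val) || "false".equals(val)) ? Boolean.valueOf(val) : val);
                }
            });
        }
        return configs;
    }

    // createJsonTemplate would then only add its value serializer:
    //   Map<String, Object> configs = baseConfigs(bootstrapServers, props);
    //   configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);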

+ 103 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/kafka/KafkaProducerService.java

@@ -0,0 +1,103 @@
+package com.evps.simulator.kafka.producer.kafka;
+
+import com.evps.common.kafka.dto.*;
+import com.evps.simulator.kafka.producer.config.KafkaConfig;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
+import org.springframework.stereotype.Service;
+import org.springframework.util.concurrent.ListenableFuture;
+
+import javax.annotation.PostConstruct;
+
+@Slf4j
+@AllArgsConstructor
+@Service
+public class KafkaProducerService {
+
+    private final KafkaConfig config;
+    private final String topicName = EvpsKafkaConst.KAFKA_EVPS_SIMULATOR_TOPIC_NAME;
+
+    private KafkaTemplate<String, KafkaEvpsData> uticEvpsProducer;
+
+    @PostConstruct
+    void init() {
+        this.uticEvpsProducer = KafkaProducerFactory.createJsonTemplate(this.config.getBootstrapServers(), this.config.props);
+
+        log.info("[{}] ------------------", this.getClass().getSimpleName());
+        log.info("[{}]   serviceProducer: {}", this.getClass().getSimpleName(), this.uticEvpsProducer);
+    }
+
+    public void sendEvpsServiceTopic(KafkaEvpsServiceDto data) {
+        if (this.uticEvpsProducer != null) {
+            try {
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_SERVICE, data);
+                log.info("sendEvpsServiceTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_SERVICE, data.getServiceId(), data);
+            }
+            catch (Exception e) {
+                log.error("sendEvpsServiceTopic: {}, {}: {}", EvpsKafkaConst.KAFKA_EVPS_SERVICE, data.getServiceId(), e.toString());
+            }
+        }
+    }
+
+    public void sendEvpsServiceEndTopic(KafkaEvpsServiceEndDto data) {
+        if (this.uticEvpsProducer != null) {
+            try {
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_SERVICE_END, data);
+                log.info("sendEvpsServiceEndTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_SERVICE_END, data.getServiceId(), data);
+            }
+            catch (Exception e) {
+                log.error("sendEvpsServiceEndTopic: {}, {}: {}", EvpsKafkaConst.KAFKA_EVPS_SERVICE_END, data.getServiceId(), e.toString());
+            }
+        }
+    }
+
+    public void sendEvpsNodeTopic(KafkaEvpsNodeDto data) {
+        if (this.uticEvpsProducer != null) {
+            try {
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_NODE, data);
+                log.info("sendEvpsNodeTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_NODE, data.getServiceId(), data);
+            }
+            catch (Exception e) {
+                log.error("sendEvpsNodeTopic: {}, {}: {}", EvpsKafkaConst.KAFKA_EVPS_NODE, data.getServiceId(), e.toString());
+            }
+        }
+    }
+
+    public void sendEvpsSignalTopic(KafkaEvpsSignalDto data) {
+        if (this.uticEvpsProducer != null) {
+            try {
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_SIGNAL, data);
+                log.info("sendEvpsSignalTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_SIGNAL, data.getServiceId(), data);
+            }
+            catch (Exception e) {
+                log.error("sendEvpsSignalTopic: {}, {}: {}", EvpsKafkaConst.KAFKA_EVPS_SIGNAL, data.getServiceId(), e.toString());
+            }
+        }
+    }
+
+    public void sendEvpsEventTopic(KafkaEvpsEventDto data) {
+        if (this.uticEvpsProducer != null) {
+            try {
+                ListenableFuture<SendResult<String, KafkaEvpsData>> result = this.uticEvpsProducer.send(this.topicName, EvpsKafkaConst.KAFKA_EVPS_EVENT, data);
+                log.info("sendEvpsEventTopic: {}, Key: {}, Data: {}", EvpsKafkaConst.KAFKA_EVPS_EVENT, data.getServiceId(), data);
+            }
+            catch (Exception e) {
+                log.error("sendEvpsEventTopic: {}, {}: {}", EvpsKafkaConst.KAFKA_EVPS_EVENT, data.getServiceId(), e.toString());
+            }
+        }
+    }
+
+    public void shutdown() {
+        try {
+            if (this.uticEvpsProducer != null) {
+                this.uticEvpsProducer.destroy();
+            }
+        }
+        catch(Exception e) {
+            log.error("Failed to shutdown: {}", e.getMessage());
+        }
+    }
+
+}

+ 37 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/scheduler/ApplicationScheduler.java

@@ -0,0 +1,37 @@
+package com.evps.simulator.kafka.producer.scheduler;
+
+import com.evps.simulator.kafka.producer.service.EvpsKafkaProducerManagerService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PreDestroy;
+
+@Slf4j
+@RequiredArgsConstructor
+@EnableScheduling
+@Component
+public class ApplicationScheduler {
+
+    private final EvpsKafkaProducerManagerService evpsKafkaProducerManagerService;
+
+    @PreDestroy
+    public void onShutDown() {
+        log.info("ApplicationScheduler.onShutDown: Shutting down...");
+    }
+
+    @Async
+    @Scheduled(cron = "10 0/10 * * * *")  // runs on a 10-minute cycle
+    public void sendEvpsKafkaSimulatorData() {
+        try {
+            this.evpsKafkaProducerManagerService.run();
+        }
+        catch(Exception e) {
+            log.error("ApplicationScheduler.sendEvpsKafkaSimulatorData: Exception {}", e.getMessage());
+        }
+    }
+
+}
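Spring cron expressions have six fields (second, minute, hour, day of month, month, day of week), so "10 0/10 * * * *" fires at second 10 of every tenth minute, matching the inline comment. A roughly equivalent alternative using a fixed delay, purely illustrative and not part of this commit; note that fixedDelay counts from the end of the previous run rather than following a wall-clock schedule:

    // Illustrative alternative: trigger the same job with a fixed 10-minute delay between runs.
    @Async
    @Scheduled(fixedDelay = 600_000L)   // 10 minutes in milliseconds
    public void sendEvpsKafkaSimulatorDataFixedDelay() {
        this.evpsKafkaProducerManagerService.run();
    }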

+ 70 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/service/EvpsKafkaProducerManagerService.java

@@ -0,0 +1,70 @@
+package com.evps.simulator.kafka.producer.service;
+
+import com.evps.common.kafka.dto.*;
+import com.evps.common.utils.EvpsUtils;
+import com.evps.simulator.kafka.producer.config.ApplicationConfig;
+import com.evps.simulator.kafka.producer.kafka.KafkaProducerService;
+import com.its.common.utils.Elapsed;
+import com.its.common.utils.TimeUtils;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class EvpsKafkaProducerManagerService {
+
+    private final ApplicationConfig config;
+    private final KafkaProducerService kafkaProducerService;
+    private boolean isRunning = false;
+
+    @PostConstruct
+    private void init() {
+        this.isRunning = false;
+    }
+
+    public void run() {
+        if (this.isRunning) {
+            log.warn("EvpsKafkaProducerManagerService: Already running....................");
+            return;
+        }
+
+        this.isRunning = true;
+        Elapsed elapsed = new Elapsed();
+
+        EvpsKafkaSimGen simGenData = new EvpsKafkaSimGen(this.config.getRegionId(), "대종로 사거리", "74무5035", 2);
+
+        simGenData.makeEvpsKafkaSimulatorData();
+
+        KafkaEvpsServiceDto serviceDto = simGenData.getServiceStartDto();
+        serviceDto.setClctDt(EvpsUtils.getClctDt());
+        this.kafkaProducerService.sendEvpsServiceTopic(serviceDto);
+
+        KafkaEvpsNodeDto nodeDto = simGenData.getEvpsNodeDto();
+        nodeDto.setClctDt(EvpsUtils.getClctDt());
+        this.kafkaProducerService.sendEvpsNodeTopic(nodeDto);
+
+        for (int ii = 0; ii < simGenData.getListSignalDto().size(); ii++) {
+            TimeUtils.sleep(1000);
+
+            KafkaEvpsSignalDto signalDto = simGenData.getListSignalDto().get(ii);
+            signalDto.setClctDt(EvpsUtils.getClctDt());
+            this.kafkaProducerService.sendEvpsSignalTopic(signalDto);
+
+            KafkaEvpsEventDto eventDto = simGenData.getListEventDto().get(ii);
+            eventDto.setClctDt(EvpsUtils.getClctDt());
+            this.kafkaProducerService.sendEvpsEventTopic(eventDto);
+        }
+
+        KafkaEvpsServiceEndDto endDto = simGenData.getServiceEndDto();
+        endDto.setClctDt(EvpsUtils.getClctDt());
+        this.kafkaProducerService.sendEvpsServiceEndTopic(endDto);
+
+        log.info("EvpsKafkaProducerManagerService: Run completed. {}", elapsed.elapsedTimeStr());
+        this.isRunning = false;
+    }
+
+}
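run() is invoked both at startup (via the application's run method) and from the @Async scheduler, and the plain boolean isRunning guard is not atomic across threads. A hedged sketch of a stricter re-entrancy guard, assuming one wanted the check to be race-free; the AtomicBoolean variant is illustrative, not part of this commit:

    // Hypothetical variant: an atomic guard instead of the plain boolean flag.
    private final java.util.concurrent.atomic.AtomicBoolean running = new java.util.concurrent.atomic.AtomicBoolean(false);

    public void run() {
        if (!running.compareAndSet(false, true)) {
            log.warn("EvpsKafkaProducerManagerService: Already running....................");
            return;
        }
        try {
            // ... same sequence as above: service start, node list, signal/event loop, service end ...
        }
        finally {
            running.set(false);   // always clear the guard, even if a send throws
        }
    }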

The diff of this file has been suppressed because it is too large
+ 263 - 0
evps-kafka-producer/src/main/java/com/evps/simulator/kafka/producer/service/EvpsKafkaSimGen.java


+ 67 - 0
evps-kafka-producer/src/main/resources/application.yml

@@ -0,0 +1,67 @@
+spring:
+  profiles:
+    active: prod
+  config:
+      import:
+        - optional:file:${user.dir}/conf/evps-kafka-producer.yml
+  application:
+    name: evps-comm-server
+  main:
+    web-application-type: none
+    log-startup-info: true
+    banner-mode: off
+  output:
+    ansi:
+      enabled: always
+  datasource:
+    hikari:
+      connection-test-query: SELECT 1 FROM DUAL
+      minimumIdle: 5
+      maximumPoolSize: 20
+      idleTimeout: 30000
+  lifecycle:
+    timeout-per-shutdown-phase: 10s
+
+server:
+  port: 9871
+  shutdown: graceful
+management:
+  endpoints:
+    web:
+      exposure:
+        include: health, metrics
+
+application:
+  process-id: evps-kafka-producer
+  region-id: 183
+  kafka:
+    bootstrap-servers: 172.24.0.30:9092,172.24.0.31:9093,172.24.0.32:9094
+    group-id: evps-kafka-producer
+    consumer-ack-config: 1
+    ping-topic: ping-topic
+    multi-connect: false
+    node-servers:
+    enable-node: false
+    props:
+    #  - request.timeout.ms: 100
+    #  - max.block.ms: 100
+    #  - transactional.id: tsi-comm-server-01
+    #  - acks: 0
+    #  - retries: 0
+    #  - linger.ms: 1
+
+---
+spring:
+  config:
+    activate:
+      on-profile: dev
+
+application:
+  kafka:
+    bootstrap-servers: 61.82.138.91:19092
+
+---
+spring:
+  config:
+    activate:
+      on-profile: prod

+ 41 - 0
evps-kafka-producer/src/main/resources/logback-spring-appender.xml

@@ -0,0 +1,41 @@
+<included>
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <!--        <withJansi>true</withJansi>-->
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <charset>${LOG_CHARSET}</charset>
+            <pattern>${LOG_PATTERN_CONSOLE}</pattern>
+        </encoder>
+    </appender>
+
+    <appender name="FILE_LOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <file>${LOG_PATH}${LOG_FILE_NAME}</file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <charset>${LOG_CHARSET}</charset>
+            <pattern>${LOG_PATTERN_FILE}</pattern>
+        </encoder>
+        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_BACKUP_PATH}${LOG_FILE_NAME}.${LOG_FILE_NAME_BACKUP}</fileNamePattern>
+            <maxFileSize>${MAX_FILESIZE}</maxFileSize>
+            <maxHistory>${MAX_HISTORY}</maxHistory>
+        </rollingPolicy>
+    </appender>
+
+    <appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>error</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+        <file>${LOG_PATH}${LOG_FILE_NAME_ERROR}</file>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <charset>${LOG_CHARSET}</charset>
+            <pattern>${LOG_PATTERN_ERROR}</pattern>
+        </encoder>
+        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_BACKUP_PATH}${LOG_FILE_NAME_ERROR}.${LOG_FILE_NAME_BACKUP}</fileNamePattern>
+            <maxFileSize>${MAX_FILESIZE}</maxFileSize>
+            <maxHistory>${MAX_HISTORY}</maxHistory>
+        </rollingPolicy>
+    </appender>
+
+</included>

+ 51 - 0
evps-kafka-producer/src/main/resources/logback-spring.xml

@@ -0,0 +1,51 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="60 seconds">
+    <shutdownHook class="ch.qos.logback.core.hook.DelayingShutdownHook"/>
+
+    <property name="APP_CLASS_PATH"  value="com.evps.simulator.kafka.producer"/>
+    <property name="PROJECT_PREFIX"  value="evps-kafka"/>
+    <property name="PROJECT_NAME"    value="${PROJECT_PREFIX}-producer"/>
+    <property name="ROOT_LOG_LEVEL"  value="INFO"/>
+    <property name="LOG_CHARSET"     value="UTF-8" />
+    <property name="LOG_PATH"        value="${user.home}/logs/${PROJECT_NAME}/"/>
+    <property name="LOG_BACKUP_PATH" value="${user.home}/logs/${PROJECT_NAME}/backup/"/>
+
+    <property name="LOG_FILE_NAME"         value="${PROJECT_NAME}.log"/>
+    <property name="LOG_FILE_NAME_ERROR"   value="${PROJECT_NAME}.err.log"/>
+    <property name="LOG_FILE_NAME_BACKUP"  value="%d{yyyyMMdd}_%i.log.gz"/>
+
+    <property name="MAX_FILESIZE" value="10MB"/>
+    <property name="MAX_HISTORY"  value="10"/>
+    <property name="LOG_PATTERN_FILE"        value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%-5level] %msg%n"/>
+    <property name="LOG_PATTERN_ERROR"       value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%30t] [%5level] %42logger{35}.%-20M ${PID:-} %n%msg%n"/>
+    <property name="LOG_PATTERN_PACKET"      value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%-5level] %msg%n"/>
+    <property name="LOG_PATTERN_SESSION"     value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%-5level] %msg%n"/>
+    <property name="LOG_PATTERN_ASPECT"      value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%-5level] %msg%n"/>
+    <property name="LOG_PATTERN_KAFKA"       value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] [%-5level] %msg%n"/>
+    <property name="LOG_PATTERN_CONSOLE"     value="[%d{HH:mm:ss.SSS}] [%5level] %msg %n"/>
+
+    <springProfile name="!xxx">
+        <include resource="logback-spring-appender.xml"/>
+    </springProfile>
+
+    <root level="INFO">
+        <appender-ref ref="CONSOLE"/>
+        <appender-ref ref="FILE_LOG"/>
+        <appender-ref ref="FILE_ERROR"/>
+    </root>
+
+    <springProfile name="!prod">
+        <logger name="${APP_CLASS_PATH}" level="INFO" additivity="false">
+            <appender-ref ref="CONSOLE"/>
+            <appender-ref ref="FILE_LOG"/>
+            <appender-ref ref="FILE_ERROR"/>
+        </logger>
+    </springProfile>
+
+    <springProfile name="prod">
+        <logger name="${APP_CLASS_PATH}" level="INFO" additivity="false">
+            <appender-ref ref="FILE_LOG"/>
+            <appender-ref ref="FILE_ERROR"/>
+        </logger>
+    </springProfile>
+</configuration>

+ 1 - 0
settings.gradle

@@ -2,4 +2,5 @@ rootProject.name = 'utic-evps'
 include 'evps-common'
 include 'evps-comm-server'
 include 'evps-consumer'
+include 'evps-kafka-producer'
 

Some files were not shown because too many files changed in this commit