Add data synchronization code and documentation using Kafka

roy-zz
2022-04-28 00:24:58 +09:00
parent 432149b6d2
commit 2e69d13b8a
14 changed files with 393 additions and 0 deletions

@@ -1,6 +1,7 @@
dependencies {
    implementation(project(":util"))
    implementation 'org.springframework.kafka:spring-kafka'
    implementation 'org.springframework.boot:spring-boot-starter-web'
    implementation 'org.springframework.boot:spring-boot-starter-validation'
    implementation 'org.springframework.boot:spring-boot-starter-data-jpa'

@@ -0,0 +1,36 @@
package com.roy.springcloud.catalogservice.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

import java.util.HashMap;
import java.util.Map;

@EnableKafka
@Configuration
public class KafkaConsumerConfig {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> properties = new HashMap<>();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "consumerGroupId");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(properties);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory = new ConcurrentKafkaListenerContainerFactory<>();
        kafkaListenerContainerFactory.setConsumerFactory(consumerFactory());
        return kafkaListenerContainerFactory;
    }
}
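
The broker address and consumer group id are hard-coded in the factory above. As a minimal variation sketch (not part of this commit), the same factory could read them from configuration instead, assuming properties such as spring.kafka.bootstrap-servers and spring.kafka.consumer.group-id are defined in application.yml; the class name below is hypothetical and the kafkaListenerContainerFactory bean would stay as shown above.

package com.roy.springcloud.catalogservice.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

import java.util.HashMap;
import java.util.Map;

// Sketch only: the same consumer factory with the broker address and group id
// injected from properties instead of hard-coded. The defaults after ':' fall
// back to the values used in the commit above.
@Configuration
public class ExternalizedKafkaConsumerConfig {

    @Value("${spring.kafka.bootstrap-servers:127.0.0.1:9092}")
    private String bootstrapServers;

    @Value("${spring.kafka.consumer.group-id:consumerGroupId}")
    private String groupId;

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> properties = new HashMap<>();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(properties);
    }
}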

@@ -0,0 +1,5 @@
package com.roy.springcloud.catalogservice.service;

public interface KafkaConsumer {
    void processMessage(String kafkaMessage);
}

@@ -0,0 +1,48 @@
package com.roy.springcloud.catalogservice.service.impl;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.roy.springcloud.catalogservice.domain.Catalog;
import com.roy.springcloud.catalogservice.repository.CatalogRepository;
import com.roy.springcloud.catalogservice.service.KafkaConsumer;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

@Slf4j
@Service
@RequiredArgsConstructor
public class KafkaConsumerImpl implements KafkaConsumer {

    private final CatalogRepository catalogRepository;

    @Override
    @Transactional
    @KafkaListener(topics = "example-order-topic")
    public void processMessage(String kafkaMessage) {
        log.info("Kafka Message: ======> {}", kafkaMessage);
        Map<String, Object> map = new HashMap<>();
        ObjectMapper mapper = new ObjectMapper();
        try {
            map = mapper.readValue(kafkaMessage, new TypeReference<>() {});
        } catch (JsonProcessingException e) {
            log.error("Failed to parse Kafka message: {}", kafkaMessage, e);
            return;
        }
        // Optional.ofNullable is needed here: map.get() returns null for a missing key,
        // and Optional.of(null) would throw a NullPointerException instead of the intended message.
        String targetProductId = (String) Optional.ofNullable(map.get("productId"))
                .orElseThrow(() -> new IllegalStateException("productId not found"));
        Catalog savedCatalog = catalogRepository.findByProductId(targetProductId)
                .orElseThrow(() -> new IllegalStateException("Catalog not found"));
        Integer soldQuantity = (Integer) Optional.ofNullable(map.get("quantity"))
                .orElseThrow(() -> new IllegalStateException("quantity not found"));
        // The entity is managed inside the transaction, so JPA dirty checking persists the new stock.
        savedCatalog.setStock(savedCatalog.getStock() - soldQuantity);
    }
}
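
Only the consuming side appears in the hunks above. As a rough sketch of the other half of the synchronization (hypothetical class and package, not taken from this commit), a producer could publish the JSON payload that KafkaConsumerImpl expects, assuming a KafkaTemplate<String, String> bean is configured and the same example-order-topic topic is used.

package com.roy.springcloud.orderservice.messagequeue; // hypothetical location, not in these hunks

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.Map;

@Slf4j
@Service
@RequiredArgsConstructor
public class OrderKafkaProducer {

    private final KafkaTemplate<String, String> kafkaTemplate;
    private final ObjectMapper mapper = new ObjectMapper();

    // Publishes the JSON payload that KafkaConsumerImpl above parses:
    // a map with "productId" and "quantity" keys.
    public void sendOrder(String productId, int quantity) {
        Map<String, Object> payload = new HashMap<>();
        payload.put("productId", productId);
        payload.put("quantity", quantity);
        try {
            kafkaTemplate.send("example-order-topic", mapper.writeValueAsString(payload));
        } catch (JsonProcessingException e) {
            log.error("Failed to serialize order payload", e);
        }
    }
}

Whatever publishes the message, the contract the listener relies on is simply a JSON object carrying productId and quantity.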