feature: add kafka configuration
@@ -0,0 +1,26 @@
package com.eventsourcing.configuration;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.RequiredArgsConstructor;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

@Configuration
@ConfigurationProperties(prefix = "kafka")
@Data
@AllArgsConstructor
@RequiredArgsConstructor
public class KafkaConfigProperties {
    private String acks = "0";
    private String compressionType = "snappy";
    private int retries = 3;
    private int deliveryTimeoutMs = 120000;
    private int maxRequestSize = 1068576;
    private int requestTimeoutMs = 30000;
    private String orderMongoProjectionGroupId = "projection-group";
    private String enableAutoCommit = "false";
    private int consumerParallelism = 16;
    private int eventsTopicPartitions = 6;
    private int eventsTopicReplicationFactor = 1;
}
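Because of the "kafka" prefix, each field above can be overridden from external configuration (e.g. kafka.acks, kafka.retries, kafka.max-request-size via relaxed binding). A minimal binding sketch, not part of this commit, assuming spring-boot-starter-test with JUnit 5 is on the classpath; the test class name and the overridden values are illustrative only:

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.TestPropertySource;

import static org.junit.jupiter.api.Assertions.assertEquals;

// Hypothetical test class: checks that "kafka.*" properties override the defaults above.
@SpringBootTest
@TestPropertySource(properties = {"kafka.acks=all", "kafka.retries=5"})
class KafkaConfigPropertiesBindingTest {

    @Autowired
    private KafkaConfigProperties kafkaConfigProperties;

    @Test
    void bindsExternalKafkaProperties() {
        assertEquals("all", kafkaConfigProperties.getAcks());
        assertEquals(5, kafkaConfigProperties.getRetries());
    }
}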
@@ -0,0 +1,56 @@
package com.eventsourcing.configuration;

import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.DefaultErrorHandler;
import org.springframework.util.backoff.FixedBackOff;

import java.util.HashMap;
import java.util.Map;

@Configuration
@RequiredArgsConstructor
@EnableKafka
public class KafkaConsumerConfig {

    @Value(value = "${kafka.bootstrapServers:localhost:9093}")
    private String bootstrapServers;

    private final KafkaConfigProperties kafkaConfigProperties;

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, byte[]> kafkaListenerContainerFactory(ConsumerFactory<String, byte[]> consumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, byte[]> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        factory.setConcurrency(Runtime.getRuntime().availableProcessors());
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        factory.setCommonErrorHandler(new DefaultErrorHandler(new FixedBackOff(1000L, 2L)));
        return factory;
    }

    @Bean
    public ConsumerFactory<String, byte[]> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerProps());
    }

    private Map<String, Object> consumerProps() {
        final Map<String, Object> consumerProps = new HashMap<>();
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaConfigProperties.getOrderMongoProjectionGroupId());
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, kafkaConfigProperties.getEnableAutoCommit());
        return consumerProps;
    }
}
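Because enable.auto.commit is false and the container ack mode is MANUAL_IMMEDIATE, any listener built on this factory must acknowledge records explicitly. A minimal consumer sketch, not part of this commit; the class name and the processing step are illustrative:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

// Hypothetical listener; relies on the manual-ack container factory defined above.
@Component
class BankAccountEventListener {

    @KafkaListener(
            topics = "${order.kafka.topics.bank-account-event-store:bank-account-event-store}",
            containerFactory = "kafkaListenerContainerFactory")
    void onEvent(ConsumerRecord<String, byte[]> record, Acknowledgment ack) {
        // deserialize and project record.value() here ...
        ack.acknowledge(); // required: auto-commit is disabled and ack mode is MANUAL_IMMEDIATE
    }
}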
@@ -0,0 +1,48 @@
package com.eventsourcing.configuration;

import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
@RequiredArgsConstructor
public class KafkaProducerConfig {

    @Value(value = "${kafka.bootstrapServers:localhost:9093}")
    private String bootstrapServers;

    private final KafkaConfigProperties kafkaConfigProperties;

    private Map<String, Object> senderProps() {
        Map<String, Object> producerProps = new HashMap<>();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
        producerProps.put(ProducerConfig.ACKS_CONFIG, kafkaConfigProperties.getAcks());
        // producerProps.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, kafkaConfigProperties.getCompressionType());
        producerProps.put(ProducerConfig.RETRIES_CONFIG, kafkaConfigProperties.getRetries());
        producerProps.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, kafkaConfigProperties.getDeliveryTimeoutMs());
        producerProps.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, kafkaConfigProperties.getMaxRequestSize());
        producerProps.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, kafkaConfigProperties.getRequestTimeoutMs());
        return producerProps;
    }

    @Bean
    public ProducerFactory<String, byte[]> producerFactory() {
        return new DefaultKafkaProducerFactory<>(senderProps());
    }

    @Bean
    public KafkaTemplate<String, byte[]> kafkaTemplate(ProducerFactory<String, byte[]> producerFactory) {
        return new KafkaTemplate<>(producerFactory);
    }
}
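The KafkaTemplate bean above is the entry point for publishing events. A minimal producer sketch, not part of this commit and assuming Spring Kafka 3.x (where send() returns a CompletableFuture); the service name, topic, and key are illustrative:

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

// Hypothetical publisher; uses the KafkaTemplate<String, byte[]> bean defined above.
@Slf4j
@Service
@RequiredArgsConstructor
class BankAccountEventPublisher {

    private final KafkaTemplate<String, byte[]> kafkaTemplate;

    void publish(String aggregateId, byte[] serializedEvent) {
        kafkaTemplate.send("bank-account-event-store", aggregateId, serializedEvent)
                .whenComplete((result, ex) -> {
                    if (ex != null) {
                        log.error("failed to publish event for aggregate {}", aggregateId, ex);
                    }
                });
    }
}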
@@ -0,0 +1,46 @@
package com.eventsourcing.configuration;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaAdmin;

import java.util.HashMap;
import java.util.Map;

@Configuration
@Slf4j
@RequiredArgsConstructor
public class KafkaTopicConfiguration {

    @Value(value = "${kafka.bootstrapServers:localhost:9093}")
    private String bootstrapServers;

    @Value(value = "${order.kafka.topics.bank-account-event-store:bank-account-event-store}")
    private String bankAccountTopicName;

    @Bean
    public KafkaAdmin kafkaAdmin() {
        final Map<String, Object> configs = new HashMap<>();
        configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        return new KafkaAdmin(configs);
    }

    @Bean
    public NewTopic bankAccountEventStoreTopicInitializer(KafkaAdmin kafkaAdmin) {
        try {
            final var topic = new NewTopic(bankAccountTopicName, 3, (short) 1);
            kafkaAdmin.createOrModifyTopics(topic);
            log.info("(bankAccountEventStoreTopicInitializer) topic: {}", topic);
            return topic;
        } catch (Exception e) {
            log.error(e.getMessage());
            return null;
        }
    }
}
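KafkaAdmin also creates or modifies any NewTopic beans it finds at application startup, so an alternative to calling createOrModifyTopics by hand (and returning null on failure) is to declare the topic as a plain bean. A minimal sketch, not part of this commit; the class name is hypothetical, and it reuses the partition/replication defaults from KafkaConfigProperties:

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

// Hypothetical alternative; the KafkaAdmin bean declared above picks up NewTopic beans automatically.
@Configuration
public class DeclarativeTopicConfiguration {

    @Value(value = "${order.kafka.topics.bank-account-event-store:bank-account-event-store}")
    private String bankAccountTopicName;

    @Bean
    public NewTopic bankAccountEventStoreTopic(KafkaConfigProperties kafkaConfigProperties) {
        return TopicBuilder.name(bankAccountTopicName)
                .partitions(kafkaConfigProperties.getEventsTopicPartitions())
                .replicas(kafkaConfigProperties.getEventsTopicReplicationFactor())
                .build();
    }
}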