Compare commits


5 Commits

Author              SHA1        Message                                                           Date
Marius Bogoevici    9471976ddd  Release 2.0.0.M1                                                  2017-06-23 17:03:39 -04:00
Marius Bogoevici    4c4873b3c1  Re-add Spring Kafka version                                       2017-06-23 13:17:59 -04:00
Marius Bogoevici    971bc96962  Use Spring Boot and dependencies provided by Spring Cloud Build   2017-06-23 13:12:46 -04:00
Marius Bogoevici    47a225d02a  Merge branch 'master' into 2.0.x                                  2017-06-23 12:44:14 -04:00
Marius Bogoevici    e440378e44  Update version to 2.0.0.BUILD-SNAPSHOT                            2017-05-21 11:17:58 -04:00
                                - Update Spring Boot to version 2.0.0.BUILD-SNAPSHOT
                                - Set Kafka version to 0.10.2
                                - Remove tests for Kafka 0.9 and 0.10.0
43 changed files with 246 additions and 2621 deletions

pom.xml
View File

@@ -2,44 +2,31 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
<version>2.0.0.M1</version>
<packaging>pom</packaging>
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-build</artifactId>
<version>1.3.3.RELEASE</version>
<version>2.0.0.M1</version>
<relativePath />
</parent>
<properties>
<java.version>1.7</java.version>
<kafka.version>0.10.1.1</kafka.version>
<spring-kafka.version>1.1.6.RELEASE</spring-kafka.version>
<spring-integration-kafka.version>2.1.1.RELEASE</spring-integration-kafka.version>
<spring-cloud-stream.version>1.3.0.M2</spring-cloud-stream.version>
<spring-cloud-build.version>1.3.3.RELEASE</spring-cloud-build.version>
<kafka.version>0.10.2.0</kafka.version>
<spring-kafka.version>2.0.0.M2</spring-kafka.version>
<spring-integration-kafka.version>3.0.0.M1</spring-integration-kafka.version>
<spring-cloud-stream.version>2.0.0.M1</spring-cloud-stream.version>
</properties>
<modules>
<module>spring-cloud-stream-binder-kafka</module>
<module>spring-cloud-starter-stream-kafka</module>
<module>spring-cloud-stream-binder-kafka-docs</module>
<module>spring-cloud-stream-binder-kafka-0.10.1-test</module>
<module>spring-cloud-stream-binder-kafka-0.10.2-test</module>
<module>spring-cloud-stream-binder-kafka-core</module>
<module>spring-cloud-stream-binder-kstream</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream</artifactId>
@@ -74,11 +61,6 @@
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>${spring-kafka.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
@@ -102,20 +84,9 @@
<classifier>test</classifier>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
<version>${kafka.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
@@ -160,7 +131,7 @@
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-build-tools</artifactId>
<version>${spring-cloud-build.version}</version>
<version>2.0.0.M1</version>
</dependency>
</dependencies>
<executions>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-starter-stream-kafka</artifactId>
<description>Spring Cloud Starter Stream Kafka</description>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-0.10.1-test</artifactId>
<description>Spring Cloud Stream Kafka Binder 0.10.1 Tests</description>
@@ -54,6 +54,7 @@
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
<version>1.1.6.RELEASE</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -67,13 +68,6 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-0.10.2-test</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-test</artifactId>

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2015-2017 the original author or authors.
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,16 +16,11 @@
package org.springframework.cloud.stream.binder.kafka;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.admin.AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.admin.Kafka10AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.provisioning.ConsumerDestination;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.kafka.support.LoggingProducerListener;
import org.springframework.kafka.support.ProducerListener;
@@ -39,7 +34,6 @@ import org.springframework.kafka.support.ProducerListener;
*/
public class Kafka10TestBinder extends AbstractKafkaTestBinder {
@SuppressWarnings({ "rawtypes", "unchecked" })
public Kafka10TestBinder(KafkaBinderConfigurationProperties binderConfiguration) {
try {
AdminUtilsOperation adminUtilsOperation = new Kafka10AdminUtilsOperation();
@@ -47,26 +41,13 @@ public class Kafka10TestBinder extends AbstractKafkaTestBinder {
new KafkaTopicProvisioner(binderConfiguration, adminUtilsOperation);
provisioningProvider.afterPropertiesSet();
KafkaMessageChannelBinder binder = new KafkaMessageChannelBinder(binderConfiguration,
provisioningProvider) {
/*
* Some tests use multiple instance indexes for the same topic; we need to make
* the error infrastructure beans unique.
*/
@Override
protected String errorsBaseName(ConsumerDestination destination, String group,
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties) {
return super.errorsBaseName(destination, group, consumerProperties) + "-"
+ consumerProperties.getInstanceIndex();
}
};
KafkaMessageChannelBinder binder = new KafkaMessageChannelBinder(binderConfiguration, provisioningProvider);
binder.setCodec(AbstractKafkaTestBinder.getCodec());
ProducerListener producerListener = new LoggingProducerListener();
binder.setProducerListener(producerListener);
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
GenericApplicationContext context = new GenericApplicationContext();
context.refresh();
binder.setApplicationContext(context);
binder.afterPropertiesSet();
this.setBinder(binder);
@@ -76,10 +57,4 @@ public class Kafka10TestBinder extends AbstractKafkaTestBinder {
}
}
@Configuration
@EnableIntegration
static class Config {
}
}

View File

@@ -62,15 +62,13 @@ import static org.junit.Assert.assertTrue;
/**
* Integration tests for the {@link KafkaMessageChannelBinder}.
*
* This test specifically tests for the 0.10.1.x version of Kafka.
*
* @author Eric Bottard
* @author Marius Bogoevici
* @author Mark Fisher
* @author Ilayaperumal Gopinathan
*/
public class Kafka_0_10_1_BinderTests extends Kafka_0_10_2_BinderTests {
public class Kafka_0_10_1_BinderTests extends KafkaBinderTests {
private final String CLASS_UNDER_TEST_NAME = KafkaMessageChannelBinder.class.getSimpleName();

View File

@@ -1,120 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-0.10.2-test</artifactId>
<description>Spring Cloud Stream Kafka Binder 0.10.2 Tests</description>
<url>http://projects.spring.io/spring-cloud</url>
<organization>
<name>Pivotal Software, Inc.</name>
<url>http://www.spring.io</url>
</organization>
<properties>
<main.basedir>${basedir}/../..</main.basedir>
<kafka.version>0.10.2.1</kafka.version>
<spring-kafka.version>1.2.2.RELEASE</spring-kafka.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-schema</artifactId>
<version>${spring-cloud-stream.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>3.2.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry</artifactId>
<version>3.2.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<repositories>
<repository>
<id>confluent</id>
<url>http://packages.confluent.io/maven/</url>
</repository>
</repositories>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -1,241 +0,0 @@
/*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import io.confluent.kafka.schemaregistry.rest.SchemaRegistryConfig;
import io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication;
import kafka.utils.ZKStringSerializer$;
import kafka.utils.ZkUtils;
import org.I0Itec.zkclient.ZkClient;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.assertj.core.api.Assertions;
import org.eclipse.jetty.server.Server;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.Binding;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.Spy;
import org.springframework.cloud.stream.binder.kafka.admin.Kafka10AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.kafka.test.core.BrokerAddress;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.SubscribableChannel;
import org.springframework.messaging.support.MessageBuilder;
import static org.junit.Assert.assertTrue;
/**
* Integration tests for the {@link KafkaMessageChannelBinder}.
*
* This test specifically tests for the 0.10.2.x version of Kafka.
*
* @author Eric Bottard
* @author Marius Bogoevici
* @author Mark Fisher
* @author Ilayaperumal Gopinathan
*/
public class Kafka_0_10_2_BinderTests extends KafkaBinderTests {
private final String CLASS_UNDER_TEST_NAME = KafkaMessageChannelBinder.class.getSimpleName();
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, 10);
private Kafka10TestBinder binder;
private Kafka10AdminUtilsOperation adminUtilsOperation = new Kafka10AdminUtilsOperation();
@Override
protected void binderBindUnbindLatency() throws InterruptedException {
Thread.sleep(500);
}
@Override
protected Kafka10TestBinder getBinder() {
if (binder == null) {
KafkaBinderConfigurationProperties binderConfiguration = createConfigurationProperties();
binder = new Kafka10TestBinder(binderConfiguration);
}
return binder;
}
protected KafkaBinderConfigurationProperties createConfigurationProperties() {
KafkaBinderConfigurationProperties binderConfiguration = new KafkaBinderConfigurationProperties();
BrokerAddress[] brokerAddresses = embeddedKafka.getBrokerAddresses();
List<String> bAddresses = new ArrayList<>();
for (BrokerAddress bAddress : brokerAddresses) {
bAddresses.add(bAddress.toString());
}
String[] foo = new String[bAddresses.size()];
binderConfiguration.setBrokers(bAddresses.toArray(foo));
binderConfiguration.setZkNodes(embeddedKafka.getZookeeperConnectionString());
return binderConfiguration;
}
@Override
protected int partitionSize(String topic) {
return consumerFactory().createConsumer().partitionsFor(topic).size();
}
@Override
protected ZkUtils getZkUtils(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties) {
final ZkClient zkClient = new ZkClient(kafkaBinderConfigurationProperties.getZkConnectionString(),
kafkaBinderConfigurationProperties.getZkSessionTimeout(), kafkaBinderConfigurationProperties.getZkConnectionTimeout(),
ZKStringSerializer$.MODULE$);
return new ZkUtils(zkClient, null, false);
}
@Override
protected void invokeCreateTopic(ZkUtils zkUtils, String topic, int partitions, int replicationFactor, Properties topicConfig) {
adminUtilsOperation.invokeCreateTopic(zkUtils, topic, partitions, replicationFactor, new Properties());
}
@Override
protected int invokePartitionSize(String topic, ZkUtils zkUtils) {
return adminUtilsOperation.partitionSize(topic, zkUtils);
}
@Override
public String getKafkaOffsetHeaderKey() {
return KafkaHeaders.OFFSET;
}
@Override
protected Binder getBinder(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties) {
return new Kafka10TestBinder(kafkaBinderConfigurationProperties);
}
@Before
public void init() {
String multiplier = System.getenv("KAFKA_TIMEOUT_MULTIPLIER");
if (multiplier != null) {
timeoutMultiplier = Double.parseDouble(multiplier);
}
}
@Override
protected boolean usesExplicitRouting() {
return false;
}
@Override
protected String getClassUnderTestName() {
return CLASS_UNDER_TEST_NAME;
}
@Override
public Spy spyOn(final String name) {
throw new UnsupportedOperationException("'spyOn' is not used by Kafka tests");
}
private ConsumerFactory<byte[], byte[]> consumerFactory() {
Map<String, Object> props = new HashMap<>();
KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, configurationProperties.getKafkaConnectionString());
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
props.put(ConsumerConfig.GROUP_ID_CONFIG, "TEST-CONSUMER-GROUP");
Deserializer<byte[]> valueDecoder = new ByteArrayDeserializer();
Deserializer<byte[]> keyDecoder = new ByteArrayDeserializer();
return new DefaultKafkaConsumerFactory<>(props, keyDecoder, valueDecoder);
}
@Test
@SuppressWarnings("unchecked")
public void testCustomAvroSerialization() throws Exception {
KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
final ZkClient zkClient = new ZkClient(configurationProperties.getZkConnectionString(),
configurationProperties.getZkSessionTimeout(), configurationProperties.getZkConnectionTimeout(),
ZKStringSerializer$.MODULE$);
final ZkUtils zkUtils = new ZkUtils(zkClient, null, false);
Map<String, Object> schemaRegistryProps = new HashMap<>();
schemaRegistryProps.put("kafkastore.connection.url", configurationProperties.getZkConnectionString());
schemaRegistryProps.put("listeners", "http://0.0.0.0:8082");
schemaRegistryProps.put("port", "8082");
schemaRegistryProps.put("kafkastore.topic", "_schemas");
SchemaRegistryConfig config = new SchemaRegistryConfig(schemaRegistryProps);
SchemaRegistryRestApplication app = new SchemaRegistryRestApplication(config);
Server server = app.createServer();
server.start();
long endTime = System.currentTimeMillis() + 5000;
while(true) {
if (server.isRunning()) {
break;
}
else if (System.currentTimeMillis() > endTime) {
Assertions.fail("Kafka Schema Registry Server failed to start");
}
}
User1 firstOutboundFoo = new User1();
String userName1 = "foo-name" + UUID.randomUUID().toString();
String favColor1 = "foo-color" + UUID.randomUUID().toString();
firstOutboundFoo.setName(userName1);
firstOutboundFoo.setFavoriteColor(favColor1);
Message<?> message = MessageBuilder.withPayload(firstOutboundFoo).build();
SubscribableChannel moduleOutputChannel = new DirectChannel();
String testTopicName = "existing" + System.currentTimeMillis();
invokeCreateTopic(zkUtils, testTopicName, 6, 1, new Properties());
configurationProperties.setAutoAddPartitions(true);
Binder binder = getBinder(configurationProperties);
QueueChannel moduleInputChannel = new QueueChannel();
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
producerProperties.getExtension().getConfiguration().put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
producerProperties.getExtension().getConfiguration().put("schema.registry.url", "http://localhost:8082");
producerProperties.setUseNativeEncoding(true);
Binding<MessageChannel> producerBinding = binder.bindProducer(testTopicName, moduleOutputChannel, producerProperties);
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
consumerProperties.getExtension().getConfiguration().put("value.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
consumerProperties.getExtension().getConfiguration().put("schema.registry.url", "http://localhost:8082");
Binding<MessageChannel> consumerBinding = binder.bindConsumer(testTopicName, "test", moduleInputChannel, consumerProperties);
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
moduleOutputChannel.send(message);
Message<?> inbound = receive(moduleInputChannel);
Assertions.assertThat(inbound).isNotNull();
assertTrue(message.getPayload() instanceof User1);
User1 receivedUser = (User1) message.getPayload();
Assertions.assertThat(receivedUser.getName()).isEqualTo(userName1);
Assertions.assertThat(receivedUser.getFavoriteColor()).isEqualTo(favColor1);
producerBinding.unbind();
consumerBinding.unbind();
}
}

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<description>Spring Cloud Stream Kafka Binder Core</description>

View File

@@ -16,18 +16,10 @@
package org.springframework.cloud.stream.binder.kafka.properties;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
/**
@@ -39,16 +31,13 @@ import org.springframework.util.StringUtils;
@ConfigurationProperties(prefix = "spring.cloud.stream.kafka.binder")
public class KafkaBinderConfigurationProperties {
@Autowired(required = false)
private KafkaProperties kafkaProperties;
private String[] zkNodes = new String[] {"localhost"};
private String[] zkNodes = new String[] { "localhost" };
private Map<String, String> configuration = new HashMap<>();
private Map<String, Object> configuration = new HashMap<>();
private String defaultZkPort = "2181";
private String[] brokers = new String[] { "localhost" };
private String[] brokers = new String[] {"localhost"};
private String defaultBrokerPort = "9092";
@@ -260,78 +249,16 @@ public class KafkaBinderConfigurationProperties {
this.socketBufferSize = socketBufferSize;
}
public Map<String, String> getConfiguration() {
public Map<String, Object> getConfiguration() {
return configuration;
}
public void setConfiguration(Map<String, String> configuration) {
public void setConfiguration(Map<String, Object> configuration) {
this.configuration = configuration;
}
public Map<String, Object> getConsumerConfiguration() {
Map<String, Object> consumerConfiguration = new HashMap<>();
// If Spring Boot Kafka properties are present, add them with lowest precedence
if (this.kafkaProperties != null) {
consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties());
}
// Copy configured binder properties
for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) {
if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) {
consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
}
}
// Override Spring Boot bootstrap server setting if left to default with the value
// configured in the binder
if (ObjectUtils.isEmpty(consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
else {
Object boostrapServersConfig = consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (boostrapServersConfig instanceof List) {
@SuppressWarnings("unchecked")
List<String> bootStrapServers = (List<String>) consumerConfiguration
.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
}
}
return Collections.unmodifiableMap(consumerConfiguration);
}
public Map<String, Object> getProducerConfiguration() {
Map<String, Object> producerConfiguration = new HashMap<>();
// If Spring Boot Kafka properties are present, add them with lowest precedence
if (this.kafkaProperties != null) {
producerConfiguration.putAll(this.kafkaProperties.buildProducerProperties());
}
// Copy configured binder properties
for (Map.Entry<String, String> configurationEntry : configuration.entrySet()) {
if (ProducerConfig.configNames().contains(configurationEntry.getKey())) {
producerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
}
}
// Override Spring Boot bootstrap server setting if left to default with the value
// configured in the binder
if (ObjectUtils.isEmpty(producerConfiguration.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
producerConfiguration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
else {
Object boostrapServersConfig = producerConfiguration.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (boostrapServersConfig instanceof List) {
@SuppressWarnings("unchecked")
List<String> bootStrapServers = (List<String>) producerConfiguration
.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
producerConfiguration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
}
}
return Collections.unmodifiableMap(producerConfiguration);
}
public JaasLoginModuleConfiguration getJaas() {
return this.jaas;
return jaas;
}
public void setJaas(JaasLoginModuleConfiguration jaas) {

View File

@@ -23,9 +23,7 @@ import java.util.Map;
* @author Marius Bogoevici
* @author Ilayaperumal Gopinathan
*
* <p>
* Thanks to Laszlo Szabo for providing the initial patch for generic property support.
* </p>
* <p>Thanks to Laszlo Szabo for providing the initial patch for generic property support.</p>
*/
public class KafkaConsumerProperties {
@@ -35,6 +33,8 @@ public class KafkaConsumerProperties {
private Boolean autoCommitOnError;
private boolean resetOffsets;
private StartOffset startOffset;
private boolean enableDlq;
@@ -53,6 +53,14 @@ public class KafkaConsumerProperties {
this.autoCommitOffset = autoCommitOffset;
}
public boolean isResetOffsets() {
return this.resetOffsets;
}
public void setResetOffsets(boolean resetOffsets) {
this.resetOffsets = resetOffsets;
}
public StartOffset getStartOffset() {
return this.startOffset;
}
@@ -94,8 +102,7 @@ public class KafkaConsumerProperties {
}
public enum StartOffset {
earliest(-2L),
latest(-1L);
earliest(-2L), latest(-1L);
private final long referencePoint;

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-docs</artifactId>

View File

@@ -2,7 +2,6 @@
--
This guide describes the Apache Kafka implementation of the Spring Cloud Stream Binder.
It contains information about its design, usage and configuration options, as well as information on how the Spring Cloud Stream concepts map onto Apache Kafka-specific constructs.
In addition, this guide explains the Kafka Streams binding capabilities of Spring Cloud Stream.
--
== Usage
@@ -147,8 +146,12 @@ recoveryInterval::
The interval between connection recovery attempts, in milliseconds.
+
Default: `5000`.
resetOffsets::
Whether to reset offsets on the consumer to the value provided by `startOffset`.
+
Default: `false`.
startOffset::
The starting offset for new groups.
The starting offset for new groups, or when `resetOffsets` is `true`.
Allowed values: `earliest`, `latest`.
If the consumer group is set explicitly for the consumer 'binding' (via `spring.cloud.stream.bindings.<channelName>.group`), then 'startOffset' is set to `earliest`; otherwise it is set to `latest` for the `anonymous` consumer group.
+
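A minimal sketch combining the two consumer properties above to replay a topic from the beginning on startup (the `spring.cloud.stream.kafka.bindings.<channelName>.consumer.` prefix is assumed from the binder's property convention; the `input` channel name is hypothetical):
[source]
----
spring.cloud.stream.bindings.input.group=myGroup
# re-seek to startOffset on startup instead of resuming from committed offsets
spring.cloud.stream.kafka.bindings.input.consumer.resetOffsets=true
spring.cloud.stream.kafka.bindings.input.consumer.startOffset=earliest
----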
@@ -430,98 +433,4 @@ On the other hand, if auto topic creation is disabled on the server, then care m
If you want to have full control over how partitions are allocated, then leave the default settings as they are, i.e. do not exclude the Kafka broker jar and ensure that `spring.cloud.stream.kafka.binder.autoCreateTopics` is set to `true`, which is the default.
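A minimal sketch of the binder-level settings referenced above; `autoCreateTopics` defaults to `true`, and `autoAddPartitions` (seen in this changeset's test configuration) is included as an assumption:
[source]
----
# default; the binder creates topics with the configured partition count
spring.cloud.stream.kafka.binder.autoCreateTopics=true
# assumption: also allow the binder to add partitions to existing topics
spring.cloud.stream.kafka.binder.autoAddPartitions=true
----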
== Kafka Streams Binding Capabilities of Spring Cloud Stream
Spring Cloud Stream Kafka support also includes a binder specifically designed for Kafka Streams binding.
Using this binder, applications can be written that leverage the Kafka Streams API.
For more information on Kafka Streams, see the https://kafka.apache.org/documentation/streams/developer-guide[Kafka Streams API Developer Manual].
Kafka Streams support in Spring Cloud Stream is based on the foundations provided by the Spring Kafka project. For details on that support, see http://docs.spring.io/spring-kafka/reference/html/_reference.html#kafka-streams[Kafka Streams Support in Spring Kafka].
Here are the Maven coordinates for the Spring Cloud Stream KStream binder artifact:
[source,xml]
----
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kstream</artifactId>
</dependency>
----
In addition to leveraging the Spring Cloud Stream programming model (which is based on Spring Boot), a major benefit of the KStream binder is that it avoids the boilerplate configuration that would otherwise have to be written when using the Kafka Streams API directly.
The current support exposes the high-level streams DSL of the Kafka Streams API through Spring Cloud Stream.
=== Usage example of the high-level streams DSL
This application listens to a Kafka topic and writes the word count for each unique word that it sees within a 5-second time window.
[source]
----
@SpringBootApplication
@EnableBinding(KStreamProcessor.class)
public class WordCountProcessorApplication {
@StreamListener("input")
@SendTo("output")
public KStream<?, String> process(KStream<?, String> input) {
return input
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
.map((key, word) -> new KeyValue<>(word, word))
.groupByKey(Serdes.String(), Serdes.String())
.count(TimeWindows.of(5000), "store-name")
.toStream()
.map((w, c) -> new KeyValue<>(null, "Count for " + w.key() + ": " + c));
}
public static void main(String[] args) {
SpringApplication.run(WordCountProcessorApplication.class, args);
}
}
----
If you build it as a Spring Boot runnable fat JAR, you can run the above example as follows:
[source]
----
java -jar uber.jar --spring.cloud.stream.bindings.input.destination=words --spring.cloud.stream.bindings.output.destination=counts
----
This means that the application listens to the incoming Kafka topic `words` and writes to the outgoing Kafka topic `counts`.
Spring Cloud Stream ensures that the messages from both the incoming and outgoing topics are bound as KStream objects.
The developer can thus focus exclusively on the business aspects of the code, i.e. writing the logic required in the processor, rather than setting up the streams-specific configuration required by the Kafka Streams infrastructure.
All of that boilerplate is handled by Spring Cloud Stream behind the scenes.
=== Support for interactive queries
If access to the `KafkaStreams` instance is needed for interactive queries, it can be obtained via `KStreamBuilderFactoryBean.getKafkaStreams()`.
You can autowire the `KStreamBuilderFactoryBean` instance provided by the KStream binder, obtain the `KafkaStreams` instance from it, retrieve the underlying state store, and execute queries against it.
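A minimal sketch of such a query against the word-count example above, using the Kafka 0.10.2 interactive-query APIs; the store name `store-name` comes from that example, while the surrounding class and method, and the package location of `KStreamBuilderFactoryBean`, are assumptions:
[source,java]
----
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;

import org.springframework.beans.factory.annotation.Autowired;
// assumption: package of KStreamBuilderFactoryBean in this spring-kafka milestone
import org.springframework.kafka.core.KStreamBuilderFactoryBean;

public class WordCountQueryService {

	@Autowired
	private KStreamBuilderFactoryBean kStreamBuilderFactoryBean;

	public long countForWord(String word, long fromMillis, long toMillis) {
		KafkaStreams kafkaStreams = kStreamBuilderFactoryBean.getKafkaStreams();
		// "store-name" matches the windowed count store created in the example above
		ReadOnlyWindowStore<String, Long> store = kafkaStreams.store(
				"store-name", QueryableStoreTypes.<String, Long>windowStore());
		long total = 0;
		// sum the counts of all 5-second windows that fall in the requested range
		try (WindowStoreIterator<Long> iterator = store.fetch(word, fromMillis, toMillis)) {
			while (iterator.hasNext()) {
				total += iterator.next().value;
			}
		}
		return total;
	}
}
----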
=== Kafka Streams properties
configuration::
Map with key/value pairs containing properties pertaining to the Kafka Streams API.
This property must be prefixed with `spring.cloud.stream.kstream.binder.`.
Following are some examples of using this property.
[source]
----
spring.cloud.stream.kstream.binder.configuration.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde
spring.cloud.stream.kstream.binder.configuration.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde
spring.cloud.stream.kstream.binder.configuration.commit.interval.ms=1000
----
For more information about all the properties that may go into the streams configuration, see the StreamsConfig JavaDocs.
There can also be binding-specific properties.
For instance, you can use a different Serde for your input or output destination.
[source]
----
spring.cloud.stream.kstream.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde
spring.cloud.stream.kstream.bindings.output.producer.valueSerde=org.apache.kafka.common.serialization.Serdes$LongSerde
----

View File

@@ -10,13 +10,14 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
<version>2.0.0.M1</version>
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<version>2.0.0.M1</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -57,6 +58,7 @@
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
<version>${spring-integration-kafka.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>

View File

@@ -112,8 +112,8 @@ public class KafkaBinderMetrics implements PublicMetrics {
Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
if (!ObjectUtils.isEmpty(binderConfigurationProperties.getConsumerConfiguration())) {
props.putAll(binderConfigurationProperties.getConsumerConfiguration());
if (!ObjectUtils.isEmpty(binderConfigurationProperties.getConfiguration())) {
props.putAll(binderConfigurationProperties.getConfiguration());
}
if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,

View File

@@ -52,22 +52,20 @@ import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.integration.core.MessageProducer;
import org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter;
import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler;
import org.springframework.integration.kafka.support.RawRecordHeaderErrorMessageStrategy;
import org.springframework.integration.support.ErrorMessageStrategy;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.AbstractMessageListenerContainer;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ErrorHandler;
import org.springframework.kafka.listener.config.ContainerProperties;
import org.springframework.kafka.support.ProducerListener;
import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.support.TopicPartitionInitialOffset;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.MessagingException;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
@@ -89,18 +87,17 @@ import org.springframework.util.concurrent.ListenableFutureCallback;
* @author Doug Saus
*/
public class KafkaMessageChannelBinder extends
AbstractMessageChannelBinder<ExtendedConsumerProperties<KafkaConsumerProperties>,
ExtendedProducerProperties<KafkaProducerProperties>, KafkaTopicProvisioner>
AbstractMessageChannelBinder<ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>, KafkaTopicProvisioner>
implements ExtendedPropertiesBinder<MessageChannel, KafkaConsumerProperties, KafkaProducerProperties> {
private final KafkaBinderConfigurationProperties configurationProperties;
private final Map<String, TopicInformation> topicsInUse = new HashMap<>();
private ProducerListener<byte[], byte[]> producerListener;
private KafkaExtendedBindingProperties extendedBindingProperties = new KafkaExtendedBindingProperties();
private final Map<String, TopicInformation> topicsInUse = new HashMap<>();
public KafkaMessageChannelBinder(KafkaBinderConfigurationProperties configurationProperties,
KafkaTopicProvisioner provisioningProvider) {
super(false, headersToMap(configurationProperties), provisioningProvider);
@@ -183,8 +180,8 @@ public class KafkaMessageChannelBinder extends
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
props.put(ProducerConfig.ACKS_CONFIG, String.valueOf(this.configurationProperties.getRequiredAcks()));
if (!ObjectUtils.isEmpty(configurationProperties.getProducerConfiguration())) {
props.putAll(configurationProperties.getProducerConfiguration());
if (!ObjectUtils.isEmpty(configurationProperties.getConfiguration())) {
props.putAll(configurationProperties.getConfiguration());
}
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configurationProperties.getKafkaConnectionString());
@@ -257,26 +254,24 @@ public class KafkaMessageChannelBinder extends
? new ContainerProperties(destination.getName())
: new ContainerProperties(topicPartitionInitialOffsets);
int concurrency = Math.min(extendedConsumerProperties.getConcurrency(), listenedPartitions.size());
@SuppressWarnings("rawtypes")
final ConcurrentMessageListenerContainer<?, ?> messageListenerContainer =
new ConcurrentMessageListenerContainer(
consumerFactory, containerProperties) {
final ConcurrentMessageListenerContainer<?, ?> messageListenerContainer = new ConcurrentMessageListenerContainer(
consumerFactory, containerProperties) {
@Override
public void stop(Runnable callback) {
super.stop(callback);
}
};
messageListenerContainer.setConcurrency(concurrency);
messageListenerContainer.getContainerProperties()
.setAckOnError(isAutoCommitOnError(extendedConsumerProperties));
if (!extendedConsumerProperties.getExtension().isAutoCommitOffset()) {
messageListenerContainer.getContainerProperties()
.setAckMode(AbstractMessageListenerContainer.AckMode.MANUAL);
messageListenerContainer.getContainerProperties().setAckOnError(false);
}
else {
messageListenerContainer.getContainerProperties()
.setAckOnError(isAutoCommitOnError(extendedConsumerProperties));
if (this.logger.isDebugEnabled()) {
this.logger.debug(
"Listened partitions: " + StringUtils.collectionToCommaDelimitedString(listenedPartitions));
}
if (this.logger.isDebugEnabled()) {
this.logger.debug(
@@ -285,57 +280,37 @@ public class KafkaMessageChannelBinder extends
final KafkaMessageDrivenChannelAdapter<?, ?> kafkaMessageDrivenChannelAdapter = new KafkaMessageDrivenChannelAdapter<>(
messageListenerContainer);
kafkaMessageDrivenChannelAdapter.setBeanFactory(this.getBeanFactory());
ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, consumerGroup,
extendedConsumerProperties);
if (extendedConsumerProperties.getMaxAttempts() > 1) {
kafkaMessageDrivenChannelAdapter.setRetryTemplate(buildRetryTemplate(extendedConsumerProperties));
kafkaMessageDrivenChannelAdapter.setRecoveryCallback(errorInfrastructure.getRecoverer());
}
else {
kafkaMessageDrivenChannelAdapter.setErrorChannel(errorInfrastructure.getErrorChannel());
}
return kafkaMessageDrivenChannelAdapter;
}
@Override
protected ErrorMessageStrategy getErrorMessageStrategy() {
return new RawRecordHeaderErrorMessageStrategy();
}
@Override
protected MessageHandler getErrorMessageHandler(final ConsumerDestination destination, final String group,
final ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties) {
final RetryTemplate retryTemplate = buildRetryTemplate(extendedConsumerProperties);
kafkaMessageDrivenChannelAdapter.setRetryTemplate(retryTemplate);
if (extendedConsumerProperties.getExtension().isEnableDlq()) {
DefaultKafkaProducerFactory<byte[], byte[]> producerFactory = getProducerFactory(
new ExtendedProducerProperties<>(new KafkaProducerProperties()));
final KafkaTemplate<byte[], byte[]> kafkaTemplate = new KafkaTemplate<>(producerFactory);
return new MessageHandler() {
messageListenerContainer.getContainerProperties().setErrorHandler(new ErrorHandler() {
@Override
public void handleMessage(Message<?> message) throws MessagingException {
final ConsumerRecord<?, ?> record = message.getHeaders()
.get(KafkaMessageDrivenChannelAdapter.KAFKA_RAW_DATA, ConsumerRecord.class);
final byte[] key = record.key() != null ? Utils.toArray(ByteBuffer.wrap((byte[]) record.key()))
public void handle(Exception thrownException, final ConsumerRecord message) {
final byte[] key = message.key() != null ? Utils.toArray(ByteBuffer.wrap((byte[]) message.key()))
: null;
final byte[] payload = record.value() != null
? Utils.toArray(ByteBuffer.wrap((byte[]) record.value())) : null;
final byte[] payload = message.value() != null
? Utils.toArray(ByteBuffer.wrap((byte[]) message.value())) : null;
String dlqName = StringUtils.hasText(extendedConsumerProperties.getExtension().getDlqName())
? extendedConsumerProperties.getExtension().getDlqName()
: "error." + destination.getName() + "." + group;
ListenableFuture<SendResult<byte[], byte[]>> sentDlq = kafkaTemplate.send(dlqName,
record.partition(), key, payload);
message.partition(), key, payload);
sentDlq.addCallback(new ListenableFutureCallback<SendResult<byte[], byte[]>>() {
StringBuilder sb = new StringBuilder().append(" a message with key='")
.append(toDisplayString(ObjectUtils.nullSafeToString(key), 50)).append("'")
.append(" and payload='")
.append(toDisplayString(ObjectUtils.nullSafeToString(payload), 50))
.append("'").append(" received from ")
.append(record.partition());
.append(message.partition());
@Override
public void onFailure(Throwable ex) {
KafkaMessageChannelBinder.this.logger.error(
"Error sending to DLQ " + sb.toString(), ex);
"Error sending to DLQ" + sb.toString(), ex);
}
@Override
@@ -345,12 +320,11 @@ public class KafkaMessageChannelBinder extends
"Sent to DLQ " + sb.toString());
}
}
});
}
};
});
}
return null;
return kafkaMessageDrivenChannelAdapter;
}
private ConsumerFactory<?, ?> createKafkaConsumerFactory(boolean anonymous, String consumerGroup,
@@ -363,8 +337,8 @@ public class KafkaMessageChannelBinder extends
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, anonymous ? "latest" : "earliest");
props.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup);
if (!ObjectUtils.isEmpty(configurationProperties.getConsumerConfiguration())) {
props.putAll(configurationProperties.getConsumerConfiguration());
if (!ObjectUtils.isEmpty(configurationProperties.getConfiguration())) {
props.putAll(configurationProperties.getConfiguration());
}
if (ObjectUtils.isEmpty(props.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configurationProperties.getKafkaConnectionString());

View File

@@ -18,11 +18,15 @@ package org.springframework.cloud.stream.binder.kafka.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.utils.AppInfoParser;
@@ -31,6 +35,7 @@ import org.springframework.boot.actuate.endpoint.PublicMetrics;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.kafka.KafkaBinderHealthIndicator;
@@ -71,7 +76,8 @@ import org.springframework.util.ObjectUtils;
*/
@Configuration
@ConditionalOnMissingBean(Binder.class)
@Import({ KryoCodecAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class})
@Import({ KryoCodecAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class,
KafkaBinderConfiguration.KafkaPropertiesConfiguration.class })
@EnableConfigurationProperties({ KafkaBinderConfigurationProperties.class, KafkaExtendedBindingProperties.class })
public class KafkaBinderConfiguration {
@@ -121,8 +127,8 @@ public class KafkaBinderConfiguration {
Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
if (!ObjectUtils.isEmpty(configurationProperties.getConsumerConfiguration())) {
props.putAll(configurationProperties.getConsumerConfiguration());
if (!ObjectUtils.isEmpty(configurationProperties.getConfiguration())) {
props.putAll(configurationProperties.getConfiguration());
}
if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configurationProperties.getKafkaConnectionString());
@@ -179,4 +185,52 @@ public class KafkaBinderConfiguration {
private JaasLoginModuleConfiguration zookeeper;
}
@ConditionalOnClass(name = "org.springframework.boot.autoconfigure.kafka.KafkaProperties")
public static class KafkaPropertiesConfiguration {
// KafkaProperties can still be unavailable if KafkaAutoConfiguration is disabled.
@Autowired(required = false)
private KafkaProperties kafkaProperties;
@Autowired
private KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties;
@PostConstruct
public void init() {
Map<String, Object> configuration = this.kafkaBinderConfigurationProperties.getConfiguration();
if (this.kafkaProperties != null) {
for (Map.Entry<String, String> properties : this.kafkaProperties.getProperties().entrySet()) {
if (!configuration.containsKey(properties.getKey())) {
configuration.put(properties.getKey(), properties.getValue());
}
}
for (Map.Entry<String, Object> producerProperties : this.kafkaProperties.buildProducerProperties()
.entrySet()) {
if (!configuration.containsKey(producerProperties.getKey())) {
configuration.put(producerProperties.getKey(), producerProperties.getValue());
}
}
for (Map.Entry<String, Object> consumerProperties : this.kafkaProperties.buildConsumerProperties()
.entrySet()) {
if (!configuration.containsKey(consumerProperties.getKey())) {
configuration.put(consumerProperties.getKey(), consumerProperties.getValue());
}
}
if (ObjectUtils.isEmpty(configuration.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
configuration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
kafkaBinderConfigurationProperties.getKafkaConnectionString());
}
else {
@SuppressWarnings("unchecked")
List<String> bootStrapServers = (List<String>) configuration
.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
configuration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
kafkaBinderConfigurationProperties.getKafkaConnectionString());
}
}
}
}
}
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2014-2017 the original author or authors.
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,25 +17,21 @@ package org.springframework.cloud.stream.binder.kafka;
import java.util.List;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Registration;
import org.springframework.cloud.stream.binder.AbstractTestBinder;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.integration.channel.PublishSubscribeChannel;
import org.springframework.integration.codec.Codec;
import org.springframework.integration.codec.kryo.KryoRegistrar;
import org.springframework.integration.codec.kryo.PojoCodec;
import org.springframework.integration.context.IntegrationContextUtils;
import org.springframework.integration.tuple.TupleKryoRegistrar;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Registration;
/**
* @author Soby Chacko
* @author Gary Russell
*/
public abstract class AbstractKafkaTestBinder extends
AbstractTestBinder<KafkaMessageChannelBinder, ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>> {
@@ -45,12 +41,6 @@ public abstract class AbstractKafkaTestBinder extends
// do nothing - the rule will take care of that
}
protected void addErrorChannel(GenericApplicationContext context) {
PublishSubscribeChannel errorChannel = new PublishSubscribeChannel();
context.getBeanFactory().initializeBean(errorChannel, IntegrationContextUtils.ERROR_CHANNEL_BEAN_NAME);
context.getBeanFactory().registerSingleton(IntegrationContextUtils.ERROR_CHANNEL_BEAN_NAME, errorChannel);
}
protected static Codec getCodec() {
return new PojoCodec(new TupleRegistrar());
}

View File

@@ -77,9 +77,7 @@ public class KafkaBinderAutoConfigurationPropertiesTest {
producerFactory);
assertTrue(producerConfigs.get("batch.size").equals(10));
assertTrue(producerConfigs.get("key.serializer").equals(LongSerializer.class));
assertTrue(producerConfigs.get("key.deserializer") == null);
assertTrue(producerConfigs.get("value.serializer").equals(LongSerializer.class));
assertTrue(producerConfigs.get("value.deserializer") == null);
assertTrue(producerConfigs.get("compression.type").equals("snappy"));
List<String> bootstrapServers = new ArrayList<>();
bootstrapServers.add("10.98.09.199:9092");
@@ -98,9 +96,7 @@ public class KafkaBinderAutoConfigurationPropertiesTest {
Map<String, Object> consumerConfigs = (Map<String, Object>) ReflectionUtils.getField(consumerFactoryConfigField,
consumerFactory);
assertTrue(consumerConfigs.get("key.deserializer").equals(LongDeserializer.class));
assertTrue(consumerConfigs.get("key.serializer") == null);
assertTrue(consumerConfigs.get("value.deserializer").equals(LongDeserializer.class));
assertTrue(consumerConfigs.get("value.serialized") == null);
assertTrue(consumerConfigs.get("group.id").equals("groupIdFromBootConfig"));
assertTrue(consumerConfigs.get("auto.offset.reset").equals("earliest"));
assertTrue((((List<String>) consumerConfigs.get("bootstrap.servers")).containsAll(bootstrapServers)));

View File

@@ -24,9 +24,9 @@ import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import kafka.utils.ZKStringSerializer$;
import kafka.utils.ZkUtils;
import org.I0Itec.zkclient.ZkClient;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
@@ -34,6 +34,7 @@ import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.assertj.core.api.Condition;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -41,6 +42,7 @@ import org.junit.rules.ExpectedException;
import org.springframework.beans.DirectFieldAccessor;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.Binding;
import org.springframework.cloud.stream.binder.DefaultBinding;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.HeaderMode;
@@ -53,7 +55,6 @@ import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerPro
import org.springframework.cloud.stream.binder.kafka.utils.KafkaTopicUtils;
import org.springframework.cloud.stream.config.BindingProperties;
import org.springframework.cloud.stream.provisioning.ProvisioningException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.integration.IntegrationMessageHeaderAccessor;
import org.springframework.integration.channel.DirectChannel;
@@ -80,24 +81,21 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.junit.Assert.assertTrue;
import kafka.utils.ZKStringSerializer$;
import kafka.utils.ZkUtils;
/**
* @author Soby Chacko
* @author Ilayaperumal Gopinathan
* @author Henryk Konsek
*/
public abstract class KafkaBinderTests extends
PartitionCapableBinderTests<AbstractKafkaTestBinder, ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>> {
public abstract class KafkaBinderTests extends PartitionCapableBinderTests<AbstractKafkaTestBinder, ExtendedConsumerProperties<KafkaConsumerProperties>,
ExtendedProducerProperties<KafkaProducerProperties>> {
@Rule
public ExpectedException expectedProvisioningException = ExpectedException.none();
@Override
protected ExtendedConsumerProperties<KafkaConsumerProperties> createConsumerProperties() {
final ExtendedConsumerProperties<KafkaConsumerProperties> kafkaConsumerProperties = new ExtendedConsumerProperties<>(
new KafkaConsumerProperties());
final ExtendedConsumerProperties<KafkaConsumerProperties> kafkaConsumerProperties =
new ExtendedConsumerProperties<>(new KafkaConsumerProperties());
// set the default values that would normally be propagated by Spring Cloud Stream
kafkaConsumerProperties.setInstanceCount(1);
kafkaConsumerProperties.setInstanceIndex(0);
@@ -106,8 +104,7 @@ public abstract class KafkaBinderTests extends
@Override
protected ExtendedProducerProperties<KafkaProducerProperties> createProducerProperties() {
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = new ExtendedProducerProperties<>(
new KafkaProducerProperties());
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = new ExtendedProducerProperties<>(new KafkaProducerProperties());
producerProperties.getExtension().setSync(true);
return producerProperties;
}
@@ -123,24 +120,15 @@ public abstract class KafkaBinderTests extends
protected abstract ZkUtils getZkUtils(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties);
protected abstract void invokeCreateTopic(ZkUtils zkUtils, String topic, int partitions,
int replicationFactor, Properties topicConfig);
int replicationFactor, Properties topicConfig);
protected abstract int invokePartitionSize(String topic,
ZkUtils zkUtils);
ZkUtils zkUtils);
@Test
@SuppressWarnings("unchecked")
public void testDlqAndRetry() throws Exception {
testDlqGuts(true);
}
@Test
public void testDlq() throws Exception {
testDlqGuts(false);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public void testDlqGuts(boolean withRetry) throws Exception {
AbstractKafkaTestBinder binder = getBinder();
Binder binder = getBinder();
DirectChannel moduleOutputChannel = new DirectChannel();
DirectChannel moduleInputChannel = new DirectChannel();
QueueChannel dlqChannel = new QueueChannel();
@@ -149,52 +137,23 @@ public abstract class KafkaBinderTests extends
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
producerProperties.setPartitionCount(2);
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.setMaxAttempts(withRetry ? 2 : 1);
consumerProperties.setMaxAttempts(3);
consumerProperties.setBackOffInitialInterval(100);
consumerProperties.setBackOffMaxInterval(150);
consumerProperties.getExtension().setEnableDlq(true);
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
long uniqueBindingId = System.currentTimeMillis();
String producerName = "dlqTest." + uniqueBindingId + ".0";
Binding<MessageChannel> producerBinding = binder.bindProducer(producerName,
Binding<MessageChannel> producerBinding = binder.bindProducer("retryTest." + uniqueBindingId + ".0",
moduleOutputChannel, producerProperties);
Binding<MessageChannel> consumerBinding = binder.bindConsumer(producerName,
Binding<MessageChannel> consumerBinding = binder.bindConsumer("retryTest." + uniqueBindingId + ".0",
"testGroup", moduleInputChannel, consumerProperties);
ExtendedConsumerProperties<KafkaConsumerProperties> dlqConsumerProperties = createConsumerProperties();
dlqConsumerProperties.setMaxAttempts(1);
ApplicationContext context = TestUtils.getPropertyValue(binder.getBinder(), "applicationContext",
ApplicationContext.class);
Map<String, MessageChannel> beansOfType = context.getBeansOfType(MessageChannel.class);
SubscribableChannel boundErrorChannel = context
.getBean(producerName + ".testGroup.errors-0", SubscribableChannel.class);
SubscribableChannel globalErrorChannel = context.getBean("errorChannel", SubscribableChannel.class);
final AtomicReference<Message<?>> boundErrorChannelMessage = new AtomicReference<>();
final AtomicReference<Message<?>> globalErrorChannelMessage = new AtomicReference<>();
final AtomicBoolean hasRecovererInCallStack = new AtomicBoolean(!withRetry);
boundErrorChannel.subscribe(new MessageHandler() {
@Override
public void handleMessage(Message<?> message) throws MessagingException {
boundErrorChannelMessage.set(message);
String stackTrace = Arrays.toString(new RuntimeException().getStackTrace());
hasRecovererInCallStack.set(stackTrace.contains("ErrorMessageSendingRecoverer"));
}
});
globalErrorChannel.subscribe(new MessageHandler() {
@Override
public void handleMessage(Message<?> message) throws MessagingException {
globalErrorChannelMessage.set(message);
}
});
Binding<MessageChannel> dlqConsumerBinding = binder.bindConsumer(
"error.dlqTest." + uniqueBindingId + ".0.testGroup", null, dlqChannel, dlqConsumerProperties);
"error.retryTest." + uniqueBindingId + ".0.testGroup", null, dlqChannel, dlqConsumerProperties);
binderBindUnbindLatency();
String testMessagePayload = "test." + UUID.randomUUID().toString();
Message<String> testMessage = MessageBuilder.withPayload(testMessagePayload).build();
@@ -205,12 +164,6 @@ public abstract class KafkaBinderTests extends
assertThat(receivedMessage.getPayload()).isEqualTo(testMessagePayload);
assertThat(handler.getInvocationCount()).isEqualTo(consumerProperties.getMaxAttempts());
binderBindUnbindLatency();
// verify we got a message on the dedicated error channel and the global (via bridge)
assertThat(boundErrorChannelMessage.get()).isNotNull();
assertThat(globalErrorChannelMessage.get()).isNotNull();
assertThat(hasRecovererInCallStack.get()).isEqualTo(withRetry);
dlqConsumerBinding.unbind();
consumerBinding.unbind();
producerBinding.unbind();
@@ -354,8 +307,7 @@ public abstract class KafkaBinderTests extends
ExtendedConsumerProperties<KafkaConsumerProperties> dlqConsumerProperties = createConsumerProperties();
dlqConsumerProperties.setMaxAttempts(1);
QueueChannel dlqChannel = new QueueChannel();
Binding<MessageChannel> dlqConsumerBinding = binder.bindConsumer(dlqName, null, dlqChannel,
dlqConsumerProperties);
Binding<MessageChannel> dlqConsumerBinding = binder.bindConsumer(dlqName, null, dlqChannel, dlqConsumerProperties);
String testMessagePayload = "test." + UUID.randomUUID().toString();
Message<String> testMessage = MessageBuilder.withPayload(testMessagePayload).build();
@@ -417,9 +369,9 @@ public abstract class KafkaBinderTests extends
@Test
@SuppressWarnings("unchecked")
public void testCompression() throws Exception {
final KafkaProducerProperties.CompressionType[] codecs = new KafkaProducerProperties.CompressionType[] {
final KafkaProducerProperties.CompressionType[] codecs = new KafkaProducerProperties.CompressionType[]{
KafkaProducerProperties.CompressionType.none, KafkaProducerProperties.CompressionType.gzip,
KafkaProducerProperties.CompressionType.snappy };
KafkaProducerProperties.CompressionType.snappy};
byte[] testPayload = new byte[2048];
Arrays.fill(testPayload, (byte) 65);
Binder binder = getBinder();
@@ -589,8 +541,8 @@ public abstract class KafkaBinderTests extends
Binder binder = getBinder(createConfigurationProperties());
GenericApplicationContext context = new GenericApplicationContext();
context.refresh();
// binder.setApplicationContext(context);
// binder.afterPropertiesSet();
//binder.setApplicationContext(context);
//binder.afterPropertiesSet();
DirectChannel output = new DirectChannel();
QueueChannel input1 = new QueueChannel();
@@ -656,6 +608,55 @@ public abstract class KafkaBinderTests extends
}
}
@Test
@Ignore("Needs further discussion")
@SuppressWarnings("unchecked")
public void testReset() throws Exception {
Binder binder = getBinder();
DirectChannel output = new DirectChannel();
QueueChannel input1 = new QueueChannel();
String testTopicName = UUID.randomUUID().toString();
Binding<MessageChannel> producerBinding = binder.bindProducer(testTopicName, output,
createProducerProperties());
String testPayload1 = "foo1-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload1.getBytes()));
ExtendedConsumerProperties<KafkaConsumerProperties> properties = createConsumerProperties();
properties.getExtension().setResetOffsets(true);
properties.getExtension().setStartOffset(KafkaConsumerProperties.StartOffset.earliest);
Binding<MessageChannel> consumerBinding = binder.bindConsumer(testTopicName, "startOffsets", input1,
properties);
Message<byte[]> receivedMessage1 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage1).isNotNull();
assertThat(new String(receivedMessage1.getPayload())).isEqualTo(testPayload1);
String testPayload2 = "foo2-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload2.getBytes()));
Message<byte[]> receivedMessage2 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage2).isNotNull();
assertThat(new String(receivedMessage2.getPayload())).isEqualTo(testPayload2);
consumerBinding.unbind();
String testPayload3 = "foo3-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload3.getBytes()));
ExtendedConsumerProperties<KafkaConsumerProperties> properties2 = createConsumerProperties();
properties2.getExtension().setResetOffsets(true);
properties2.getExtension().setStartOffset(KafkaConsumerProperties.StartOffset.earliest);
consumerBinding = binder.bindConsumer(testTopicName, "startOffsets", input1, properties2);
Message<byte[]> receivedMessage4 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage4).isNotNull();
assertThat(new String(receivedMessage4.getPayload())).isEqualTo(testPayload1);
Message<byte[]> receivedMessage5 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage5).isNotNull();
assertThat(new String(receivedMessage5.getPayload())).isEqualTo(testPayload2);
Message<byte[]> receivedMessage6 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage6).isNotNull();
assertThat(new String(receivedMessage6.getPayload())).isEqualTo(testPayload3);
consumerBinding.unbind();
producerBinding.unbind();
}
@Test
@SuppressWarnings("unchecked")
public void testResume() throws Exception {
@@ -741,6 +742,7 @@ public abstract class KafkaBinderTests extends
moduleOutputChannel1.send(message1);
moduleOutputChannel2.send(message2);
Message<?>[] messages = new Message[2];
messages[0] = receive(moduleInputChannel);
messages[1] = receive(moduleInputChannel);
@@ -766,15 +768,13 @@ public abstract class KafkaBinderTests extends
createProducerBindingProperties(createProducerProperties()));
QueueChannel moduleInputChannel = new QueueChannel();
Binding<MessageChannel> producerBinding = binder.bindProducer(
"testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", moduleOutputChannel,
Binding<MessageChannel> producerBinding = binder.bindProducer("testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", moduleOutputChannel,
createProducerProperties());
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.getExtension().setAutoCommitOffset(false);
Binding<MessageChannel> consumerBinding = binder.bindConsumer(
"testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", "test", moduleInputChannel,
Binding<MessageChannel> consumerBinding = binder.bindConsumer("testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", "test", moduleInputChannel,
consumerProperties);
String testPayload1 = "foo" + UUID.randomUUID().toString();
@@ -788,8 +788,7 @@ public abstract class KafkaBinderTests extends
Message<?> receivedMessage = receive(moduleInputChannel);
assertThat(receivedMessage).isNotNull();
assertThat(receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT)).isNotNull();
Acknowledgment acknowledgment = receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT,
Acknowledgment.class);
Acknowledgment acknowledgment = receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment.class);
try {
acknowledgment.acknowledge();
}
@@ -811,14 +810,12 @@ public abstract class KafkaBinderTests extends
createProducerBindingProperties(createProducerProperties()));
QueueChannel moduleInputChannel = new QueueChannel();
Binding<MessageChannel> producerBinding = binder.bindProducer(
"testManualAckIsNotPossibleWhenAutoCommitOffsetIsEnabledOnTheBinder", moduleOutputChannel,
Binding<MessageChannel> producerBinding = binder.bindProducer("testManualAckIsNotPossibleWhenAutoCommitOffsetIsEnabledOnTheBinder", moduleOutputChannel,
createProducerProperties());
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
Binding<MessageChannel> consumerBinding = binder.bindConsumer(
"testManualAckIsNotPossibleWhenAutoCommitOffsetIsEnabledOnTheBinder", "test", moduleInputChannel,
Binding<MessageChannel> consumerBinding = binder.bindConsumer("testManualAckIsNotPossibleWhenAutoCommitOffsetIsEnabledOnTheBinder", "test", moduleInputChannel,
consumerProperties);
String testPayload1 = "foo" + UUID.randomUUID().toString();
@@ -998,8 +995,8 @@ public abstract class KafkaBinderTests extends
Binding<MessageChannel> outputBinding = binder.bindProducer("partJ.0", output, producerProperties);
if (usesExplicitRouting()) {
Object endpoint = extractEndpoint(outputBinding);
assertThat(getEndpointRouting(endpoint))
.contains(getExpectedRoutingBaseDestination("partJ.0", "test") + "-' + headers['partition']");
assertThat(getEndpointRouting(endpoint)).
contains(getExpectedRoutingBaseDestination("partJ.0", "test") + "-' + headers['partition']");
}
output.send(new GenericMessage<>(2));
@@ -1047,7 +1044,7 @@ public abstract class KafkaBinderTests extends
QueueChannel input2 = new QueueChannel();
Binding<MessageChannel> binding2 = binder.bindConsumer("defaultGroup.0", null, input2,
consumerProperties);
// Since we don't provide any topic info, let Kafka bind the consumer successfully
//Since we don't provide any topic info, let Kafka bind the consumer successfully
Thread.sleep(1000);
String testPayload1 = "foo-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload1.getBytes()));
@@ -1066,7 +1063,7 @@ public abstract class KafkaBinderTests extends
output.send(new GenericMessage<>(testPayload2.getBytes()));
binding2 = binder.bindConsumer("defaultGroup.0", null, input2, consumerProperties);
// Since we don't provide any topic info, let Kafka bind the consumer successfully
//Since we don't provide any topic info, let Kafka bind the consumer successfully
Thread.sleep(1000);
String testPayload3 = "foo-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload3.getBytes()));
@@ -1229,8 +1226,7 @@ public abstract class KafkaBinderTests extends
consumerProperties.setInstanceIndex(2);
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
expectedProvisioningException.expect(ProvisioningException.class);
expectedProvisioningException
.expectMessage("The number of expected partitions was: 3, but 1 has been found instead");
expectedProvisioningException.expectMessage("The number of expected partitions was: 3, but 1 has been found instead");
Binding binding = binder.bindConsumer(testTopicName, "test", output, consumerProperties);
if (binding != null) {
binding.unbind();
@@ -1346,7 +1342,7 @@ public abstract class KafkaBinderTests extends
Binding<?> binding = null;
try {
KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
Map<String, String> propertiesToOverride = configurationProperties.getConfiguration();
Map<String, Object> propertiesToOverride = configurationProperties.getConfiguration();
propertiesToOverride.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
propertiesToOverride.put("value.deserializer", "org.apache.kafka.common.serialization.LongDeserializer");
configurationProperties.setConfiguration(propertiesToOverride);
@@ -1359,10 +1355,8 @@ public abstract class KafkaBinderTests extends
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
binding = binder.bindConsumer(testTopicName, "test", output, consumerProperties);
DirectFieldAccessor consumerAccessor = new DirectFieldAccessor(getKafkaConsumer(binding));
assertTrue("Expected StringDeserializer as a custom key deserializer",
consumerAccessor.getPropertyValue("keyDeserializer") instanceof StringDeserializer);
assertTrue("Expected LongDeserializer as a custom value deserializer",
consumerAccessor.getPropertyValue("valueDeserializer") instanceof LongDeserializer);
assertTrue("Expected StringDeserializer as a custom key deserializer", consumerAccessor.getPropertyValue("keyDeserializer") instanceof StringDeserializer);
assertTrue("Expected LongDeserializer as a custom value deserializer", consumerAccessor.getPropertyValue("valueDeserializer") instanceof LongDeserializer);
}
finally {
if (binding != null) {
@@ -1372,15 +1366,12 @@ public abstract class KafkaBinderTests extends
}
private KafkaConsumer getKafkaConsumer(Binding binding) {
DirectFieldAccessor bindingAccessor = new DirectFieldAccessor(binding);
KafkaMessageDrivenChannelAdapter adapter = (KafkaMessageDrivenChannelAdapter) bindingAccessor
.getPropertyValue("lifecycle");
DirectFieldAccessor bindingAccessor = new DirectFieldAccessor((DefaultBinding) binding);
KafkaMessageDrivenChannelAdapter adapter = (KafkaMessageDrivenChannelAdapter) bindingAccessor.getPropertyValue("lifecycle");
DirectFieldAccessor adapterAccessor = new DirectFieldAccessor(adapter);
ConcurrentMessageListenerContainer messageListenerContainer =
(ConcurrentMessageListenerContainer) adapterAccessor.getPropertyValue("messageListenerContainer");
DirectFieldAccessor containerAccessor = new DirectFieldAccessor(messageListenerContainer);
DefaultKafkaConsumerFactory consumerFactory = (DefaultKafkaConsumerFactory) containerAccessor
.getPropertyValue("consumerFactory");
ConcurrentMessageListenerContainer messageListenerContainer = (ConcurrentMessageListenerContainer) adapterAccessor.getPropertyValue("messageListenerContainer");
DirectFieldAccessor containerAccessor = new DirectFieldAccessor((ConcurrentMessageListenerContainer) messageListenerContainer);
DefaultKafkaConsumerFactory consumerFactory = (DefaultKafkaConsumerFactory) containerAccessor.getPropertyValue("consumerFactory");
return (KafkaConsumer) consumerFactory.createConsumer();
}
@@ -1406,13 +1397,11 @@ public abstract class KafkaBinderTests extends
QueueChannel moduleInputChannel = new QueueChannel();
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
producerProperties.setUseNativeEncoding(true);
producerProperties.getExtension().getConfiguration().put("value.serializer",
"org.apache.kafka.common.serialization.IntegerSerializer");
producerProperties.getExtension().getConfiguration().put("value.serializer", "org.apache.kafka.common.serialization.IntegerSerializer");
producerBinding = binder.bindProducer(testTopicName, moduleOutputChannel, producerProperties);
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
consumerProperties.getExtension().getConfiguration().put("value.deserializer",
"org.apache.kafka.common.serialization.IntegerDeserializer");
consumerProperties.getExtension().getConfiguration().put("value.deserializer", "org.apache.kafka.common.serialization.IntegerDeserializer");
consumerBinding = binder.bindConsumer(testTopicName, "test", moduleInputChannel, consumerProperties);
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
@@ -1508,9 +1497,9 @@ public abstract class KafkaBinderTests extends
input2.setBeanName("test.input2J");
Binding<MessageChannel> input2Binding = binder.bindConsumer("partJ.raw.0", "test", input2, consumerProperties);
output.send(new GenericMessage<>(new byte[] { (byte) 0 }));
output.send(new GenericMessage<>(new byte[] { (byte) 1 }));
output.send(new GenericMessage<>(new byte[] { (byte) 2 }));
output.send(new GenericMessage<>(new byte[]{(byte) 0}));
output.send(new GenericMessage<>(new byte[]{(byte) 1}));
output.send(new GenericMessage<>(new byte[]{(byte) 2}));
Message<?> receive0 = receive(input0);
assertThat(receive0).isNotNull();
@@ -1549,6 +1538,7 @@ public abstract class KafkaBinderTests extends
catch (UnsupportedOperationException ignored) {
}
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.setConcurrency(2);
consumerProperties.setInstanceIndex(0);
@@ -1568,13 +1558,13 @@ public abstract class KafkaBinderTests extends
input2.setBeanName("test.input2S");
Binding<MessageChannel> input2Binding = binder.bindConsumer("part.raw.0", "test", input2, consumerProperties);
Message<byte[]> message2 = org.springframework.integration.support.MessageBuilder.withPayload(new byte[] { 2 })
Message<byte[]> message2 = org.springframework.integration.support.MessageBuilder.withPayload(new byte[]{2})
.setHeader(IntegrationMessageHeaderAccessor.CORRELATION_ID, "kafkaBinderTestCommonsDelegate")
.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_NUMBER, 42)
.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_SIZE, 43).build();
output.send(message2);
output.send(new GenericMessage<>(new byte[] { 1 }));
output.send(new GenericMessage<>(new byte[] { 0 }));
output.send(new GenericMessage<>(new byte[]{1}));
output.send(new GenericMessage<>(new byte[]{0}));
Message<?> receive0 = receive(input0);
assertThat(receive0).isNotNull();
Message<?> receive1 = receive(input1);
@@ -1603,8 +1593,7 @@ public abstract class KafkaBinderTests extends
consumerProperties.setHeaderMode(HeaderMode.raw);
Binding<MessageChannel> consumerBinding = binder.bindConsumer("raw.0", "test", moduleInputChannel,
consumerProperties);
Message<?> message = org.springframework.integration.support.MessageBuilder
.withPayload("testSendAndReceiveWithRawMode".getBytes()).build();
Message<?> message = org.springframework.integration.support.MessageBuilder.withPayload("testSendAndReceiveWithRawMode".getBytes()).build();
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
moduleOutputChannel.send(message);
@@ -1629,15 +1618,13 @@ public abstract class KafkaBinderTests extends
consumerProperties.setHeaderMode(HeaderMode.raw);
Binding<MessageChannel> consumerBinding = binder.bindConsumer("raw.string.0", "test", moduleInputChannel,
consumerProperties);
Message<?> message = org.springframework.integration.support.MessageBuilder
.withPayload("testSendAndReceiveWithRawModeAndStringPayload").build();
Message<?> message = org.springframework.integration.support.MessageBuilder.withPayload("testSendAndReceiveWithRawModeAndStringPayload").build();
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
moduleOutputChannel.send(message);
Message<?> inbound = receive(moduleInputChannel);
assertThat(inbound).isNotNull();
assertThat(new String((byte[]) inbound.getPayload()))
.isEqualTo("testSendAndReceiveWithRawModeAndStringPayload");
assertThat(new String((byte[]) inbound.getPayload())).isEqualTo("testSendAndReceiveWithRawModeAndStringPayload");
producerBinding.unbind();
consumerBinding.unbind();
}
@@ -1669,16 +1656,14 @@ public abstract class KafkaBinderTests extends
Binding<MessageChannel> input3Binding = binder.bindConsumer(barTapName, "tap2", module3InputChannel,
consumerProperties);
Message<?> message = org.springframework.integration.support.MessageBuilder
.withPayload("testSendAndReceiveWithExplicitConsumerGroupWithRawMode".getBytes()).build();
Message<?> message = org.springframework.integration.support.MessageBuilder.withPayload("testSendAndReceiveWithExplicitConsumerGroupWithRawMode".getBytes()).build();
boolean success = false;
boolean retried = false;
while (!success) {
moduleOutputChannel.send(message);
Message<?> inbound = receive(module1InputChannel);
assertThat(inbound).isNotNull();
assertThat(new String((byte[]) inbound.getPayload()))
.isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
assertThat(new String((byte[]) inbound.getPayload())).isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
Message<?> tapped1 = receive(module2InputChannel);
Message<?> tapped2 = receive(module3InputChannel);
@@ -1689,15 +1674,12 @@ public abstract class KafkaBinderTests extends
continue;
}
success = true;
assertThat(new String((byte[]) tapped1.getPayload()))
.isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
assertThat(new String((byte[]) tapped2.getPayload()))
.isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
assertThat(new String((byte[]) tapped1.getPayload())).isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
assertThat(new String((byte[]) tapped2.getPayload())).isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
}
// unbind one of the tap streams
input3Binding.unbind();
Message<?> message2 = org.springframework.integration.support.MessageBuilder.withPayload("bar".getBytes())
.build();
Message<?> message2 = org.springframework.integration.support.MessageBuilder.withPayload("bar".getBytes()).build();
moduleOutputChannel.send(message2);
// other tap still receives messages
@@ -1753,7 +1735,7 @@ public abstract class KafkaBinderTests extends
receivedMessages.put(offset, message);
latch.countDown();
}
throw new RuntimeException("fail");
throw new RuntimeException();
}
public LinkedHashMap<Long, Message<?>> getReceivedMessages() {

@@ -67,7 +67,7 @@ public class KafkaBinderUnitTests {
// binder level setting
binderConfigurationProperties.setConfiguration(
Collections.singletonMap(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"));
Collections.<String, Object>singletonMap(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"));
factory = method.invoke(binder, false, "foo", ecp);
configs = TestUtils.getPropertyValue(factory, "configs", Map.class);
assertThat(configs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)).isEqualTo("latest");
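This hunk tracks the binder configuration map widening from Map<String, String> to Map<String, Object> (the propertiesToOverride change in the KafkaBinderTests hunk above), which is why singletonMap gains an explicit <String, Object> type witness. A minimal sketch of why the witness matters in argument position under a Java 7 source level; the class and method names are illustrative:
import java.util.Collections;
import java.util.Map;
public class TypeWitnessSketch {
	// Same parameter shape as setConfiguration(Map<String, Object>) after the change.
	static void setConfiguration(Map<String, Object> configuration) {
		System.out.println(configuration);
	}
	public static void main(String[] args) {
		// Under Java 7 inference rules the argument alone infers
		// Map<String, String>, which is not a Map<String, Object>;
		// the explicit witness pins the intended type.
		setConfiguration(Collections.<String, Object>singletonMap("auto.offset.reset", "latest"));
	}
}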

@@ -11,4 +11,4 @@
<root level="WARN">
<appender-ref ref="stdout"/>
</root>
</configuration>
</configuration>

@@ -1,69 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-cloud-stream-binder-kstream</artifactId>
<packaging>jar</packaging>
<name>spring-cloud-stream-binder-kstream</name>
<description>Kafka Streams Binder Implementation</description>
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M2</version>
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-codec</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<classifier>test</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

@@ -1,189 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import org.apache.kafka.common.Configurable;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.springframework.cloud.stream.binder.AbstractBinder;
import org.springframework.cloud.stream.binder.BinderHeaders;
import org.springframework.cloud.stream.binder.Binding;
import org.springframework.cloud.stream.binder.DefaultBinding;
import org.springframework.cloud.stream.binder.EmbeddedHeaderUtils;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.ExtendedPropertiesBinder;
import org.springframework.cloud.stream.binder.HeaderMode;
import org.springframework.cloud.stream.binder.MessageValues;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.binder.kstream.config.KStreamConsumerProperties;
import org.springframework.cloud.stream.binder.kstream.config.KStreamExtendedBindingProperties;
import org.springframework.cloud.stream.binder.kstream.config.KStreamProducerProperties;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHeaders;
import org.springframework.util.MimeType;
import org.springframework.util.StringUtils;
/**
* @author Marius Bogoevici
*/
public class KStreamBinder extends
AbstractBinder<KStream<Object, Object>, ExtendedConsumerProperties<KStreamConsumerProperties>, ExtendedProducerProperties<KStreamProducerProperties>>
implements ExtendedPropertiesBinder<KStream<Object, Object>, KStreamConsumerProperties, KStreamProducerProperties> {
private String[] headers;
private final KafkaTopicProvisioner kafkaTopicProvisioner;
private final KStreamExtendedBindingProperties kStreamExtendedBindingProperties;
private final StreamsConfig streamsConfig;
public KStreamBinder(KafkaBinderConfigurationProperties binderConfigurationProperties, KafkaTopicProvisioner kafkaTopicProvisioner,
KStreamExtendedBindingProperties kStreamExtendedBindingProperties, StreamsConfig streamsConfig) {
this.headers = EmbeddedHeaderUtils.headersToEmbed(binderConfigurationProperties.getHeaders());
this.kafkaTopicProvisioner = kafkaTopicProvisioner;
this.kStreamExtendedBindingProperties = kStreamExtendedBindingProperties;
this.streamsConfig = streamsConfig;
}
@Override
protected Binding<KStream<Object, Object>> doBindConsumer(String name, String group,
KStream<Object, Object> inputTarget, ExtendedConsumerProperties<KStreamConsumerProperties> properties) {
ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties = new ExtendedConsumerProperties<KafkaConsumerProperties>(
new KafkaConsumerProperties());
this.kafkaTopicProvisioner.provisionConsumerDestination(name, group, extendedConsumerProperties);
return new DefaultBinding<>(name, group, inputTarget, null);
}
@Override
@SuppressWarnings("unchecked")
protected Binding<KStream<Object, Object>> doBindProducer(String name, KStream<Object, Object> outboundBindTarget,
ExtendedProducerProperties<KStreamProducerProperties> properties) {
ExtendedProducerProperties<KafkaProducerProperties> extendedProducerProperties = new ExtendedProducerProperties<KafkaProducerProperties>(
new KafkaProducerProperties());
this.kafkaTopicProvisioner.provisionProducerDestination(name, extendedProducerProperties);
if (HeaderMode.embeddedHeaders.equals(properties.getHeaderMode())) {
outboundBindTarget = outboundBindTarget.map(new KeyValueMapper<Object, Object, KeyValue<Object, Object>>() {
@Override
public KeyValue<Object, Object> apply(Object k, Object v) {
if (v instanceof Message) {
try {
return new KeyValue<>(k, (Object) KStreamBinder.this.serializeAndEmbedHeadersIfApplicable((Message<?>) v));
}
catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
else {
throw new IllegalArgumentException("Wrong type of message " + v);
}
}
});
}
else {
if (!properties.isUseNativeEncoding()) {
outboundBindTarget = outboundBindTarget
.map(new KeyValueMapper<Object, Object, KeyValue<Object, Object>>() {
@Override
public KeyValue<Object, Object> apply(Object k, Object v) {
return KeyValue.pair(k, (Object) KStreamBinder.this.serializePayloadIfNecessary((Message<?>) v));
}
});
}
else {
outboundBindTarget = outboundBindTarget
.map(new KeyValueMapper<Object, Object, KeyValue<Object, Object>>() {
@Override
public KeyValue<Object, Object> apply(Object k, Object v) {
return KeyValue.pair(k, ((Message<?>) v).getPayload());
}
});
}
}
if (!properties.isUseNativeEncoding() || StringUtils.hasText(properties.getExtension().getKeySerde()) || StringUtils.hasText(properties.getExtension().getValueSerde())) {
Serde<?> keySerde = Serdes.ByteArray();
Serde<?> valueSerde = Serdes.ByteArray();
try {
if (StringUtils.hasText(properties.getExtension().getKeySerde())) {
keySerde = Utils.newInstance(properties.getExtension().getKeySerde(), Serde.class);
if (keySerde instanceof Configurable) {
((Configurable) keySerde).configure(streamsConfig.originals());
}
}
}
catch (ClassNotFoundException e) {
throw new IllegalStateException("Serde class not found: ", e);
}
try {
if (StringUtils.hasText(properties.getExtension().getValueSerde())) {
valueSerde = Utils.newInstance(properties.getExtension().getValueSerde(), Serde.class);
if (valueSerde instanceof Configurable) {
((Configurable) valueSerde).configure(streamsConfig.originals());
}
}
}
catch (ClassNotFoundException e) {
throw new IllegalStateException("Serde class not found: ", e);
}
outboundBindTarget.to((Serde<Object>) keySerde, (Serde<Object>) valueSerde, name);
}
else {
outboundBindTarget.to(name);
}
return new DefaultBinding<>(name, null, outboundBindTarget, null);
}
private byte[] serializeAndEmbedHeadersIfApplicable(Message<?> message) throws Exception {
MessageValues transformed = serializePayloadIfNecessary(message);
byte[] payload;
Object contentType = transformed.get(MessageHeaders.CONTENT_TYPE);
// transform content type headers to String, so that they can be properly embedded
// in JSON
if (contentType instanceof MimeType) {
transformed.put(MessageHeaders.CONTENT_TYPE, contentType.toString());
}
Object originalContentType = transformed.get(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE);
if (originalContentType instanceof MimeType) {
transformed.put(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE, originalContentType.toString());
}
payload = EmbeddedHeaderUtils.embedHeaders(transformed, headers);
return payload;
}
@Override
public KStreamConsumerProperties getExtendedConsumerProperties(String channelName) {
return this.kStreamExtendedBindingProperties.getExtendedConsumerProperties(channelName);
}
@Override
public KStreamProducerProperties getExtendedProducerProperties(String channelName) {
return this.kStreamExtendedBindingProperties.getExtendedProducerProperties(channelName);
}
}
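The serde handling in doBindProducer above reduces to one step: resolve a configured Serde class name reflectively, otherwise fall back to byte arrays. A self-contained sketch of just that step; the main method and the chosen class name are illustrative:
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Utils;
public class SerdeResolutionSketch {
	// Mirrors the binder's fallback: a blank property keeps ByteArray,
	// otherwise the named Serde implementation is instantiated.
	static Serde<?> resolve(String serdeClassName) {
		if (serdeClassName == null || serdeClassName.trim().isEmpty()) {
			return Serdes.ByteArray();
		}
		try {
			return Utils.newInstance(serdeClassName, Serde.class);
		}
		catch (ClassNotFoundException e) {
			throw new IllegalStateException("Serde class not found: " + serdeClassName, e);
		}
	}
	public static void main(String[] args) {
		System.out.println(resolve(null).getClass().getSimpleName());
		System.out.println(resolve("org.apache.kafka.common.serialization.Serdes$StringSerde")
				.getClass().getSimpleName());
	}
}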

@@ -1,167 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.springframework.aop.framework.ProxyFactory;
import org.springframework.cloud.stream.binder.ConsumerProperties;
import org.springframework.cloud.stream.binder.EmbeddedHeaderUtils;
import org.springframework.cloud.stream.binder.HeaderMode;
import org.springframework.cloud.stream.binder.MessageSerializationUtils;
import org.springframework.cloud.stream.binder.MessageValues;
import org.springframework.cloud.stream.binder.StringConvertingContentTypeResolver;
import org.springframework.cloud.stream.binding.AbstractBindingTargetFactory;
import org.springframework.cloud.stream.config.BindingProperties;
import org.springframework.cloud.stream.config.BindingServiceProperties;
import org.springframework.cloud.stream.converter.CompositeMessageConverterFactory;
import org.springframework.integration.codec.Codec;
import org.springframework.integration.support.MutableMessageHeaders;
import org.springframework.messaging.Message;
import org.springframework.messaging.converter.MessageConverter;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.util.Assert;
import org.springframework.util.MimeType;
import org.springframework.util.StringUtils;
/**
* @author Marius Bogoevici
*/
public class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
private final KStreamBuilder kStreamBuilder;
private final BindingServiceProperties bindingServiceProperties;
private volatile Codec codec;
private final StringConvertingContentTypeResolver contentTypeResolver = new StringConvertingContentTypeResolver();
private volatile Map<String, Class<?>> payloadTypeCache = new ConcurrentHashMap<>();
private CompositeMessageConverterFactory compositeMessageConverterFactory;
public KStreamBoundElementFactory(KStreamBuilder streamBuilder, BindingServiceProperties bindingServiceProperties,
Codec codec, CompositeMessageConverterFactory compositeMessageConverterFactory) {
super(KStream.class);
this.bindingServiceProperties = bindingServiceProperties;
this.kStreamBuilder = streamBuilder;
this.codec = codec;
this.compositeMessageConverterFactory = compositeMessageConverterFactory;
}
@Override
public KStream createInput(String name) {
KStream<Object, Object> stream = kStreamBuilder.stream(bindingServiceProperties.getBindingDestination(name));
ConsumerProperties properties = bindingServiceProperties.getConsumerProperties(name);
if (HeaderMode.embeddedHeaders.equals(properties.getHeaderMode())) {
stream = stream.map(new KeyValueMapper<Object, Object, KeyValue<Object, Object>>() {
@Override
public KeyValue<Object, Object> apply(Object key, Object value) {
if (!(value instanceof byte[])) {
return new KeyValue<>(key, value);
}
try {
MessageValues messageValues = EmbeddedHeaderUtils
.extractHeaders(MessageBuilder.withPayload((byte[]) value).build(), true);
messageValues = deserializePayloadIfNecessary(messageValues);
return new KeyValue<Object, Object>(null, messageValues.toMessage());
}
catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
});
}
return stream;
}
@Override
@SuppressWarnings("unchecked")
public KStream createOutput(final String name) {
BindingProperties bindingProperties = bindingServiceProperties.getBindingProperties(name);
String contentType = bindingProperties.getContentType();
MessageConverter messageConverter = StringUtils.hasText(contentType) ? compositeMessageConverterFactory
.getMessageConverterForType(MimeType.valueOf(contentType)) : null;
KStreamWrapperHandler handler = new KStreamWrapperHandler(messageConverter);
ProxyFactory proxyFactory = new ProxyFactory(KStreamWrapper.class, KStream.class);
proxyFactory.addAdvice(handler);
return (KStream) proxyFactory.getProxy();
}
private MessageValues deserializePayloadIfNecessary(MessageValues messageValues) {
return MessageSerializationUtils.deserializePayload(messageValues, this.contentTypeResolver, this.codec);
}
interface KStreamWrapper {
void wrap(KStream<Object, Object> delegate);
}
static class KStreamWrapperHandler implements KStreamWrapper, MethodInterceptor {
private KStream<Object, Object> delegate;
private final MessageConverter messageConverter;
public KStreamWrapperHandler(MessageConverter messageConverter) {
this.messageConverter = messageConverter;
}
public void wrap(KStream<Object, Object> delegate) {
Assert.notNull(delegate, "delegate cannot be null");
Assert.isNull(this.delegate, "delegate already set to " + this.delegate);
if (messageConverter != null) {
KeyValueMapper<Object, Object, KeyValue<Object, Object>> keyValueMapper = new KeyValueMapper<Object, Object, KeyValue<Object, Object>>() {
@Override
public KeyValue<Object, Object> apply(Object k, Object v) {
Message<?> message = (Message<?>) v;
return new KeyValue<Object, Object>(k,
messageConverter.toMessage(message.getPayload(),
new MutableMessageHeaders(((Message<?>) v).getHeaders())));
}
};
delegate = delegate.map(keyValueMapper);
}
this.delegate = delegate;
}
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
if (methodInvocation.getMethod().getDeclaringClass().equals(KStream.class)) {
Assert.notNull(delegate, "Trying to invoke " + methodInvocation
.getMethod() + " but no delegate has been set.");
return methodInvocation.getMethod().invoke(delegate, methodInvocation.getArguments());
}
else if (methodInvocation.getMethod().getDeclaringClass().equals(KStreamWrapper.class)) {
return methodInvocation.getMethod().invoke(this, methodInvocation.getArguments());
}
else {
throw new IllegalStateException("Only KStream method invocations are permitted");
}
}
}
}
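createOutput above hands back a proxy whose real KStream delegate is attached later through KStreamWrapper.wrap. The same late-binding pattern in isolation, as a runnable sketch; Greeter and Wrapper are illustrative stand-ins for KStream and KStreamWrapper:
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.springframework.aop.framework.ProxyFactory;
public class LateBindingProxySketch {
	interface Greeter {
		String greet(String name);
	}
	interface Wrapper {
		void wrap(Greeter delegate);
	}
	// Routes Greeter calls to a delegate set after proxy creation,
	// the same shape as KStreamWrapperHandler above.
	static class Handler implements Wrapper, MethodInterceptor {
		private Greeter delegate;
		@Override
		public void wrap(Greeter delegate) {
			this.delegate = delegate;
		}
		@Override
		public Object invoke(MethodInvocation invocation) throws Throwable {
			if (invocation.getMethod().getDeclaringClass().equals(Greeter.class)) {
				return invocation.getMethod().invoke(this.delegate, invocation.getArguments());
			}
			return invocation.getMethod().invoke(this, invocation.getArguments());
		}
	}
	public static void main(String[] args) {
		Handler handler = new Handler();
		ProxyFactory proxyFactory = new ProxyFactory(Wrapper.class, Greeter.class);
		proxyFactory.addAdvice(handler);
		Object proxy = proxyFactory.getProxy();
		((Wrapper) proxy).wrap(new Greeter() {
			@Override
			public String greet(String name) {
				return "hello, " + name;
			}
		});
		System.out.println(((Greeter) proxy).greet("kstream"));
	}
}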

@@ -1,75 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.springframework.cloud.stream.binding.StreamListenerParameterAdapter;
import org.springframework.core.MethodParameter;
import org.springframework.core.ResolvableType;
import org.springframework.messaging.Message;
import org.springframework.messaging.converter.MessageConverter;
import org.springframework.messaging.support.MessageBuilder;
/**
* @author Marius Bogoevici
* @author Soby Chacko
*/
public class KStreamListenerParameterAdapter implements StreamListenerParameterAdapter<KStream<?, ?>, KStream<?, ?>> {
private final MessageConverter messageConverter;
public KStreamListenerParameterAdapter(MessageConverter messageConverter) {
this.messageConverter = messageConverter;
}
@Override
public boolean supports(Class bindingTargetType, MethodParameter methodParameter) {
return KStream.class.isAssignableFrom(bindingTargetType)
&& KStream.class.isAssignableFrom(methodParameter.getParameterType());
}
@Override
@SuppressWarnings("unchecked")
public KStream adapt(KStream<?, ?> bindingTarget, MethodParameter parameter) {
ResolvableType resolvableType = ResolvableType.forMethodParameter(parameter);
final Class<?> valueClass = (resolvableType.getGeneric(1).getRawClass() != null)
? (resolvableType.getGeneric(1).getRawClass()) : Object.class;
return bindingTarget.map(new KeyValueMapper() {
@Override
public Object apply(Object o, Object o2) {
if (valueClass.isAssignableFrom(o2.getClass())) {
return new KeyValue<>(o, o2);
}
else if (o2 instanceof Message) {
return new KeyValue<>(o, messageConverter.fromMessage((Message) o2, valueClass));
}
else if (o2 instanceof String || o2 instanceof byte[]) {
Message<Object> message = MessageBuilder.withPayload(o2).build();
return new KeyValue<>(o, messageConverter.fromMessage(message, valueClass));
}
else {
return new KeyValue<>(o, o2);
}
}
});
}
}
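adapt above leans on ResolvableType to recover the listener parameter's declared value type, generic index 1 of KStream, before converting payloads. That extraction step runnable on its own; the listener method is illustrative:
import java.lang.reflect.Method;
import org.apache.kafka.streams.kstream.KStream;
import org.springframework.core.ResolvableType;
public class GenericExtractionSketch {
	@SuppressWarnings("unused")
	static void listener(KStream<Object, String> stream) {
	}
	public static void main(String[] args) throws Exception {
		Method method = GenericExtractionSketch.class.getDeclaredMethod("listener", KStream.class);
		ResolvableType resolvableType = ResolvableType.forMethodParameter(method, 0);
		// Generic index 1 is the KStream value type; fall back to Object
		// when the parameter is declared without generics.
		Class<?> valueClass = (resolvableType.getGeneric(1).getRawClass() != null)
				? resolvableType.getGeneric(1).getRawClass() : Object.class;
		System.out.println(valueClass.getName()); // prints java.lang.String
	}
}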

@@ -1,65 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import java.io.Closeable;
import java.io.IOException;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
/**
* @author Marius Bogoevici
*/
public class KStreamStreamListenerResultAdapter implements StreamListenerResultAdapter<KStream, KStreamBoundElementFactory.KStreamWrapper> {
@Override
public boolean supports(Class<?> resultType, Class<?> boundElement) {
return KStream.class.isAssignableFrom(resultType) && KStream.class.isAssignableFrom(boundElement);
}
@Override
@SuppressWarnings("unchecked")
public Closeable adapt(KStream streamListenerResult, KStreamBoundElementFactory.KStreamWrapper boundElement) {
boundElement.wrap(streamListenerResult.map(new KeyValueMapper() {
@Override
public Object apply(Object k, Object v) {
if (v instanceof Message<?>) {
return new KeyValue<>(k, v);
}
else {
return new KeyValue<>(k, MessageBuilder.withPayload(v).build());
}
}
}));
return new NoOpCloseable();
}
private static final class NoOpCloseable implements Closeable {
@Override
public void close() throws IOException {
}
}
}

@@ -1,34 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.annotations;
import org.apache.kafka.streams.kstream.KStream;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
/**
* @author Marius Bogoevici
*/
public interface KStreamProcessor {
@Input("input")
KStream<?, ?> input();
@Output("output")
KStream<?, ?> output();
}
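A usage sketch for this binding interface, in the shape the kstream samples of this period take; the application class, topology, and destinations are illustrative and would normally be driven by spring.cloud.stream.bindings configuration:
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kstream.annotations.KStreamProcessor;
import org.springframework.messaging.handler.annotation.SendTo;
@SpringBootApplication
@EnableBinding(KStreamProcessor.class)
public class UppercaseApplication {
	public static void main(String[] args) {
		SpringApplication.run(UppercaseApplication.class, args);
	}
	// Consumes from the "input" binding and produces to "output".
	@StreamListener("input")
	@SendTo("output")
	public KStream<?, String> process(KStream<?, String> input) {
		return input.mapValues(new ValueMapper<String, String>() {
			@Override
			public String apply(String value) {
				return value.toUpperCase();
			}
		});
	}
}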

@@ -1,96 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.config;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.common.utils.AppInfoParser;
import org.apache.kafka.streams.StreamsConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.admin.AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.admin.Kafka09AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.admin.Kafka10AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.binder.kstream.KStreamBinder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.type.AnnotatedTypeMetadata;
/**
* @author Marius Bogoevici
*/
@Configuration
@EnableConfigurationProperties(KStreamExtendedBindingProperties.class)
public class KStreamBinderConfiguration {
@Autowired(required = false)
private AdminUtilsOperation adminUtilsOperation;
private static final Log logger = LogFactory.getLog(KStreamBinderConfiguration.class);
@Bean
public KafkaTopicProvisioner provisioningProvider(KafkaBinderConfigurationProperties binderConfigurationProperties) {
return new KafkaTopicProvisioner(binderConfigurationProperties, adminUtilsOperation);
}
@Bean
public KStreamBinder kStreamBinder(KafkaBinderConfigurationProperties binderConfigurationProperties,
KafkaTopicProvisioner kafkaTopicProvisioner,
KStreamExtendedBindingProperties kStreamExtendedBindingProperties, StreamsConfig streamsConfig) {
return new KStreamBinder(binderConfigurationProperties, kafkaTopicProvisioner, kStreamExtendedBindingProperties,
streamsConfig);
}
@Bean(name = "adminUtilsOperation")
@Conditional(Kafka09Present.class)
@ConditionalOnClass(name = "kafka.admin.AdminUtils")
public AdminUtilsOperation kafka09AdminUtilsOperation() {
logger.info("AdminUtils selected: Kafka 0.9 AdminUtils");
return new Kafka09AdminUtilsOperation();
}
@Bean(name = "adminUtilsOperation")
@Conditional(Kafka10Present.class)
@ConditionalOnClass(name = "kafka.admin.AdminUtils")
public AdminUtilsOperation kafka10AdminUtilsOperation() {
logger.info("AdminUtils selected: Kafka 0.10 AdminUtils");
return new Kafka10AdminUtilsOperation();
}
static class Kafka10Present implements Condition {
@Override
public boolean matches(ConditionContext conditionContext, AnnotatedTypeMetadata annotatedTypeMetadata) {
return AppInfoParser.getVersion().startsWith("0.10");
}
}
static class Kafka09Present implements Condition {
@Override
public boolean matches(ConditionContext conditionContext, AnnotatedTypeMetadata annotatedTypeMetadata) {
return AppInfoParser.getVersion().startsWith("0.9");
}
}
}
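Both Condition implementations above reduce to a prefix check on the kafka-clients version reported by AppInfoParser. Standalone, that check is just this; the printed labels mirror the log messages above:
import org.apache.kafka.common.utils.AppInfoParser;
public class AdminUtilsSelectionSketch {
	public static void main(String[] args) {
		// With kafka-clients 0.10.2.0 on the classpath the 0.10 branch wins.
		String version = AppInfoParser.getVersion();
		if (version.startsWith("0.10")) {
			System.out.println("AdminUtils selected: Kafka 0.10 AdminUtils");
		}
		else if (version.startsWith("0.9")) {
			System.out.println("AdminUtils selected: Kafka 0.9 AdminUtils");
		}
		else {
			System.out.println("No AdminUtilsOperation matches version " + version);
		}
	}
}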

@@ -1,103 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.config;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.UnsatisfiedDependencyException;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kstream.KStreamBoundElementFactory;
import org.springframework.cloud.stream.binder.kstream.KStreamListenerParameterAdapter;
import org.springframework.cloud.stream.binder.kstream.KStreamStreamListenerResultAdapter;
import org.springframework.cloud.stream.config.BindingServiceProperties;
import org.springframework.cloud.stream.converter.CompositeMessageConverterFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.integration.codec.Codec;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.core.KStreamBuilderFactoryBean;
import org.springframework.util.ObjectUtils;
/**
* @author Marius Bogoevici
*/
public class KStreamBinderSupportAutoConfiguration {
@Bean
@ConfigurationProperties(prefix = "spring.cloud.stream.kstream.binder")
public KafkaBinderConfigurationProperties binderConfigurationProperties() {
return new KafkaBinderConfigurationProperties();
}
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_KSTREAM_BUILDER_BEAN_NAME)
public KStreamBuilderFactoryBean defaultKStreamBuilder(
@Qualifier(KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME) ObjectProvider<StreamsConfig> streamsConfigProvider) {
StreamsConfig streamsConfig = streamsConfigProvider.getIfAvailable();
if (streamsConfig != null) {
KStreamBuilderFactoryBean kStreamBuilderFactoryBean = new KStreamBuilderFactoryBean(streamsConfig);
kStreamBuilderFactoryBean.setPhase(Integer.MAX_VALUE - 500);
return kStreamBuilderFactoryBean;
}
else {
throw new UnsatisfiedDependencyException(KafkaStreamsDefaultConfiguration.class.getName(),
KafkaStreamsDefaultConfiguration.DEFAULT_KSTREAM_BUILDER_BEAN_NAME, "streamsConfig",
"There is no '" + KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME
+ "' StreamsConfig bean in the application context.\n");
}
}
@Bean(KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public StreamsConfig streamsConfig(KafkaBinderConfigurationProperties binderConfigurationProperties) {
Properties props = new Properties();
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, binderConfigurationProperties.getKafkaConnectionString());
props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "default");
props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, binderConfigurationProperties.getZkConnectionString());
if (!ObjectUtils.isEmpty(binderConfigurationProperties.getConfiguration())) {
props.putAll(binderConfigurationProperties.getConfiguration());
}
return new StreamsConfig(props);
}
@Bean
public KStreamStreamListenerResultAdapter kStreamStreamListenerResultAdapter() {
return new KStreamStreamListenerResultAdapter();
}
@Bean
public KStreamListenerParameterAdapter kStreamListenerParameterAdapter(
CompositeMessageConverterFactory compositeMessageConverterFactory) {
return new KStreamListenerParameterAdapter(
compositeMessageConverterFactory.getMessageConverterForAllRegistered());
}
@Bean
public KStreamBoundElementFactory kStreamBindableTargetFactory(KStreamBuilder kStreamBuilder,
BindingServiceProperties bindingServiceProperties, Codec codec,
CompositeMessageConverterFactory compositeMessageConverterFactory) {
return new KStreamBoundElementFactory(kStreamBuilder, bindingServiceProperties, codec,
compositeMessageConverterFactory);
}
}
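The streamsConfig bean above boils down to five entries. A runnable sketch with placeholder addresses; the localhost values are illustrative, not taken from this diff:
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
public class StreamsConfigSketch {
	public static void main(String[] args) {
		Properties props = new Properties();
		props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
		props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
		props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
		props.put(StreamsConfig.APPLICATION_ID_CONFIG, "default");
		props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
		// originals() echoes back the raw map the binder would pass along.
		System.out.println(new StreamsConfig(props).originals());
	}
}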

@@ -1,43 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.config;
/**
* @author Marius Bogoevici
*/
public class KStreamBindingProperties {
private KStreamConsumerProperties consumer = new KStreamConsumerProperties();
private KStreamProducerProperties producer = new KStreamProducerProperties();
public KStreamConsumerProperties getConsumer() {
return consumer;
}
public void setConsumer(KStreamConsumerProperties consumer) {
this.consumer = consumer;
}
public KStreamProducerProperties getProducer() {
return producer;
}
public void setProducer(KStreamProducerProperties producer) {
this.producer = producer;
}
}


@@ -1,43 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.config;
/**
* @author Soby Chacko
*/
public class KStreamCommonProperties {
private String keySerde;
private String valueSerde;
public String getKeySerde() {
return keySerde;
}
public void setKeySerde(String keySerde) {
this.keySerde = keySerde;
}
public String getValueSerde() {
return valueSerde;
}
public void setValueSerde(String valueSerde) {
this.valueSerde = valueSerde;
}
}


@@ -1,24 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.config;
/**
* @author Marius Bogoevici
*/
public class KStreamConsumerProperties extends KStreamCommonProperties {
}


@@ -1,61 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.config;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cloud.stream.binder.ExtendedBindingProperties;
/**
* @author Marius Bogoevici
*/
@ConfigurationProperties("spring.cloud.stream.kstream")
public class KStreamExtendedBindingProperties
implements ExtendedBindingProperties<KStreamConsumerProperties, KStreamProducerProperties> {
private Map<String, KStreamBindingProperties> bindings = new HashMap<>();
public Map<String, KStreamBindingProperties> getBindings() {
return this.bindings;
}
public void setBindings(Map<String, KStreamBindingProperties> bindings) {
this.bindings = bindings;
}
@Override
public KStreamConsumerProperties getExtendedConsumerProperties(String binding) {
if (this.bindings.containsKey(binding) && this.bindings.get(binding).getConsumer() != null) {
return this.bindings.get(binding).getConsumer();
}
else {
return new KStreamConsumerProperties();
}
}
@Override
public KStreamProducerProperties getExtendedProducerProperties(String binding) {
if (this.bindings.containsKey(binding) && this.bindings.get(binding).getProducer() != null) {
return this.bindings.get(binding).getProducer();
}
else {
return new KStreamProducerProperties();
}
}
}
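The keySerde/valueSerde settings from KStreamCommonProperties bind per destination under spring.cloud.stream.kstream.bindings.&lt;bindingName&gt;; bindings without an entry fall back to a fresh default properties object. A sketch of the producer-side overrides, using the binding name "output" and the serde classes exercised by the tests later in this diff (MyStreamApp is hypothetical):
// Sketch only: overrides resolved by getExtendedProducerProperties("output").
new SpringApplication(MyStreamApp.class).run(
"--spring.cloud.stream.kstream.bindings.output.producer.keySerde="
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde",
"--spring.cloud.stream.kstream.bindings.output.producer.valueSerde="
+ "org.apache.kafka.common.serialization.Serdes$LongSerde");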


@@ -1,24 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream.config;
/**
* @author Marius Bogoevici
*/
public class KStreamProducerProperties extends KStreamCommonProperties {
}


@@ -1,4 +0,0 @@
kstream:\
org.springframework.cloud.stream.binder.kstream.config.KStreamBinderConfiguration


@@ -1,4 +0,0 @@
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
org.springframework.cloud.stream.binder.kstream.config.KStreamBinderSupportAutoConfiguration


@@ -1,158 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.Windowed;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kstream.annotations.KStreamProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
/**
*
* @author Soby Chacko
*/
public class KStreamBinderPojoInputAndPrimitiveTypeOutputTests {
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, "counts-id");
private static Consumer<Integer, Long> consumer;
@BeforeClass
public static void setUp() throws Exception {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<Integer, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
@AfterClass
public static void tearDown() {
consumer.close();
}
@Test
public void testKStreamBinderWithPojoInputAndPrimitiveTypeOutput() throws Exception {
SpringApplication app = new SpringApplication(ProductCountApplication.class);
app.setWebEnvironment(false);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.cloud.stream.bindings.input.destination=foos",
"--spring.cloud.stream.bindings.output.destination=counts-id",
"--spring.cloud.stream.kstream.binder.configuration.commit.interval.ms=1000",
"--spring.cloud.stream.kstream.binder.configuration.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.kstream.binder.configuration.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.bindings.output.producer.headerMode=raw",
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
"--spring.cloud.stream.kstream.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
"--spring.cloud.stream.kstream.bindings.output.producer.valueSerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
"--spring.cloud.stream.kstream.binder.brokers=" + embeddedKafka.getBrokersAsString(),
"--spring.cloud.stream.kstream.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
receiveAndValidateFoo(context);
context.close();
}
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos");
template.sendDefault("{\"id\":\"123\"}");
ConsumerRecord<Integer, Long> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
assertThat(cr.key()).isEqualTo(123);
assertThat(cr.value()).isEqualTo(1L);
}
@EnableBinding(KStreamProcessor.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@StreamListener("input")
@SendTo("output")
public KStream<Integer, Long> process(KStream<Object, Product> input) {
return input
.filter(new Predicate<Object, Product>() {
@Override
public boolean test(Object key, Product product) {
return product.getId() == 123;
}
})
.map(new KeyValueMapper<Object, Product, KeyValue<Product, Product>>() {
@Override
public KeyValue<Product, Product> apply(Object key, Product value) {
return new KeyValue<>(value, value);
}
})
.groupByKey(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class))
.count(TimeWindows.of(5000), "id-count-store")
.toStream()
.map(new KeyValueMapper<Windowed<Product>, Long, KeyValue<Integer, Long>>() {
@Override
public KeyValue<Integer, Long> apply(Windowed<Product> key, Long value) {
return new KeyValue<>(key.key().id, value);
}
});
}
}
static class Product {
Integer id;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
}
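As an aside, the anonymous inner classes in the topology above transliterate directly to Java 8 lambdas; a sketch with identical behavior, reusing the Product type and imports from the test above:
// Sketch only: lambda form of ProductCountApplication.process().
@StreamListener("input")
@SendTo("output")
public KStream<Integer, Long> process(KStream<Object, Product> input) {
return input
.filter((key, product) -> product.getId() == 123)
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class))
.count(TimeWindows.of(5000), "id-count-store")
.toStream()
.map((key, value) -> new KeyValue<>(key.key().id, value));
}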


@@ -1,253 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.apache.kafka.streams.kstream.Windowed;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kstream.annotations.KStreamProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KStreamBuilderFactoryBean;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
/**
*
* @author Marius Bogoevici
* @author Soby Chacko
*/
public class KStreamBinderWordCountIntegrationTests {
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, "counts");
private static Consumer<String, String> consumer;
@BeforeClass
public static void setUp() throws Exception {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
}
@AfterClass
public static void tearDown() {
consumer.close();
}
@Test
public void testKStreamWordCountWithStringInputAndPojoOutput() throws Exception {
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
app.setWebEnvironment(false);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.cloud.stream.bindings.input.destination=words",
"--spring.cloud.stream.bindings.output.destination=counts",
"--spring.cloud.stream.bindings.output.contentType=application/json",
"--spring.cloud.stream.kstream.binder.configuration.commit.interval.ms=1000",
"--spring.cloud.stream.kstream.binder.configuration.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.kstream.binder.configuration.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.bindings.output.producer.headerMode=raw",
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
"--spring.cloud.stream.kstream.binder.brokers=" + embeddedKafka.getBrokersAsString(),
"--spring.cloud.stream.kstream.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
receiveAndValidate(context);
context.close();
}
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("words");
template.sendDefault("foobar");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
assertThat(cr.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
}
@EnableBinding(KStreamProcessor.class)
@EnableAutoConfiguration
@EnableConfigurationProperties(WordCountProcessorProperties.class)
public static class WordCountProcessorApplication {
@Autowired
private WordCountProcessorProperties processorProperties;
@Autowired
private KStreamBuilderFactoryBean kafkaStreams;
@StreamListener("input")
@SendTo("output")
public KStream<?, WordCount> process(KStream<Object, String> input) {
return input
.flatMapValues(new ValueMapper<String, Iterable<String>>() {
@Override
public List<String> apply(String value) {
return Arrays.asList(value.toLowerCase().split("\\W+"));
}
})
.map(new KeyValueMapper<Object, String, KeyValue<String, String>>() {
@Override
public KeyValue<String, String> apply(Object key, String value) {
return new KeyValue<>(value, value);
}
})
.groupByKey(Serdes.String(), Serdes.String())
.count(configuredTimeWindow(), processorProperties.getStoreName())
.toStream()
.map(new KeyValueMapper<Windowed<String>, Long, KeyValue<Object, WordCount>>() {
@Override
public KeyValue<Object, WordCount> apply(Windowed<String> key, Long value) {
return new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end())));
}
});
}
/**
* Constructs a {@link TimeWindows} instance from the configured window length,
* advancing by the configured interval when it is positive.
*
* @return the configured {@link TimeWindows}
*/
private TimeWindows configuredTimeWindow() {
return processorProperties.getAdvanceBy() > 0
? TimeWindows.of(processorProperties.getWindowLength()).advanceBy(processorProperties.getAdvanceBy())
: TimeWindows.of(processorProperties.getWindowLength());
}
}
@ConfigurationProperties(prefix = "kstream.word.count")
static class WordCountProcessorProperties {
private int windowLength = 5000;
private int advanceBy = 0;
private String storeName = "WordCounts";
int getWindowLength() {
return windowLength;
}
public void setWindowLength(int windowLength) {
this.windowLength = windowLength;
}
int getAdvanceBy() {
return advanceBy;
}
public void setAdvanceBy(int advanceBy) {
this.advanceBy = advanceBy;
}
String getStoreName() {
return storeName;
}
public void setStoreName(String storeName) {
this.storeName = storeName;
}
}
static class WordCount {
private String word;
private long count;
private Date start;
private Date end;
WordCount(String word, long count, Date start, Date end) {
this.word = word;
this.count = count;
this.start = start;
this.end = end;
}
public String getWord() {
return word;
}
public void setWord(String word) {
this.word = word;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public Date getStart() {
return start;
}
public void setStart(Date start) {
this.start = start;
}
public Date getEnd() {
return end;
}
public void setEnd(Date end) {
this.end = end;
}
}
}


@@ -1,184 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kstream.annotations.KStreamProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KStreamBuilderFactoryBean;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Soby Chacko
*/
public class KStreamInteractiveQueryIntegrationTests {
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, "counts-id");
private static Consumer<String, String> consumer;
@BeforeClass
public static void setUp() throws Exception {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
@AfterClass
public static void tearDown() {
consumer.close();
}
@Test
public void testKStreamBinderWithPojoInputAndStringOutput() throws Exception {
SpringApplication app = new SpringApplication(ProductCountApplication.class);
app.setWebEnvironment(false);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.cloud.stream.bindings.input.destination=foos",
"--spring.cloud.stream.bindings.output.destination=counts-id",
"--spring.cloud.stream.kstream.binder.configuration.commit.interval.ms=1000",
"--spring.cloud.stream.kstream.binder.configuration.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.kstream.binder.configuration.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.bindings.output.producer.headerMode=raw",
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
"--spring.cloud.stream.kstream.binder.brokers=" + embeddedKafka.getBrokersAsString(),
"--spring.cloud.stream.kstream.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
receiveAndValidateFoo(context);
context.close();
}
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos");
template.sendDefault("{\"id\":\"123\"}");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
ProductCountApplication.Foo foo = context.getBean(ProductCountApplication.Foo.class);
assertThat(foo.getProductStock(123)).isEqualTo(1L);
}
@EnableBinding(KStreamProcessor.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@Autowired
private KStreamBuilderFactoryBean kStreamBuilderFactoryBean;
@StreamListener("input")
@SendTo("output")
public KStream<?, String> process(KStream<Object, Product> input) {
return input
.filter(new Predicate<Object, Product>() {
@Override
public boolean test(Object key, Product product) {
return product.getId() == 123;
}
})
.map(new KeyValueMapper<Object, Product, KeyValue<Integer, Product>>() {
@Override
public KeyValue<Integer, Product> apply(Object key, Product value) {
return new KeyValue<>(value.id, value);
}
})
.groupByKey(new Serdes.IntegerSerde(), new JsonSerde<>(Product.class))
.count("prod-id-count-store")
.toStream()
.map(new KeyValueMapper<Integer, Long, KeyValue<Object, String>>() {
@Override
public KeyValue<Object, String> apply(Integer key, Long value) {
return new KeyValue<>(null, "Count for product with ID 123: " + value);
}
});
}
@Bean
public Foo foo(KStreamBuilderFactoryBean kStreamBuilderFactoryBean) {
return new Foo(kStreamBuilderFactoryBean);
}
static class Foo {
KStreamBuilderFactoryBean kStreamBuilderFactoryBean;
Foo(KStreamBuilderFactoryBean kStreamBuilderFactoryBean) {
this.kStreamBuilderFactoryBean = kStreamBuilderFactoryBean;
}
public Long getProductStock(Integer id) {
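// Interactive query: read the local state store materialized by
// count("prod-id-count-store") in the topology above.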
KafkaStreams streams = kStreamBuilderFactoryBean.getKafkaStreams();
ReadOnlyKeyValueStore<Object, Object> keyValueStore =
streams.store("prod-id-count-store", QueryableStoreTypes.keyValueStore());
return (Long)keyValueStore.get(id);
}
}
}
static class Product {
Integer id;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
}


@@ -1,153 +0,0 @@
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kstream;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.Windowed;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kstream.annotations.KStreamProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Marius Bogoevici
* @author Soby Chacko
*/
public class KStreamBinderPojoInputStringOutputIntegrationTests {
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, "counts-id");
private static Consumer<String, String> consumer;
@BeforeClass
public static void setUp() throws Exception {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
@AfterClass
public static void tearDown() {
consumer.close();
}
@Test
public void testKStreamBinderWithPojoInputAndStringOutput() throws Exception {
SpringApplication app = new SpringApplication(ProductCountApplication.class);
app.setWebEnvironment(false);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.cloud.stream.bindings.input.destination=foos",
"--spring.cloud.stream.bindings.output.destination=counts-id",
"--spring.cloud.stream.kstream.binder.configuration.commit.interval.ms=1000",
"--spring.cloud.stream.kstream.binder.configuration.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.kstream.binder.configuration.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.bindings.output.producer.headerMode=raw",
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
"--spring.cloud.stream.kstream.binder.brokers=" + embeddedKafka.getBrokersAsString(),
"--spring.cloud.stream.kstream.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
receiveAndValidateFoo(context);
context.close();
}
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos");
template.sendDefault("{\"id\":\"123\"}");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
}
@EnableBinding(KStreamProcessor.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@StreamListener("input")
@SendTo("output")
public KStream<?, String> process(KStream<Object, Product> input) {
return input
.filter(new Predicate<Object, Product>() {
@Override
public boolean test(Object key, Product product) {
return product.getId() == 123;
}
})
.map(new KeyValueMapper<Object, Product, KeyValue<Product, Product>>() {
@Override
public KeyValue<Product, Product> apply(Object key, Product value) {
return new KeyValue<>(value, value);
}
})
.groupByKey(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class))
.count(TimeWindows.of(5000), "id-count-store")
.toStream()
.map(new KeyValueMapper<Windowed<Product>, Long, KeyValue<Object, String>>() {
@Override
public KeyValue<Object, String> apply(Windowed<Product> key, Long value) {
return new KeyValue<>(null, "Count for product with ID 123: " + value);
}
});
}
}
static class Product {
Integer id;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
}


@@ -1,15 +0,0 @@
<configuration>
<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
</encoder>
</appender>
<logger name="org.springframework.integration.kafka" level="INFO"/>
<logger name="org.springframework.kafka" level="INFO"/>
<logger name="org.springframework.cloud.stream" level="INFO" />
<logger name="org.springframework.integration.channel" level="INFO" />
<root level="WARN">
<appender-ref ref="stdout"/>
</root>
</configuration>


@@ -1,29 +0,0 @@
#!/bin/bash
# Execute this script from a local checkout of Spring Cloud Stream.
# Usage: <script> <new-version> <parent-version-upper-bound>
#   $1 - new project version applied via versions:set
#   $2 - upper bound for the parent version range resolved by versions:update-parent
./mvnw versions:update-parent -DparentVersion="[0.0.1,$2]" -Pspring -DgenerateBackupPoms=false -DallowSnapshots=true
./mvnw versions:set -DnewVersion="$1" -DgenerateBackupPoms=false
lines=$(find . -name 'pom.xml' | xargs egrep "SNAPSHOT" | grep -v regex | wc -l)
if [ $lines -eq 0 ]; then
echo "No snapshots found"
else
echo "Snapshots found."
fi
lines=$(find . -name 'pom.xml' | xargs egrep "M[0-9]" | grep -v regex | wc -l)
if [ $lines -eq 0 ]; then
echo "No milestones found"
else
echo "Milestones found."
fi
lines=$(find . -name 'pom.xml' | xargs egrep "RC[0-9]" | grep -v regex | wc -l)
if [ $lines -eq 0 ]; then
echo "No release candidates found"
else
echo "Release candidates found."
fi