Compare commits


5 Commits

Author            SHA1        Message                                                           Date
Marius Bogoevici  9471976ddd  Release 2.0.0.M1                                                  2017-06-23 17:03:39 -04:00
Marius Bogoevici  4c4873b3c1  Re-add Spring Kafka version                                       2017-06-23 13:17:59 -04:00
Marius Bogoevici  971bc96962  Use Spring Boot and dependencies provided by Spring Cloud Build  2017-06-23 13:12:46 -04:00
Marius Bogoevici  47a225d02a  Merge branch 'master' into 2.0.x                                  2017-06-23 12:44:14 -04:00
Marius Bogoevici  e440378e44  Update version to 2.0.0.BUILD-SNAPSHOT                            2017-05-21 11:17:58 -04:00
                              - Update Spring Boot to version 2.0.0.BUILD-SNAPSHOT
                              - Set Kafka version to 0.10.2
                              - Remove tests for Kafka 0.9 and 0.10.0
22 changed files with 242 additions and 749 deletions

pom.xml

@@ -2,42 +2,31 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M1</version>
<version>2.0.0.M1</version>
<packaging>pom</packaging>
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-build</artifactId>
<version>1.3.3.RELEASE</version>
<version>2.0.0.M1</version>
<relativePath />
</parent>
<properties>
<java.version>1.7</java.version>
<kafka.version>0.10.1.1</kafka.version>
<spring-kafka.version>1.1.6.RELEASE</spring-kafka.version>
<spring-integration-kafka.version>2.1.1.RELEASE</spring-integration-kafka.version>
<spring-cloud-stream.version>1.3.0.M1</spring-cloud-stream.version>
<kafka.version>0.10.2.0</kafka.version>
<spring-kafka.version>2.0.0.M2</spring-kafka.version>
<spring-integration-kafka.version>3.0.0.M1</spring-integration-kafka.version>
<spring-cloud-stream.version>2.0.0.M1</spring-cloud-stream.version>
</properties>
<modules>
<module>spring-cloud-stream-binder-kafka</module>
<module>spring-cloud-starter-stream-kafka</module>
<module>spring-cloud-stream-binder-kafka-docs</module>
<module>spring-cloud-stream-binder-kafka-0.10.1-test</module>
<module>spring-cloud-stream-binder-kafka-0.10.2-test</module>
<module>spring-cloud-stream-binder-kafka-core</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream</artifactId>
@@ -72,11 +61,6 @@
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>${spring-kafka.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
@@ -102,7 +86,7 @@
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
@@ -147,7 +131,7 @@
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-build-tools</artifactId>
<version>${project.parent.version}</version>
<version>2.0.0.M1</version>
</dependency>
</dependencies>
<executions>

spring-cloud-starter-stream-kafka/pom.xml

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M1</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-starter-stream-kafka</artifactId>
<description>Spring Cloud Starter Stream Kafka</description>

spring-cloud-stream-binder-kafka-0.10.1-test/pom.xml

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M1</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-0.10.1-test</artifactId>
<description>Spring Cloud Stream Kafka Binder 0.10.1 Tests</description>
@@ -54,6 +54,7 @@
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
<version>1.1.6.RELEASE</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -67,13 +68,6 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-0.10.2-test</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-test</artifactId>

Kafka10TestBinder.java

@@ -1,5 +1,5 @@
/*
* Copyright 2015-2017 the original author or authors.
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,16 +16,11 @@
package org.springframework.cloud.stream.binder.kafka;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.admin.AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.admin.Kafka10AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.provisioning.ConsumerDestination;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.kafka.support.LoggingProducerListener;
import org.springframework.kafka.support.ProducerListener;
@@ -39,7 +34,6 @@ import org.springframework.kafka.support.ProducerListener;
*/
public class Kafka10TestBinder extends AbstractKafkaTestBinder {
@SuppressWarnings({ "rawtypes", "unchecked" })
public Kafka10TestBinder(KafkaBinderConfigurationProperties binderConfiguration) {
try {
AdminUtilsOperation adminUtilsOperation = new Kafka10AdminUtilsOperation();
@@ -47,26 +41,13 @@ public class Kafka10TestBinder extends AbstractKafkaTestBinder {
new KafkaTopicProvisioner(binderConfiguration, adminUtilsOperation);
provisioningProvider.afterPropertiesSet();
KafkaMessageChannelBinder binder = new KafkaMessageChannelBinder(binderConfiguration,
provisioningProvider) {
/*
* Some tests use multiple instance indexes for the same topic; we need to make
* the error infrastructure beans unique.
*/
@Override
protected String errorsBaseName(ConsumerDestination destination, String group,
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties) {
return super.errorsBaseName(destination, group, consumerProperties) + "-"
+ consumerProperties.getInstanceIndex();
}
};
KafkaMessageChannelBinder binder = new KafkaMessageChannelBinder(binderConfiguration, provisioningProvider);
binder.setCodec(AbstractKafkaTestBinder.getCodec());
ProducerListener producerListener = new LoggingProducerListener();
binder.setProducerListener(producerListener);
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
GenericApplicationContext context = new GenericApplicationContext();
context.refresh();
binder.setApplicationContext(context);
binder.afterPropertiesSet();
this.setBinder(binder);
@@ -76,10 +57,4 @@ public class Kafka10TestBinder extends AbstractKafkaTestBinder {
}
}
@Configuration
@EnableIntegration
static class Config {
}
}

Kafka_0_10_1_BinderTests.java

@@ -62,15 +62,13 @@ import static org.junit.Assert.assertTrue;
/**
* Integration tests for the {@link KafkaMessageChannelBinder}.
*
* This test specifically tests for the 0.10.1.x version of Kafka.
*
* @author Eric Bottard
* @author Marius Bogoevici
* @author Mark Fisher
* @author Ilayaperumal Gopinathan
*/
public class Kafka_0_10_1_BinderTests extends Kafka_0_10_2_BinderTests {
public class Kafka_0_10_1_BinderTests extends KafkaBinderTests {
private final String CLASS_UNDER_TEST_NAME = KafkaMessageChannelBinder.class.getSimpleName();

spring-cloud-stream-binder-kafka-0.10.2-test/pom.xml (deleted)

@@ -1,120 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M1</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-0.10.2-test</artifactId>
<description>Spring Cloud Stream Kafka Binder 0.10.2 Tests</description>
<url>http://projects.spring.io/spring-cloud</url>
<organization>
<name>Pivotal Software, Inc.</name>
<url>http://www.spring.io</url>
</organization>
<properties>
<main.basedir>${basedir}/../..</main.basedir>
<kafka.version>0.10.2.1</kafka.version>
<spring-kafka.version>1.2.2.RELEASE</spring-kafka.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-schema</artifactId>
<version>${spring-cloud-stream.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>3.2.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry</artifactId>
<version>3.2.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<repositories>
<repository>
<id>confluent</id>
<url>http://packages.confluent.io/maven/</url>
</repository>
</repositories>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

Kafka_0_10_2_BinderTests.java (deleted)

@@ -1,241 +0,0 @@
/*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import io.confluent.kafka.schemaregistry.rest.SchemaRegistryConfig;
import io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication;
import kafka.utils.ZKStringSerializer$;
import kafka.utils.ZkUtils;
import org.I0Itec.zkclient.ZkClient;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.assertj.core.api.Assertions;
import org.eclipse.jetty.server.Server;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.Binding;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.Spy;
import org.springframework.cloud.stream.binder.kafka.admin.Kafka10AdminUtilsOperation;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.kafka.test.core.BrokerAddress;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.SubscribableChannel;
import org.springframework.messaging.support.MessageBuilder;
import static org.junit.Assert.assertTrue;
/**
* Integration tests for the {@link KafkaMessageChannelBinder}.
*
* This test specifically tests for the 0.10.2.x version of Kafka.
*
* @author Eric Bottard
* @author Marius Bogoevici
* @author Mark Fisher
* @author Ilayaperumal Gopinathan
*/
public class Kafka_0_10_2_BinderTests extends KafkaBinderTests {
private final String CLASS_UNDER_TEST_NAME = KafkaMessageChannelBinder.class.getSimpleName();
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, 10);
private Kafka10TestBinder binder;
private Kafka10AdminUtilsOperation adminUtilsOperation = new Kafka10AdminUtilsOperation();
@Override
protected void binderBindUnbindLatency() throws InterruptedException {
Thread.sleep(500);
}
@Override
protected Kafka10TestBinder getBinder() {
if (binder == null) {
KafkaBinderConfigurationProperties binderConfiguration = createConfigurationProperties();
binder = new Kafka10TestBinder(binderConfiguration);
}
return binder;
}
protected KafkaBinderConfigurationProperties createConfigurationProperties() {
KafkaBinderConfigurationProperties binderConfiguration = new KafkaBinderConfigurationProperties();
BrokerAddress[] brokerAddresses = embeddedKafka.getBrokerAddresses();
List<String> bAddresses = new ArrayList<>();
for (BrokerAddress bAddress : brokerAddresses) {
bAddresses.add(bAddress.toString());
}
String[] foo = new String[bAddresses.size()];
binderConfiguration.setBrokers(bAddresses.toArray(foo));
binderConfiguration.setZkNodes(embeddedKafka.getZookeeperConnectionString());
return binderConfiguration;
}
@Override
protected int partitionSize(String topic) {
return consumerFactory().createConsumer().partitionsFor(topic).size();
}
@Override
protected ZkUtils getZkUtils(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties) {
final ZkClient zkClient = new ZkClient(kafkaBinderConfigurationProperties.getZkConnectionString(),
kafkaBinderConfigurationProperties.getZkSessionTimeout(), kafkaBinderConfigurationProperties.getZkConnectionTimeout(),
ZKStringSerializer$.MODULE$);
return new ZkUtils(zkClient, null, false);
}
@Override
protected void invokeCreateTopic(ZkUtils zkUtils, String topic, int partitions, int replicationFactor, Properties topicConfig) {
adminUtilsOperation.invokeCreateTopic(zkUtils, topic, partitions, replicationFactor, new Properties());
}
@Override
protected int invokePartitionSize(String topic, ZkUtils zkUtils) {
return adminUtilsOperation.partitionSize(topic, zkUtils);
}
@Override
public String getKafkaOffsetHeaderKey() {
return KafkaHeaders.OFFSET;
}
@Override
protected Binder getBinder(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties) {
return new Kafka10TestBinder(kafkaBinderConfigurationProperties);
}
@Before
public void init() {
String multiplier = System.getenv("KAFKA_TIMEOUT_MULTIPLIER");
if (multiplier != null) {
timeoutMultiplier = Double.parseDouble(multiplier);
}
}
@Override
protected boolean usesExplicitRouting() {
return false;
}
@Override
protected String getClassUnderTestName() {
return CLASS_UNDER_TEST_NAME;
}
@Override
public Spy spyOn(final String name) {
throw new UnsupportedOperationException("'spyOn' is not used by Kafka tests");
}
private ConsumerFactory<byte[], byte[]> consumerFactory() {
Map<String, Object> props = new HashMap<>();
KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, configurationProperties.getKafkaConnectionString());
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
props.put(ConsumerConfig.GROUP_ID_CONFIG, "TEST-CONSUMER-GROUP");
Deserializer<byte[]> valueDecoder = new ByteArrayDeserializer();
Deserializer<byte[]> keyDecoder = new ByteArrayDeserializer();
return new DefaultKafkaConsumerFactory<>(props, keyDecoder, valueDecoder);
}
@Test
@SuppressWarnings("unchecked")
public void testCustomAvroSerialization() throws Exception {
KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
final ZkClient zkClient = new ZkClient(configurationProperties.getZkConnectionString(),
configurationProperties.getZkSessionTimeout(), configurationProperties.getZkConnectionTimeout(),
ZKStringSerializer$.MODULE$);
final ZkUtils zkUtils = new ZkUtils(zkClient, null, false);
Map<String, Object> schemaRegistryProps = new HashMap<>();
schemaRegistryProps.put("kafkastore.connection.url", configurationProperties.getZkConnectionString());
schemaRegistryProps.put("listeners", "http://0.0.0.0:8082");
schemaRegistryProps.put("port", "8082");
schemaRegistryProps.put("kafkastore.topic", "_schemas");
SchemaRegistryConfig config = new SchemaRegistryConfig(schemaRegistryProps);
SchemaRegistryRestApplication app = new SchemaRegistryRestApplication(config);
Server server = app.createServer();
server.start();
long endTime = System.currentTimeMillis() + 5000;
while(true) {
if (server.isRunning()) {
break;
}
else if (System.currentTimeMillis() > endTime) {
Assertions.fail("Kafka Schema Registry Server failed to start");
}
}
User1 firstOutboundFoo = new User1();
String userName1 = "foo-name" + UUID.randomUUID().toString();
String favColor1 = "foo-color" + UUID.randomUUID().toString();
firstOutboundFoo.setName(userName1);
firstOutboundFoo.setFavoriteColor(favColor1);
Message<?> message = MessageBuilder.withPayload(firstOutboundFoo).build();
SubscribableChannel moduleOutputChannel = new DirectChannel();
String testTopicName = "existing" + System.currentTimeMillis();
invokeCreateTopic(zkUtils, testTopicName, 6, 1, new Properties());
configurationProperties.setAutoAddPartitions(true);
Binder binder = getBinder(configurationProperties);
QueueChannel moduleInputChannel = new QueueChannel();
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
producerProperties.getExtension().getConfiguration().put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
producerProperties.getExtension().getConfiguration().put("schema.registry.url", "http://localhost:8082");
producerProperties.setUseNativeEncoding(true);
Binding<MessageChannel> producerBinding = binder.bindProducer(testTopicName, moduleOutputChannel, producerProperties);
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
consumerProperties.getExtension().getConfiguration().put("value.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
consumerProperties.getExtension().getConfiguration().put("schema.registry.url", "http://localhost:8082");
Binding<MessageChannel> consumerBinding = binder.bindConsumer(testTopicName, "test", moduleInputChannel, consumerProperties);
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
moduleOutputChannel.send(message);
Message<?> inbound = receive(moduleInputChannel);
Assertions.assertThat(inbound).isNotNull();
assertTrue(message.getPayload() instanceof User1);
User1 receivedUser = (User1) message.getPayload();
Assertions.assertThat(receivedUser.getName()).isEqualTo(userName1);
Assertions.assertThat(receivedUser.getFavoriteColor()).isEqualTo(favColor1);
producerBinding.unbind();
consumerBinding.unbind();
}
}

spring-cloud-stream-binder-kafka-core/pom.xml

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M1</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<description>Spring Cloud Stream Kafka Binder Core</description>

KafkaBinderConfigurationProperties.java

@@ -16,18 +16,10 @@
package org.springframework.cloud.stream.binder.kafka.properties;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
/**
@@ -39,16 +31,13 @@ import org.springframework.util.StringUtils;
@ConfigurationProperties(prefix = "spring.cloud.stream.kafka.binder")
public class KafkaBinderConfigurationProperties {
@Autowired(required = false)
private KafkaProperties kafkaProperties;
private String[] zkNodes = new String[] {"localhost"};
private String[] zkNodes = new String[] { "localhost" };
private Map<String, String> configuration = new HashMap<>();
private Map<String, Object> configuration = new HashMap<>();
private String defaultZkPort = "2181";
private String[] brokers = new String[] { "localhost" };
private String[] brokers = new String[] {"localhost"};
private String defaultBrokerPort = "9092";
@@ -260,78 +249,16 @@ public class KafkaBinderConfigurationProperties {
this.socketBufferSize = socketBufferSize;
}
public Map<String, String> getConfiguration() {
public Map<String, Object> getConfiguration() {
return configuration;
}
public void setConfiguration(Map<String, String> configuration) {
public void setConfiguration(Map<String, Object> configuration) {
this.configuration = configuration;
}
public Map<String, Object> getConsumerConfiguration() {
Map<String, Object> consumerConfiguration = new HashMap<>();
// If Spring Boot Kafka properties are present, add them with lowest precedence
if (this.kafkaProperties != null) {
consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties());
}
// Copy configured binder properties
for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) {
if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) {
consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
}
}
// Override Spring Boot bootstrap server setting if left to default with the value
// configured in the binder
if (ObjectUtils.isEmpty(consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
else {
Object boostrapServersConfig = consumerConfiguration.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (boostrapServersConfig instanceof List) {
@SuppressWarnings("unchecked")
List<String> bootStrapServers = (List<String>) consumerConfiguration
.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
consumerConfiguration.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
}
}
return Collections.unmodifiableMap(consumerConfiguration);
}
public Map<String, Object> getProducerConfiguration() {
Map<String, Object> producerConfiguration = new HashMap<>();
// If Spring Boot Kafka properties are present, add them with lowest precedence
if (this.kafkaProperties != null) {
producerConfiguration.putAll(this.kafkaProperties.buildProducerProperties());
}
// Copy configured binder properties
for (Map.Entry<String, String> configurationEntry : configuration.entrySet()) {
if (ProducerConfig.configNames().contains(configurationEntry.getKey())) {
producerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
}
}
// Override Spring Boot bootstrap server setting if left to default with the value
// configured in the binder
if (ObjectUtils.isEmpty(producerConfiguration.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
producerConfiguration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
else {
Object boostrapServersConfig = producerConfiguration.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (boostrapServersConfig instanceof List) {
@SuppressWarnings("unchecked")
List<String> bootStrapServers = (List<String>) producerConfiguration
.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
producerConfiguration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConnectionString());
}
}
}
return Collections.unmodifiableMap(producerConfiguration);
}
public JaasLoginModuleConfiguration getJaas() {
return this.jaas;
return jaas;
}
public void setJaas(JaasLoginModuleConfiguration jaas) {

KafkaConsumerProperties.java

@@ -23,9 +23,7 @@ import java.util.Map;
* @author Marius Bogoevici
* @author Ilayaperumal Gopinathan
*
* <p>
* Thanks to Laszlo Szabo for providing the initial patch for generic property support.
* </p>
* <p>Thanks to Laszlo Szabo for providing the initial patch for generic property support.</p>
*/
public class KafkaConsumerProperties {
@@ -35,6 +33,8 @@ public class KafkaConsumerProperties {
private Boolean autoCommitOnError;
private boolean resetOffsets;
private StartOffset startOffset;
private boolean enableDlq;
@@ -53,6 +53,14 @@ public class KafkaConsumerProperties {
this.autoCommitOffset = autoCommitOffset;
}
public boolean isResetOffsets() {
return this.resetOffsets;
}
public void setResetOffsets(boolean resetOffsets) {
this.resetOffsets = resetOffsets;
}
public StartOffset getStartOffset() {
return this.startOffset;
}
@@ -94,8 +102,7 @@ public class KafkaConsumerProperties {
}
public enum StartOffset {
earliest(-2L),
latest(-1L);
earliest(-2L), latest(-1L);
private final long referencePoint;

spring-cloud-stream-binder-kafka-docs/pom.xml

@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M1</version>
<version>2.0.0.M1</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-docs</artifactId>

spring-cloud-stream-binder-kafka-docs (consumer properties documentation, AsciiDoc)

@@ -146,8 +146,12 @@ recoveryInterval::
The interval between connection recovery attempts, in milliseconds.
+
Default: `5000`.
resetOffsets::
Whether to reset offsets on the consumer to the value provided by `startOffset`.
+
Default: `false`.
startOffset::
The starting offset for new groups.
The starting offset for new groups, or when `resetOffsets` is `true`.
Allowed values: `earliest`, `latest`.
If the consumer group is set explicitly for the consumer 'binding' (via `spring.cloud.stream.bindings.<channelName>.group`), then 'startOffset' is set to `earliest`; otherwise it is set to `latest` for the `anonymous` consumer group.
+

spring-cloud-stream-binder-kafka/pom.xml

@@ -10,13 +10,14 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>1.3.0.M1</version>
<version>2.0.0.M1</version>
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<version>2.0.0.M1</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -57,6 +58,7 @@
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
<version>${spring-integration-kafka.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>

KafkaBinderMetrics.java

@@ -112,8 +112,8 @@ public class KafkaBinderMetrics implements PublicMetrics {
Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
if (!ObjectUtils.isEmpty(binderConfigurationProperties.getConsumerConfiguration())) {
props.putAll(binderConfigurationProperties.getConsumerConfiguration());
if (!ObjectUtils.isEmpty(binderConfigurationProperties.getConfiguration())) {
props.putAll(binderConfigurationProperties.getConfiguration());
}
if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,

KafkaMessageChannelBinder.java

@@ -52,22 +52,20 @@ import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.integration.core.MessageProducer;
import org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter;
import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler;
import org.springframework.integration.kafka.support.RawRecordHeaderErrorMessageStrategy;
import org.springframework.integration.support.ErrorMessageStrategy;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.AbstractMessageListenerContainer;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ErrorHandler;
import org.springframework.kafka.listener.config.ContainerProperties;
import org.springframework.kafka.support.ProducerListener;
import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.support.TopicPartitionInitialOffset;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.MessagingException;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
@@ -89,18 +87,17 @@ import org.springframework.util.concurrent.ListenableFutureCallback;
* @author Doug Saus
*/
public class KafkaMessageChannelBinder extends
AbstractMessageChannelBinder<ExtendedConsumerProperties<KafkaConsumerProperties>,
ExtendedProducerProperties<KafkaProducerProperties>, KafkaTopicProvisioner>
AbstractMessageChannelBinder<ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>, KafkaTopicProvisioner>
implements ExtendedPropertiesBinder<MessageChannel, KafkaConsumerProperties, KafkaProducerProperties> {
private final KafkaBinderConfigurationProperties configurationProperties;
private final Map<String, TopicInformation> topicsInUse = new HashMap<>();
private ProducerListener<byte[], byte[]> producerListener;
private KafkaExtendedBindingProperties extendedBindingProperties = new KafkaExtendedBindingProperties();
private final Map<String, TopicInformation> topicsInUse = new HashMap<>();
public KafkaMessageChannelBinder(KafkaBinderConfigurationProperties configurationProperties,
KafkaTopicProvisioner provisioningProvider) {
super(false, headersToMap(configurationProperties), provisioningProvider);
@@ -183,8 +180,8 @@ public class KafkaMessageChannelBinder extends
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
props.put(ProducerConfig.ACKS_CONFIG, String.valueOf(this.configurationProperties.getRequiredAcks()));
if (!ObjectUtils.isEmpty(configurationProperties.getProducerConfiguration())) {
props.putAll(configurationProperties.getProducerConfiguration());
if (!ObjectUtils.isEmpty(configurationProperties.getConfiguration())) {
props.putAll(configurationProperties.getConfiguration());
}
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configurationProperties.getKafkaConnectionString());
@@ -257,26 +254,20 @@ public class KafkaMessageChannelBinder extends
? new ContainerProperties(destination.getName())
: new ContainerProperties(topicPartitionInitialOffsets);
int concurrency = Math.min(extendedConsumerProperties.getConcurrency(), listenedPartitions.size());
@SuppressWarnings("rawtypes")
final ConcurrentMessageListenerContainer<?, ?> messageListenerContainer =
new ConcurrentMessageListenerContainer(
consumerFactory, containerProperties) {
final ConcurrentMessageListenerContainer<?, ?> messageListenerContainer = new ConcurrentMessageListenerContainer(
consumerFactory, containerProperties) {
@Override
public void stop(Runnable callback) {
super.stop(callback);
}
};
messageListenerContainer.setConcurrency(concurrency);
messageListenerContainer.getContainerProperties()
.setAckOnError(isAutoCommitOnError(extendedConsumerProperties));
if (!extendedConsumerProperties.getExtension().isAutoCommitOffset()) {
messageListenerContainer.getContainerProperties()
.setAckMode(AbstractMessageListenerContainer.AckMode.MANUAL);
messageListenerContainer.getContainerProperties().setAckOnError(false);
}
else {
messageListenerContainer.getContainerProperties()
.setAckOnError(isAutoCommitOnError(extendedConsumerProperties));
}
if (this.logger.isDebugEnabled()) {
this.logger.debug(
@@ -289,57 +280,37 @@ public class KafkaMessageChannelBinder extends
final KafkaMessageDrivenChannelAdapter<?, ?> kafkaMessageDrivenChannelAdapter = new KafkaMessageDrivenChannelAdapter<>(
messageListenerContainer);
kafkaMessageDrivenChannelAdapter.setBeanFactory(this.getBeanFactory());
ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, consumerGroup,
extendedConsumerProperties);
if (extendedConsumerProperties.getMaxAttempts() > 1) {
kafkaMessageDrivenChannelAdapter.setRetryTemplate(buildRetryTemplate(extendedConsumerProperties));
kafkaMessageDrivenChannelAdapter.setRecoveryCallback(errorInfrastructure.getRecoverer());
}
else {
kafkaMessageDrivenChannelAdapter.setErrorChannel(errorInfrastructure.getErrorChannel());
}
return kafkaMessageDrivenChannelAdapter;
}
@Override
protected ErrorMessageStrategy getErrorMessageStrategy() {
return new RawRecordHeaderErrorMessageStrategy();
}
@Override
protected MessageHandler getErrorMessageHandler(final ConsumerDestination destination, final String group,
final ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties) {
final RetryTemplate retryTemplate = buildRetryTemplate(extendedConsumerProperties);
kafkaMessageDrivenChannelAdapter.setRetryTemplate(retryTemplate);
if (extendedConsumerProperties.getExtension().isEnableDlq()) {
DefaultKafkaProducerFactory<byte[], byte[]> producerFactory = getProducerFactory(
new ExtendedProducerProperties<>(new KafkaProducerProperties()));
final KafkaTemplate<byte[], byte[]> kafkaTemplate = new KafkaTemplate<>(producerFactory);
return new MessageHandler() {
messageListenerContainer.getContainerProperties().setErrorHandler(new ErrorHandler() {
@Override
public void handleMessage(Message<?> message) throws MessagingException {
final ConsumerRecord<?, ?> record = message.getHeaders()
.get(KafkaMessageDrivenChannelAdapter.KAFKA_RAW_DATA, ConsumerRecord.class);
final byte[] key = record.key() != null ? Utils.toArray(ByteBuffer.wrap((byte[]) record.key()))
public void handle(Exception thrownException, final ConsumerRecord message) {
final byte[] key = message.key() != null ? Utils.toArray(ByteBuffer.wrap((byte[]) message.key()))
: null;
final byte[] payload = record.value() != null
? Utils.toArray(ByteBuffer.wrap((byte[]) record.value())) : null;
final byte[] payload = message.value() != null
? Utils.toArray(ByteBuffer.wrap((byte[]) message.value())) : null;
String dlqName = StringUtils.hasText(extendedConsumerProperties.getExtension().getDlqName())
? extendedConsumerProperties.getExtension().getDlqName()
: "error." + destination.getName() + "." + group;
ListenableFuture<SendResult<byte[], byte[]>> sentDlq = kafkaTemplate.send(dlqName,
record.partition(), key, payload);
message.partition(), key, payload);
sentDlq.addCallback(new ListenableFutureCallback<SendResult<byte[], byte[]>>() {
StringBuilder sb = new StringBuilder().append(" a message with key='")
.append(toDisplayString(ObjectUtils.nullSafeToString(key), 50)).append("'")
.append(" and payload='")
.append(toDisplayString(ObjectUtils.nullSafeToString(payload), 50))
.append("'").append(" received from ")
.append(record.partition());
.append(message.partition());
@Override
public void onFailure(Throwable ex) {
KafkaMessageChannelBinder.this.logger.error(
"Error sending to DLQ " + sb.toString(), ex);
"Error sending to DLQ" + sb.toString(), ex);
}
@Override
@@ -349,12 +320,11 @@ public class KafkaMessageChannelBinder extends
"Sent to DLQ " + sb.toString());
}
}
});
}
};
});
}
return null;
return kafkaMessageDrivenChannelAdapter;
}
private ConsumerFactory<?, ?> createKafkaConsumerFactory(boolean anonymous, String consumerGroup,
@@ -367,8 +337,8 @@ public class KafkaMessageChannelBinder extends
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, anonymous ? "latest" : "earliest");
props.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup);
if (!ObjectUtils.isEmpty(configurationProperties.getConsumerConfiguration())) {
props.putAll(configurationProperties.getConsumerConfiguration());
if (!ObjectUtils.isEmpty(configurationProperties.getConfiguration())) {
props.putAll(configurationProperties.getConfiguration());
}
if (ObjectUtils.isEmpty(props.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configurationProperties.getKafkaConnectionString());

KafkaBinderConfiguration.java

@@ -18,11 +18,15 @@ package org.springframework.cloud.stream.binder.kafka.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.utils.AppInfoParser;
@@ -31,6 +35,7 @@ import org.springframework.boot.actuate.endpoint.PublicMetrics;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.kafka.KafkaBinderHealthIndicator;
@@ -71,7 +76,8 @@ import org.springframework.util.ObjectUtils;
*/
@Configuration
@ConditionalOnMissingBean(Binder.class)
@Import({ KryoCodecAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class})
@Import({ KryoCodecAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class,
KafkaBinderConfiguration.KafkaPropertiesConfiguration.class })
@EnableConfigurationProperties({ KafkaBinderConfigurationProperties.class, KafkaExtendedBindingProperties.class })
public class KafkaBinderConfiguration {
@@ -121,8 +127,8 @@ public class KafkaBinderConfiguration {
Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
if (!ObjectUtils.isEmpty(configurationProperties.getConsumerConfiguration())) {
props.putAll(configurationProperties.getConsumerConfiguration());
if (!ObjectUtils.isEmpty(configurationProperties.getConfiguration())) {
props.putAll(configurationProperties.getConfiguration());
}
if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configurationProperties.getKafkaConnectionString());
@@ -179,4 +185,52 @@ public class KafkaBinderConfiguration {
private JaasLoginModuleConfiguration zookeeper;
}
@ConditionalOnClass(name = "org.springframework.boot.autoconfigure.kafka.KafkaProperties")
public static class KafkaPropertiesConfiguration {
// KafkaProperties can still be unavailable if KafkaAutoConfiguration is disabled.
@Autowired(required = false)
private KafkaProperties kafkaProperties;
@Autowired
private KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties;
@PostConstruct
public void init() {
Map<String, Object> configuration = this.kafkaBinderConfigurationProperties.getConfiguration();
if (this.kafkaProperties != null) {
for (Map.Entry<String, String> properties : this.kafkaProperties.getProperties().entrySet()) {
if (!configuration.containsKey(properties.getKey())) {
configuration.put(properties.getKey(), properties.getValue());
}
}
for (Map.Entry<String, Object> producerProperties : this.kafkaProperties.buildProducerProperties()
.entrySet()) {
if (!configuration.containsKey(producerProperties.getKey())) {
configuration.put(producerProperties.getKey(), producerProperties.getValue());
}
}
for (Map.Entry<String, Object> consumerProperties : this.kafkaProperties.buildConsumerProperties()
.entrySet()) {
if (!configuration.containsKey(consumerProperties.getKey())) {
configuration.put(consumerProperties.getKey(), consumerProperties.getValue());
}
}
if (ObjectUtils.isEmpty(configuration.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
configuration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
kafkaBinderConfigurationProperties.getKafkaConnectionString());
}
else {
@SuppressWarnings("unchecked")
List<String> bootStrapServers = (List<String>) configuration
.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
configuration.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
kafkaBinderConfigurationProperties.getKafkaConnectionString());
}
}
}
}
}
}

AbstractKafkaTestBinder.java

@@ -1,5 +1,5 @@
/*
* Copyright 2014-2017 the original author or authors.
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,25 +17,21 @@ package org.springframework.cloud.stream.binder.kafka;
import java.util.List;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Registration;
import org.springframework.cloud.stream.binder.AbstractTestBinder;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.integration.channel.PublishSubscribeChannel;
import org.springframework.integration.codec.Codec;
import org.springframework.integration.codec.kryo.KryoRegistrar;
import org.springframework.integration.codec.kryo.PojoCodec;
import org.springframework.integration.context.IntegrationContextUtils;
import org.springframework.integration.tuple.TupleKryoRegistrar;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Registration;
/**
* @author Soby Chacko
* @author Gary Russell
*/
public abstract class AbstractKafkaTestBinder extends
AbstractTestBinder<KafkaMessageChannelBinder, ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>> {
@@ -45,12 +41,6 @@ public abstract class AbstractKafkaTestBinder extends
// do nothing - the rule will take care of that
}
protected void addErrorChannel(GenericApplicationContext context) {
PublishSubscribeChannel errorChannel = new PublishSubscribeChannel();
context.getBeanFactory().initializeBean(errorChannel, IntegrationContextUtils.ERROR_CHANNEL_BEAN_NAME);
context.getBeanFactory().registerSingleton(IntegrationContextUtils.ERROR_CHANNEL_BEAN_NAME, errorChannel);
}
protected static Codec getCodec() {
return new PojoCodec(new TupleRegistrar());
}

KafkaBinderAutoConfigurationPropertiesTest.java

@@ -77,9 +77,7 @@ public class KafkaBinderAutoConfigurationPropertiesTest {
producerFactory);
assertTrue(producerConfigs.get("batch.size").equals(10));
assertTrue(producerConfigs.get("key.serializer").equals(LongSerializer.class));
assertTrue(producerConfigs.get("key.deserializer") == null);
assertTrue(producerConfigs.get("value.serializer").equals(LongSerializer.class));
assertTrue(producerConfigs.get("value.deserializer") == null);
assertTrue(producerConfigs.get("compression.type").equals("snappy"));
List<String> bootstrapServers = new ArrayList<>();
bootstrapServers.add("10.98.09.199:9092");
@@ -98,9 +96,7 @@ public class KafkaBinderAutoConfigurationPropertiesTest {
Map<String, Object> consumerConfigs = (Map<String, Object>) ReflectionUtils.getField(consumerFactoryConfigField,
consumerFactory);
assertTrue(consumerConfigs.get("key.deserializer").equals(LongDeserializer.class));
assertTrue(consumerConfigs.get("key.serializer") == null);
assertTrue(consumerConfigs.get("value.deserializer").equals(LongDeserializer.class));
assertTrue(consumerConfigs.get("value.serialized") == null);
assertTrue(consumerConfigs.get("group.id").equals("groupIdFromBootConfig"));
assertTrue(consumerConfigs.get("auto.offset.reset").equals("earliest"));
assertTrue((((List<String>) consumerConfigs.get("bootstrap.servers")).containsAll(bootstrapServers)));

KafkaBinderTests.java

@@ -24,9 +24,9 @@ import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import kafka.utils.ZKStringSerializer$;
import kafka.utils.ZkUtils;
import org.I0Itec.zkclient.ZkClient;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
@@ -34,6 +34,7 @@ import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.assertj.core.api.Condition;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -41,6 +42,7 @@ import org.junit.rules.ExpectedException;
import org.springframework.beans.DirectFieldAccessor;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.Binding;
import org.springframework.cloud.stream.binder.DefaultBinding;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.HeaderMode;
@@ -53,7 +55,6 @@ import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerPro
import org.springframework.cloud.stream.binder.kafka.utils.KafkaTopicUtils;
import org.springframework.cloud.stream.config.BindingProperties;
import org.springframework.cloud.stream.provisioning.ProvisioningException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.integration.IntegrationMessageHeaderAccessor;
import org.springframework.integration.channel.DirectChannel;
@@ -80,24 +81,21 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.junit.Assert.assertTrue;
import kafka.utils.ZKStringSerializer$;
import kafka.utils.ZkUtils;
/**
* @author Soby Chacko
* @author Ilayaperumal Gopinathan
* @author Henryk Konsek
*/
public abstract class KafkaBinderTests extends
PartitionCapableBinderTests<AbstractKafkaTestBinder, ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>> {
public abstract class KafkaBinderTests extends PartitionCapableBinderTests<AbstractKafkaTestBinder, ExtendedConsumerProperties<KafkaConsumerProperties>,
ExtendedProducerProperties<KafkaProducerProperties>> {
@Rule
public ExpectedException expectedProvisioningException = ExpectedException.none();
@Override
protected ExtendedConsumerProperties<KafkaConsumerProperties> createConsumerProperties() {
final ExtendedConsumerProperties<KafkaConsumerProperties> kafkaConsumerProperties = new ExtendedConsumerProperties<>(
new KafkaConsumerProperties());
final ExtendedConsumerProperties<KafkaConsumerProperties> kafkaConsumerProperties =
new ExtendedConsumerProperties<>(new KafkaConsumerProperties());
// set the default values that would normally be propagated by Spring Cloud Stream
kafkaConsumerProperties.setInstanceCount(1);
kafkaConsumerProperties.setInstanceIndex(0);
@@ -106,8 +104,7 @@ public abstract class KafkaBinderTests extends
@Override
protected ExtendedProducerProperties<KafkaProducerProperties> createProducerProperties() {
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = new ExtendedProducerProperties<>(
new KafkaProducerProperties());
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = new ExtendedProducerProperties<>(new KafkaProducerProperties());
producerProperties.getExtension().setSync(true);
return producerProperties;
}
@@ -123,24 +120,15 @@ public abstract class KafkaBinderTests extends
protected abstract ZkUtils getZkUtils(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties);
protected abstract void invokeCreateTopic(ZkUtils zkUtils, String topic, int partitions,
int replicationFactor, Properties topicConfig);
int replicationFactor, Properties topicConfig);
protected abstract int invokePartitionSize(String topic,
ZkUtils zkUtils);
ZkUtils zkUtils);
@Test
@SuppressWarnings("unchecked")
public void testDlqAndRetry() throws Exception {
testDlqGuts(true);
}
@Test
public void testDlq() throws Exception {
testDlqGuts(false);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public void testDlqGuts(boolean withRetry) throws Exception {
AbstractKafkaTestBinder binder = getBinder();
Binder binder = getBinder();
DirectChannel moduleOutputChannel = new DirectChannel();
DirectChannel moduleInputChannel = new DirectChannel();
QueueChannel dlqChannel = new QueueChannel();
@@ -149,52 +137,23 @@ public abstract class KafkaBinderTests extends
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
producerProperties.setPartitionCount(2);
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.setMaxAttempts(withRetry ? 2 : 1);
consumerProperties.setMaxAttempts(3);
consumerProperties.setBackOffInitialInterval(100);
consumerProperties.setBackOffMaxInterval(150);
consumerProperties.getExtension().setEnableDlq(true);
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
long uniqueBindingId = System.currentTimeMillis();
String producerName = "dlqTest." + uniqueBindingId + ".0";
Binding<MessageChannel> producerBinding = binder.bindProducer(producerName,
Binding<MessageChannel> producerBinding = binder.bindProducer("retryTest." + uniqueBindingId + ".0",
moduleOutputChannel, producerProperties);
Binding<MessageChannel> consumerBinding = binder.bindConsumer(producerName,
Binding<MessageChannel> consumerBinding = binder.bindConsumer("retryTest." + uniqueBindingId + ".0",
"testGroup", moduleInputChannel, consumerProperties);
ExtendedConsumerProperties<KafkaConsumerProperties> dlqConsumerProperties = createConsumerProperties();
dlqConsumerProperties.setMaxAttempts(1);
ApplicationContext context = TestUtils.getPropertyValue(binder.getBinder(), "applicationContext",
ApplicationContext.class);
Map<String, MessageChannel> beansOfType = context.getBeansOfType(MessageChannel.class);
SubscribableChannel boundErrorChannel = context
.getBean(producerName + ".testGroup.errors-0", SubscribableChannel.class);
SubscribableChannel globalErrorChannel = context.getBean("errorChannel", SubscribableChannel.class);
final AtomicReference<Message<?>> boundErrorChannelMessage = new AtomicReference<>();
final AtomicReference<Message<?>> globalErrorChannelMessage = new AtomicReference<>();
final AtomicBoolean hasRecovererInCallStack = new AtomicBoolean(!withRetry);
boundErrorChannel.subscribe(new MessageHandler() {
@Override
public void handleMessage(Message<?> message) throws MessagingException {
boundErrorChannelMessage.set(message);
String stackTrace = Arrays.toString(new RuntimeException().getStackTrace());
hasRecovererInCallStack.set(stackTrace.contains("ErrorMessageSendingRecoverer"));
}
});
globalErrorChannel.subscribe(new MessageHandler() {
@Override
public void handleMessage(Message<?> message) throws MessagingException {
globalErrorChannelMessage.set(message);
}
});
Binding<MessageChannel> dlqConsumerBinding = binder.bindConsumer(
"error.dlqTest." + uniqueBindingId + ".0.testGroup", null, dlqChannel, dlqConsumerProperties);
"error.retryTest." + uniqueBindingId + ".0.testGroup", null, dlqChannel, dlqConsumerProperties);
binderBindUnbindLatency();
String testMessagePayload = "test." + UUID.randomUUID().toString();
Message<String> testMessage = MessageBuilder.withPayload(testMessagePayload).build();
@@ -205,12 +164,6 @@ public abstract class KafkaBinderTests extends
assertThat(receivedMessage.getPayload()).isEqualTo(testMessagePayload);
assertThat(handler.getInvocationCount()).isEqualTo(consumerProperties.getMaxAttempts());
binderBindUnbindLatency();
// verify we got a message on the dedicated error channel and the global (via bridge)
assertThat(boundErrorChannelMessage.get()).isNotNull();
assertThat(globalErrorChannelMessage.get()).isNotNull();
assertThat(hasRecovererInCallStack.get()).isEqualTo(withRetry);
dlqConsumerBinding.unbind();
consumerBinding.unbind();
producerBinding.unbind();
@@ -354,8 +307,7 @@ public abstract class KafkaBinderTests extends
ExtendedConsumerProperties<KafkaConsumerProperties> dlqConsumerProperties = createConsumerProperties();
dlqConsumerProperties.setMaxAttempts(1);
QueueChannel dlqChannel = new QueueChannel();
Binding<MessageChannel> dlqConsumerBinding = binder.bindConsumer(dlqName, null, dlqChannel,
dlqConsumerProperties);
Binding<MessageChannel> dlqConsumerBinding = binder.bindConsumer(dlqName, null, dlqChannel, dlqConsumerProperties);
String testMessagePayload = "test." + UUID.randomUUID().toString();
Message<String> testMessage = MessageBuilder.withPayload(testMessagePayload).build();
@@ -417,9 +369,9 @@ public abstract class KafkaBinderTests extends
@Test
@SuppressWarnings("unchecked")
public void testCompression() throws Exception {
final KafkaProducerProperties.CompressionType[] codecs = new KafkaProducerProperties.CompressionType[] {
KafkaProducerProperties.CompressionType.none, KafkaProducerProperties.CompressionType.gzip,
KafkaProducerProperties.CompressionType.snappy };
byte[] testPayload = new byte[2048];
Arrays.fill(testPayload, (byte) 65);
Binder binder = getBinder();
@@ -589,8 +541,8 @@ public abstract class KafkaBinderTests extends
Binder binder = getBinder(createConfigurationProperties());
GenericApplicationContext context = new GenericApplicationContext();
context.refresh();
// binder.setApplicationContext(context);
// binder.afterPropertiesSet();
DirectChannel output = new DirectChannel();
QueueChannel input1 = new QueueChannel();
@@ -656,6 +608,55 @@ public abstract class KafkaBinderTests extends
}
}
@Test
@Ignore("Needs further discussion")
@SuppressWarnings("unchecked")
public void testReset() throws Exception {
Binder binder = getBinder();
DirectChannel output = new DirectChannel();
QueueChannel input1 = new QueueChannel();
String testTopicName = UUID.randomUUID().toString();
Binding<MessageChannel> producerBinding = binder.bindProducer(testTopicName, output,
createProducerProperties());
String testPayload1 = "foo1-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload1.getBytes()));
ExtendedConsumerProperties<KafkaConsumerProperties> properties = createConsumerProperties();
properties.getExtension().setResetOffsets(true);
properties.getExtension().setStartOffset(KafkaConsumerProperties.StartOffset.earliest);
Binding<MessageChannel> consumerBinding = binder.bindConsumer(testTopicName, "startOffsets", input1,
properties);
Message<byte[]> receivedMessage1 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage1).isNotNull();
assertThat(new String(receivedMessage1.getPayload())).isEqualTo(testPayload1);
String testPayload2 = "foo2-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload2.getBytes()));
Message<byte[]> receivedMessage2 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage2).isNotNull();
assertThat(new String(receivedMessage2.getPayload())).isEqualTo(testPayload2);
consumerBinding.unbind();
String testPayload3 = "foo3-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload3.getBytes()));
ExtendedConsumerProperties<KafkaConsumerProperties> properties2 = createConsumerProperties();
properties2.getExtension().setResetOffsets(true);
properties2.getExtension().setStartOffset(KafkaConsumerProperties.StartOffset.earliest);
// Rebinding the same group with resetOffsets=true should replay the topic from the
// earliest offset, so all three payloads are received again in order.
consumerBinding = binder.bindConsumer(testTopicName, "startOffsets", input1, properties2);
Message<byte[]> receivedMessage4 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage4).isNotNull();
assertThat(new String(receivedMessage4.getPayload())).isEqualTo(testPayload1);
Message<byte[]> receivedMessage5 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage5).isNotNull();
assertThat(new String(receivedMessage5.getPayload())).isEqualTo(testPayload2);
Message<byte[]> receivedMessage6 = (Message<byte[]>) receive(input1);
assertThat(receivedMessage6).isNotNull();
assertThat(new String(receivedMessage6.getPayload())).isEqualTo(testPayload3);
consumerBinding.unbind();
producerBinding.unbind();
}
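Note: the resetOffsets/startOffset pair that testReset drives programmatically maps to these consumer properties (sketch; binding name "input" assumed):

spring.cloud.stream.kafka.bindings.input.consumer.resetOffsets=true
spring.cloud.stream.kafka.bindings.input.consumer.startOffset=earliest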
@Test
@SuppressWarnings("unchecked")
public void testResume() throws Exception {
@@ -741,6 +742,7 @@ public abstract class KafkaBinderTests extends
moduleOutputChannel1.send(message1);
moduleOutputChannel2.send(message2);
Message<?>[] messages = new Message[2];
messages[0] = receive(moduleInputChannel);
messages[1] = receive(moduleInputChannel);
@@ -766,15 +768,13 @@ public abstract class KafkaBinderTests extends
createProducerBindingProperties(createProducerProperties()));
QueueChannel moduleInputChannel = new QueueChannel();
Binding<MessageChannel> producerBinding = binder.bindProducer(
"testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", moduleOutputChannel,
createProducerProperties());
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.getExtension().setAutoCommitOffset(false);
Binding<MessageChannel> consumerBinding = binder.bindConsumer(
"testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", "test", moduleInputChannel,
consumerProperties);
String testPayload1 = "foo" + UUID.randomUUID().toString();
@@ -788,8 +788,7 @@ public abstract class KafkaBinderTests extends
Message<?> receivedMessage = receive(moduleInputChannel);
assertThat(receivedMessage).isNotNull();
assertThat(receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT)).isNotNull();
Acknowledgment acknowledgment = receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT,
Acknowledgment.class);
try {
acknowledgment.acknowledge();
}
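Note: turning off auto-commit, as this test does programmatically, is a one-property switch in an application (sketch; binding name "input" assumed); the binder then populates the KafkaHeaders.ACKNOWLEDGMENT header with the spring-kafka Acknowledgment used above:

spring.cloud.stream.kafka.bindings.input.consumer.autoCommitOffset=false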
@@ -811,14 +810,12 @@ public abstract class KafkaBinderTests extends
createProducerBindingProperties(createProducerProperties()));
QueueChannel moduleInputChannel = new QueueChannel();
Binding<MessageChannel> producerBinding = binder.bindProducer(
"testManualAckIsNotPossibleWhenAutoCommitOffsetIsEnabledOnTheBinder", moduleOutputChannel,
createProducerProperties());
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
Binding<MessageChannel> consumerBinding = binder.bindConsumer(
"testManualAckIsNotPossibleWhenAutoCommitOffsetIsEnabledOnTheBinder", "test", moduleInputChannel,
consumerProperties);
String testPayload1 = "foo" + UUID.randomUUID().toString();
@@ -998,8 +995,8 @@ public abstract class KafkaBinderTests extends
Binding<MessageChannel> outputBinding = binder.bindProducer("partJ.0", output, producerProperties);
if (usesExplicitRouting()) {
Object endpoint = extractEndpoint(outputBinding);
assertThat(getEndpointRouting(endpoint))
.contains(getExpectedRoutingBaseDestination("partJ.0", "test") + "-' + headers['partition']");
}
output.send(new GenericMessage<>(2));
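Note: the routing asserted above sends each record to "<destination>-<partition>" using the computed headers['partition'] header; on the application side that derives from partitioned-producer settings roughly like the following (sketch; binding name "output", the key expression, and the partition count are assumptions):

spring.cloud.stream.bindings.output.producer.partitionKeyExpression=payload
spring.cloud.stream.bindings.output.producer.partitionCount=3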
@@ -1047,7 +1044,7 @@ public abstract class KafkaBinderTests extends
QueueChannel input2 = new QueueChannel();
Binding<MessageChannel> binding2 = binder.bindConsumer("defaultGroup.0", null, input2,
consumerProperties);
// Since we don't provide any topic info, let Kafka bind the consumer successfully
Thread.sleep(1000);
String testPayload1 = "foo-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload1.getBytes()));
@@ -1066,7 +1063,7 @@ public abstract class KafkaBinderTests extends
output.send(new GenericMessage<>(testPayload2.getBytes()));
binding2 = binder.bindConsumer("defaultGroup.0", null, input2, consumerProperties);
// Since we don't provide any topic info, let Kafka bind the consumer successfully
Thread.sleep(1000);
String testPayload3 = "foo-" + UUID.randomUUID().toString();
output.send(new GenericMessage<>(testPayload3.getBytes()));
@@ -1229,8 +1226,7 @@ public abstract class KafkaBinderTests extends
consumerProperties.setInstanceIndex(2);
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
expectedProvisioningException.expect(ProvisioningException.class);
expectedProvisioningException
.expectMessage("The number of expected partitions was: 3, but 1 has been found instead");
Binding binding = binder.bindConsumer(testTopicName, "test", output, consumerProperties);
if (binding != null) {
binding.unbind();
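Note: this provisioning failure is the static-partitioning misconfiguration case: with rebalancing disabled, declaring more instances than the topic has partitions must fail fast. In property form the equivalent setup looks roughly like this (sketch; binding name "input" assumed):

spring.cloud.stream.instanceCount=3
spring.cloud.stream.instanceIndex=2
spring.cloud.stream.bindings.input.consumer.partitioned=true
spring.cloud.stream.kafka.bindings.input.consumer.autoRebalanceEnabled=false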
@@ -1346,7 +1342,7 @@ public abstract class KafkaBinderTests extends
Binding<?> binding = null;
try {
KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
Map<String, Object> propertiesToOverride = configurationProperties.getConfiguration();
propertiesToOverride.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
propertiesToOverride.put("value.deserializer", "org.apache.kafka.common.serialization.LongDeserializer");
configurationProperties.setConfiguration(propertiesToOverride);
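Note: the binder-level configuration map populated here is the programmatic twin of the pass-through properties below (sketch), which hand arbitrary client settings straight to the Kafka consumer:

spring.cloud.stream.kafka.binder.configuration.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.cloud.stream.kafka.binder.configuration.value.deserializer=org.apache.kafka.common.serialization.LongDeserializer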
@@ -1359,10 +1355,8 @@ public abstract class KafkaBinderTests extends
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
binding = binder.bindConsumer(testTopicName, "test", output, consumerProperties);
DirectFieldAccessor consumerAccessor = new DirectFieldAccessor(getKafkaConsumer(binding));
assertTrue("Expected StringDeserializer as a custom key deserializer",
consumerAccessor.getPropertyValue("keyDeserializer") instanceof StringDeserializer);
assertTrue("Expected LongDeserializer as a custom value deserializer",
consumerAccessor.getPropertyValue("valueDeserializer") instanceof LongDeserializer);
assertTrue("Expected StringDeserializer as a custom key deserializer", consumerAccessor.getPropertyValue("keyDeserializer") instanceof StringDeserializer);
assertTrue("Expected LongDeserializer as a custom value deserializer", consumerAccessor.getPropertyValue("valueDeserializer") instanceof LongDeserializer);
}
finally {
if (binding != null) {
@@ -1372,15 +1366,12 @@ public abstract class KafkaBinderTests extends
}
private KafkaConsumer getKafkaConsumer(Binding binding) {
DirectFieldAccessor bindingAccessor = new DirectFieldAccessor(binding);
KafkaMessageDrivenChannelAdapter adapter = (KafkaMessageDrivenChannelAdapter) bindingAccessor
.getPropertyValue("lifecycle");
DirectFieldAccessor adapterAccessor = new DirectFieldAccessor(adapter);
ConcurrentMessageListenerContainer messageListenerContainer =
(ConcurrentMessageListenerContainer) adapterAccessor.getPropertyValue("messageListenerContainer");
DirectFieldAccessor containerAccessor = new DirectFieldAccessor(messageListenerContainer);
DefaultKafkaConsumerFactory consumerFactory = (DefaultKafkaConsumerFactory) containerAccessor
.getPropertyValue("consumerFactory");
return (KafkaConsumer) consumerFactory.createConsumer();
}
@@ -1406,13 +1397,11 @@ public abstract class KafkaBinderTests extends
QueueChannel moduleInputChannel = new QueueChannel();
ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
producerProperties.setUseNativeEncoding(true);
producerProperties.getExtension().getConfiguration().put("value.serializer",
"org.apache.kafka.common.serialization.IntegerSerializer");
producerProperties.getExtension().getConfiguration().put("value.serializer", "org.apache.kafka.common.serialization.IntegerSerializer");
producerBinding = binder.bindProducer(testTopicName, moduleOutputChannel, producerProperties);
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.getExtension().setAutoRebalanceEnabled(false);
consumerProperties.getExtension().getConfiguration().put("value.deserializer",
"org.apache.kafka.common.serialization.IntegerDeserializer");
consumerProperties.getExtension().getConfiguration().put("value.deserializer", "org.apache.kafka.common.serialization.IntegerDeserializer");
consumerBinding = binder.bindConsumer(testTopicName, "test", moduleInputChannel, consumerProperties);
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
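Note: native encoding as exercised here means the Kafka serializer, not the binder, converts the payload; a property-file sketch of the same setup (binding names "output"/"input" assumed):

spring.cloud.stream.bindings.output.producer.useNativeEncoding=true
spring.cloud.stream.kafka.bindings.output.producer.configuration.value.serializer=org.apache.kafka.common.serialization.IntegerSerializer
spring.cloud.stream.kafka.bindings.input.consumer.configuration.value.deserializer=org.apache.kafka.common.serialization.IntegerDeserializer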
@@ -1508,9 +1497,9 @@ public abstract class KafkaBinderTests extends
input2.setBeanName("test.input2J");
Binding<MessageChannel> input2Binding = binder.bindConsumer("partJ.raw.0", "test", input2, consumerProperties);
output.send(new GenericMessage<>(new byte[] { (byte) 0 }));
output.send(new GenericMessage<>(new byte[] { (byte) 1 }));
output.send(new GenericMessage<>(new byte[] { (byte) 2 }));
Message<?> receive0 = receive(input0);
assertThat(receive0).isNotNull();
@@ -1549,6 +1538,7 @@ public abstract class KafkaBinderTests extends
catch (UnsupportedOperationException ignored) {
}
ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
consumerProperties.setConcurrency(2);
consumerProperties.setInstanceIndex(0);
@@ -1568,13 +1558,13 @@ public abstract class KafkaBinderTests extends
input2.setBeanName("test.input2S");
Binding<MessageChannel> input2Binding = binder.bindConsumer("part.raw.0", "test", input2, consumerProperties);
Message<byte[]> message2 = org.springframework.integration.support.MessageBuilder.withPayload(new byte[] { 2 })
.setHeader(IntegrationMessageHeaderAccessor.CORRELATION_ID, "kafkaBinderTestCommonsDelegate")
.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_NUMBER, 42)
.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_SIZE, 43).build();
output.send(message2);
output.send(new GenericMessage<>(new byte[] { 1 }));
output.send(new GenericMessage<>(new byte[] { 0 }));
Message<?> receive0 = receive(input0);
assertThat(receive0).isNotNull();
Message<?> receive1 = receive(input1);
@@ -1603,8 +1593,7 @@ public abstract class KafkaBinderTests extends
consumerProperties.setHeaderMode(HeaderMode.raw);
Binding<MessageChannel> consumerBinding = binder.bindConsumer("raw.0", "test", moduleInputChannel,
consumerProperties);
Message<?> message = org.springframework.integration.support.MessageBuilder
.withPayload("testSendAndReceiveWithRawMode".getBytes()).build();
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
moduleOutputChannel.send(message);
@@ -1629,15 +1618,13 @@ public abstract class KafkaBinderTests extends
consumerProperties.setHeaderMode(HeaderMode.raw);
Binding<MessageChannel> consumerBinding = binder.bindConsumer("raw.string.0", "test", moduleInputChannel,
consumerProperties);
Message<?> message = org.springframework.integration.support.MessageBuilder
.withPayload("testSendAndReceiveWithRawModeAndStringPayload").build();
// Let the consumer actually bind to the producer before sending a msg
binderBindUnbindLatency();
moduleOutputChannel.send(message);
Message<?> inbound = receive(moduleInputChannel);
assertThat(inbound).isNotNull();
assertThat(new String((byte[]) inbound.getPayload()))
.isEqualTo("testSendAndReceiveWithRawModeAndStringPayload");
producerBinding.unbind();
consumerBinding.unbind();
}
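Note: raw header mode, used by the tests above and below, ships only the payload bytes with no embedded headers, which is why the assertions read byte[] payloads directly; as a property sketch (binding names assumed):

spring.cloud.stream.bindings.output.producer.headerMode=raw
spring.cloud.stream.bindings.input.consumer.headerMode=raw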
@@ -1669,16 +1656,14 @@ public abstract class KafkaBinderTests extends
Binding<MessageChannel> input3Binding = binder.bindConsumer(barTapName, "tap2", module3InputChannel,
consumerProperties);
Message<?> message = org.springframework.integration.support.MessageBuilder
.withPayload("testSendAndReceiveWithExplicitConsumerGroupWithRawMode".getBytes()).build();
boolean success = false;
boolean retried = false;
while (!success) {
moduleOutputChannel.send(message);
Message<?> inbound = receive(module1InputChannel);
assertThat(inbound).isNotNull();
assertThat(new String((byte[]) inbound.getPayload()))
.isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
Message<?> tapped1 = receive(module2InputChannel);
Message<?> tapped2 = receive(module3InputChannel);
@@ -1689,15 +1674,12 @@ public abstract class KafkaBinderTests extends
continue;
}
success = true;
assertThat(new String((byte[]) tapped1.getPayload()))
.isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
assertThat(new String((byte[]) tapped2.getPayload()))
.isEqualTo("testSendAndReceiveWithExplicitConsumerGroupWithRawMode");
}
// unbind one tap stream; the other tap should still receive messages
input3Binding.unbind();
Message<?> message2 = org.springframework.integration.support.MessageBuilder.withPayload("bar".getBytes())
.build();
Message<?> message2 = org.springframework.integration.support.MessageBuilder.withPayload("bar".getBytes()).build();
moduleOutputChannel.send(message2);
// other tap still receives messages
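Note: each named group here ("tap1", "tap2") is an independent Kafka consumer group, so every group receives its own copy of each message and unbinding one leaves the other intact; a property sketch of one such tap, with the destination as a placeholder (binding name "input" assumed):

spring.cloud.stream.bindings.input.destination=<tap-destination>
spring.cloud.stream.bindings.input.group=tap1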
@@ -1753,7 +1735,7 @@ public abstract class KafkaBinderTests extends
receivedMessages.put(offset, message);
latch.countDown();
}
throw new RuntimeException("fail");
throw new RuntimeException();
}
public LinkedHashMap<Long, Message<?>> getReceivedMessages() {

View File

@@ -67,7 +67,7 @@ public class KafkaBinderUnitTests {
// binder level setting
binderConfigurationProperties.setConfiguration(
Collections.<String, Object>singletonMap(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"));
factory = method.invoke(binder, false, "foo", ecp);
configs = TestUtils.getPropertyValue(factory, "configs", Map.class);
assertThat(configs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)).isEqualTo("latest");
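Note: the binder-level setting asserted here, which the test verifies reaches the consumer factory's configs, has this property-file equivalent (sketch):

spring.cloud.stream.kafka.binder.configuration.auto.offset.reset=latest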

View File

@@ -1,29 +0,0 @@
#!/bin/bash
# Execute this script from a local checkout of Spring Cloud Stream.
# $1 = new project version, $2 = upper bound for the parent version.
./mvnw versions:update-parent -DparentVersion="[0.0.1,$2]" -Pspring -DgenerateBackupPoms=false -DallowSnapshots=true
./mvnw versions:set -DnewVersion="$1" -DgenerateBackupPoms=false
lines=$(find . -name 'pom.xml' | xargs egrep "SNAPSHOT" | grep -v regex | wc -l)
if [ "$lines" -eq 0 ]; then
echo "No snapshots found."
else
echo "Snapshots found."
fi
lines=$(find . -name 'pom.xml' | xargs egrep "M[0-9]" | grep -v regex | wc -l)
if [ "$lines" -eq 0 ]; then
echo "No milestones found."
else
echo "Milestones found."
fi
lines=$(find . -name 'pom.xml' | xargs egrep "RC[0-9]" | grep -v regex | wc -l)
if [ "$lines" -eq 0 ]; then
echo "No release candidates found."
else
echo "Release candidates found."
fi
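For reference, the deleted script took the new project version as $1 and an upper bound for the parent version as $2. A hypothetical invocation (the script's file name is not shown in this diff, so "update-versions.sh" is assumed):

# bump all modules to 2.0.0.M1, allowing a parent up to that same version
./update-versions.sh 2.0.0.M1 2.0.0.M1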