Compare commits


5 Commits
main ... v3.2.2

Author SHA1 Message Date
buildmaster
2851464bd0 Update SNAPSHOT to 3.2.2 2022-02-16 18:29:43 +00:00
Soby Chacko
da049fc980 Update Spring Kafka/SIK versions 2022-02-16 10:08:33 -05:00
Gary Russell
170166ac57 GH-1195: Fix Pause/Resume Documentation
Resolves https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/issues/1195

Remove obsolete documentation.

**cherry-pick to 3.2.x**
2022-02-07 14:33:45 -05:00
Rex Ijiekhuamen
5b880a8104 Fixed invalid java code snippet 2022-01-24 10:23:17 -05:00
Soby Chacko
42d3b92c7b Test package changes 2022-01-18 15:52:18 -05:00
50 changed files with 3126 additions and 292 deletions

View File

@@ -1,10 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: Please create new issues in https://github.com/spring-cloud/spring-cloud-stream/issues
labels: ''
assignees: ''
---
Please create all new issues in https://github.com/spring-cloud/spring-cloud-stream/issues. The Kafka binder repository has been relocated to the core Spring Cloud Stream repo.

View File

@@ -14,24 +14,10 @@ Edit the files in the src/main/asciidoc/ directory instead.
image::https://circleci.com/gh/spring-cloud/spring-cloud-stream-binder-kafka.svg?style=svg["CircleCI", link="https://circleci.com/gh/spring-cloud/spring-cloud-stream-binder-kafka"]
image::https://codecov.io/gh/spring-cloud/spring-cloud-stream-binder-kafka/branch/{github-tag}/graph/badge.svg["codecov", link="https://codecov.io/gh/spring-cloud/spring-cloud-stream-binder-kafka"]
image::https://badges.gitter.im/spring-cloud/spring-cloud-stream.svg[Gitter, link="https://gitter.im/spring-cloud/spring-cloud-stream?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge"]
image::https://badges.gitter.im/spring-cloud/spring-cloud-stream-binder-kafka.svg[Gitter, link="https://gitter.im/spring-cloud/spring-cloud-stream-binder-kafka?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge"]
// ======================================================================================
//= Overview
[partintro]
--
This guide describes the Apache Kafka implementation of the Spring Cloud Stream Binder.
It contains information about its design, usage, and configuration options, as well as information on how the Spring Cloud Stream concepts map onto Apache Kafka-specific constructs.
In addition, this guide explains the Kafka Streams binding capabilities of Spring Cloud Stream.
--
== ANNOUNCEMENT
**IMPORTANT: This repository has been migrated into the core Spring Cloud Stream repository - https://github.com/spring-cloud/spring-cloud-stream.
Please create new issues over at the core repository.**
== Apache Kafka Binder
=== Usage

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<version>3.2.2</version>
</parent>
<packaging>jar</packaging>
<name>spring-cloud-stream-binder-kafka-docs</name>

View File

@@ -9,7 +9,7 @@
|spring.cloud.stream.dynamic-destinations | `[]` | A list of destinations that can be bound dynamically. If set, only listed destinations can be bound.
|spring.cloud.stream.function.batch-mode | `false` |
|spring.cloud.stream.function.bindings | |
|spring.cloud.stream.function.definition | | Definition of functions to bind. If several functions need to be composed into one, use pipes (e.g., 'fooFunc\|barFunc')
|spring.cloud.stream.input-bindings | | A semi-colon delimited string to explicitly define input bindings (specifically for cases when there is no implicit trigger to create such bindings such as Function, Supplier or Consumer).
|spring.cloud.stream.instance-count | `1` | The number of deployed instances of an application. Default: 1. NOTE: Could also be managed per individual binding "spring.cloud.stream.bindings.foo.consumer.instance-count" where 'foo' is the name of the binding.
|spring.cloud.stream.instance-index | `0` | The instance id of the application: a number from 0 to instanceCount-1. Used for partitioning and with Kafka. NOTE: Could also be managed per individual binding "spring.cloud.stream.bindings.foo.consumer.instance-index" where 'foo' is the name of the binding.
|spring.cloud.stream.instance-index-list | | A list of instance id's from 0 to instanceCount-1. Used for partitioning and with Kafka. NOTE: Could also be managed per individual binding "spring.cloud.stream.bindings.foo.consumer.instance-index-list" where 'foo' is the name of the binding. This setting will override the one set in 'spring.cloud.stream.instance-index'
@@ -55,14 +55,10 @@
|spring.cloud.stream.metrics.meter-filter | | Pattern to control the 'meters' one wants to capture. By default all 'meters' will be captured. For example, 'spring.integration.*' will only capture metric information for meters whose name starts with 'spring.integration'.
|spring.cloud.stream.metrics.properties | | Application properties that should be added to the metrics payload For example: `spring.application**`.
|spring.cloud.stream.metrics.schedule-interval | `60s` | Interval expressed as Duration for scheduling metrics snapshots publishing. Defaults to 60 seconds
|spring.cloud.stream.output-bindings | | A semi-colon delimited string to explicitly define output bindings (specifically for cases when there is no implicit trigger to create such bindings such as Function, Supplier or Consumer).
|spring.cloud.stream.override-cloud-connectors | `false` | This property is only applicable when the cloud profile is active and Spring Cloud Connectors are provided with the application. If the property is false (the default), the binder detects a suitable bound service (for example, a RabbitMQ service bound in Cloud Foundry for the RabbitMQ binder) and uses it for creating connections (usually through Spring Cloud Connectors). When set to true, this property instructs binders to completely ignore the bound services and rely on Spring Boot properties (for example, relying on the spring.rabbitmq.* properties provided in the environment for the RabbitMQ binder). The typical usage of this property is to be nested in a customized environment when connecting to multiple systems.
|spring.cloud.stream.pollable-source | `none` | A semi-colon delimited list of binding names of pollable sources. Binding names follow the same naming convention as functions. For example, name '...pollable-source=foobar' will be accessible as the 'foobar-in-0' binding
|spring.cloud.stream.poller.cron | | Cron expression value for the Cron Trigger.
|spring.cloud.stream.poller.fixed-delay | `1000` | Fixed delay for default poller.
|spring.cloud.stream.poller.initial-delay | `0` | Initial delay for periodic triggers.
|spring.cloud.stream.poller.max-messages-per-poll | `1` | Maximum messages per poll for the default poller.
|spring.cloud.stream.poller.time-unit | | The TimeUnit to apply to delay values.
|spring.cloud.stream.sendto.destination | `none` | The name of the header used to determine the name of the output destination
|spring.cloud.stream.source | | A colon delimited string representing the names of the sources based on which source bindings will be created. This is primarily to support cases where source binding may be required without providing a corresponding Supplier. (e.g., for cases where the actual source of data is outside of scope of spring-cloud-stream - HTTP -> Stream)
|spring.cloud.stream.source | | A semi-colon delimited string representing the names of the sources based on which source bindings will be created. This is primarily to support cases where source binding may be required without providing a corresponding Supplier. (e.g., for cases where the actual source of data is outside of scope of spring-cloud-stream - HTTP -> Stream) @deprecated use {@link #outputBindings}
|===

View File

@@ -430,6 +430,213 @@ public Function<KTable<String, String>, KStream<String, String>> bar() {
You can compose them as `foo|bar`, but keep in mind that the second function (`bar` in this case) must have a `KTable` as input since the first function (`foo`) has `KTable` as output.
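To make the composition requirement concrete, here is a minimal sketch of two such functions, assuming the bean names `foo` and `bar` used in the text; `bar` keeps the signature shown above, while `foo`'s exact signature is an assumption for illustration only (imports omitted, as in the surrounding examples).
[source]
----
@Bean
public Function<KStream<String, String>, KTable<String, String>> foo() {
    // foo ends with a KTable, so it can only be composed with a function
    // that accepts a KTable as its input.
    return input -> input.toTable();
}

@Bean
public Function<KTable<String, String>, KStream<String, String>> bar() {
    // bar consumes the KTable produced by foo and turns it back into a KStream.
    return table -> table.toStream();
}
----
With `spring.cloud.stream.function.definition=foo|bar`, the binder then treats the composed `foo|bar` as a single processor.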
==== Imperative programming model
Starting with version `3.1.0` of the binder, we recommend using the functional programming model described above for Kafka Streams binder based applications.
The support for `StreamListener` is deprecated starting with `3.1.0` of Spring Cloud Stream.
Below, we are providing some details on the `StreamListener` based Kafka Streams processors as a reference.
Following is the equivalent of the Word count example using `StreamListener`.
[source]
----
@SpringBootApplication
@EnableBinding(KafkaStreamsProcessor.class)
public class WordCountProcessorApplication {

    @StreamListener("input")
    @SendTo("output")
    public KStream<?, WordCount> process(KStream<?, String> input) {
        return input
                .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
                .groupBy((key, value) -> value)
                .windowedBy(TimeWindows.of(5000))
                .count(Materialized.as("WordCounts-multi"))
                .toStream()
                .map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
    }

    public static void main(String[] args) {
        SpringApplication.run(WordCountProcessorApplication.class, args);
    }
}
----
As you can see, this is a bit more verbose since you need to provide `EnableBinding` and the other extra annotations like `StreamListener` and `SendTo` to make it a complete application.
`EnableBinding` is where you specify your binding interface that contains your bindings.
In this case, we are using the stock `KafkaStreamsProcessor` binding interface that has the following contracts.
[source]
----
public interface KafkaStreamsProcessor {

    @Input("input")
    KStream<?, ?> input();

    @Output("output")
    KStream<?, ?> output();
}
----
Binder will create bindings for the input `KStream` and output `KStream` since you are using a binding interface that contains those declarations.
In addition to the obvious differences in the programming model offered in the functional style, one particular thing that needs to be mentioned here is that the binding names are what you specify in the binding interface.
For example, in the above application, since we are using `KafkaStreamsProcessor`, the binding names are `input` and `output`.
Binding properties need to use those names. For instance `spring.cloud.stream.bindings.input.destination`, `spring.cloud.stream.bindings.output.destination` etc.
Keep in mind that this is fundamentally different from the functional style since there the binder generates binding names for the application.
This is because the application does not provide any binding interfaces in the functional model using `EnableBinding`.
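For contrast, here is a minimal sketch of what a functional counterpart might look like; the bean name `process` is an assumption, and with it the binder would generate the binding names `process-in-0` and `process-out-0`, so the corresponding properties become `spring.cloud.stream.bindings.process-in-0.destination` and `spring.cloud.stream.bindings.process-out-0.destination` (imports omitted, as in the surrounding examples).
[source]
----
@Bean
public Function<KStream<Object, String>, KStream<String, Long>> process() {
    // No binding interface is required; the binder derives the binding names
    // from the function bean name (process-in-0 / process-out-0).
    return input -> input
            .groupBy((key, value) -> value)
            .count()
            .toStream();
}
----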
Here is another example of a sink where we have two inputs.
[source]
----
@EnableBinding(KStreamKTableBinding.class)
.....
.....

@StreamListener
public void process(@Input("inputStream") KStream<String, PlayEvent> playEvents,
        @Input("inputTable") KTable<Long, Song> songTable) {
    ....
    ....
}

interface KStreamKTableBinding {

    @Input("inputStream")
    KStream<?, ?> inputStream();

    @Input("inputTable")
    KTable<?, ?> inputTable();
}
----
Following is the `StreamListener` equivalent of the same `BiFunction` based processor that we saw above.
[source]
----
@EnableBinding(KStreamKTableBinding.class)
....
....

@StreamListener
@SendTo("output")
public KStream<String, Long> process(@Input("input") KStream<Object, String> userClicksStream,
        @Input("inputTable") KTable<String, String> userRegionsTable) {
    ....
    ....
}

interface KStreamKTableBinding extends KafkaStreamsProcessor {

    @Input("inputTable")
    KTable<?, ?> inputTable();
}
----
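For reference, here is a minimal sketch of what the `BiFunction` counterpart referenced above might look like; the domain types (user clicks keyed by user, user regions) and the join logic are assumptions carried over from the example, not the exact listing from the full guide.
[source]
----
@Bean
public BiFunction<KStream<String, Long>, KTable<String, String>, KStream<String, Long>> process() {
    // The first argument binds to process-in-0, the second to process-in-1,
    // and the returned KStream binds to process-out-0.
    return (userClicksStream, userRegionsTable) ->
            userClicksStream.leftJoin(userRegionsTable, (clicks, region) -> clicks);
}
----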
Finally, here is the `StreamListener` equivalent of the application with three inputs and curried functions.
[source]
----
@EnableBinding(CustomGlobalKTableProcessor.class)
...
...

@StreamListener
@SendTo("output")
public KStream<Long, EnrichedOrder> process(
        @Input("input-1") KStream<Long, Order> ordersStream,
        @Input("input-2") GlobalKTable<Long, Customer> customers,
        @Input("input-3") GlobalKTable<Long, Product> products) {

    KStream<Long, CustomerOrder> customerOrdersStream = ordersStream.join(
            customers, (orderId, order) -> order.getCustomerId(),
            (order, customer) -> new CustomerOrder(customer, order));

    return customerOrdersStream.join(products,
            (orderId, customerOrder) -> customerOrder.productId(),
            (customerOrder, product) -> {
                EnrichedOrder enrichedOrder = new EnrichedOrder();
                enrichedOrder.setProduct(product);
                enrichedOrder.setCustomer(customerOrder.customer);
                enrichedOrder.setOrder(customerOrder.order);
                return enrichedOrder;
            });
}

interface CustomGlobalKTableProcessor {

    @Input("input-1")
    KStream<?, ?> input1();

    @Input("input-2")
    GlobalKTable<?, ?> input2();

    @Input("input-3")
    GlobalKTable<?, ?> input3();

    @Output("output")
    KStream<?, ?> output();
}
----
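As a point of comparison, here is a sketch of the curried functional counterpart referenced above, reusing the domain types from the `StreamListener` version; treat it as an illustration of the shape of the signature rather than the exact listing from the full guide.
[source]
----
@Bean
public Function<KStream<Long, Order>,
        Function<GlobalKTable<Long, Customer>,
                Function<GlobalKTable<Long, Product>, KStream<Long, EnrichedOrder>>>> process() {

    // Each curried argument maps to one input binding: process-in-0, process-in-1, process-in-2.
    return ordersStream -> customers -> products -> {
        KStream<Long, CustomerOrder> customerOrdersStream = ordersStream.join(
                customers, (orderId, order) -> order.getCustomerId(),
                (order, customer) -> new CustomerOrder(customer, order));
        return customerOrdersStream.join(products,
                (orderId, customerOrder) -> customerOrder.productId(),
                (customerOrder, product) -> {
                    EnrichedOrder enrichedOrder = new EnrichedOrder();
                    enrichedOrder.setProduct(product);
                    enrichedOrder.setCustomer(customerOrder.customer);
                    enrichedOrder.setOrder(customerOrder.order);
                    return enrichedOrder;
                });
    };
}
----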
You might notice that the above two examples are even more verbose, since in addition to providing `EnableBinding`, you also need to write your own custom binding interface.
Using the functional model, you can avoid all those ceremonial details.
Before we move on from looking at the general programming model offered by the Kafka Streams binder, here is the `StreamListener` version of multiple output bindings.
[source]
----
@EnableBinding(KStreamProcessorWithBranches.class)
public static class WordCountProcessorApplication {

    @Autowired
    private TimeWindows timeWindows;

    @StreamListener("input")
    @SendTo({"output1","output2","output3"})
    public KStream<?, WordCount>[] process(KStream<Object, String> input) {

        Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
        Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
        Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");

        return input
                .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
                .groupBy((key, value) -> value)
                .windowedBy(timeWindows)
                .count(Materialized.as("WordCounts-1"))
                .toStream()
                .map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))))
                .branch(isEnglish, isFrench, isSpanish);
    }

    interface KStreamProcessorWithBranches {

        @Input("input")
        KStream<?, ?> input();

        @Output("output1")
        KStream<?, ?> output1();

        @Output("output2")
        KStream<?, ?> output2();

        @Output("output3")
        KStream<?, ?> output3();
    }
}
----
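For completeness, here is a sketch of how the same branching processor might be expressed in the functional style, reusing the `WordCount` type and predicates from the listing above; the bean name, window size, and store name are assumptions, and the array return type maps to the output bindings `process-out-0`, `process-out-1`, and `process-out-2`.
[source]
----
@Bean
public Function<KStream<Object, String>, KStream<?, WordCount>[]> process() {
    Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
    Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
    Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");

    return input -> input
            .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .groupBy((key, value) -> value)
            .windowedBy(TimeWindows.of(Duration.ofSeconds(5)))
            .count(Materialized.as("WordCounts-branch"))
            .toStream()
            .map((key, value) -> new KeyValue<>(null,
                    new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))))
            .branch(isEnglish, isFrench, isSpanish);
}
----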
To recap, we have reviewed the various programming model choices when using the Kafka Streams binder.
The binder provides binding capabilities for `KStream`, `KTable` and `GlobalKTable` on the input.
`KTable` and `GlobalKTable` bindings are only available on the input.
The binder supports both input and output bindings for `KStream`.
The upshot of the Kafka Streams binder's programming model is that you have the flexibility of choosing between a fully functional programming model and the `StreamListener` based imperative approach.
=== Ancillaries to the programming model
==== Multiple Kafka Streams processors within a single application
@@ -470,7 +677,7 @@ This is also true when you have a single Kafka Streams processor and other types
Application id is a mandatory property that you need to provide for a Kafka Streams application.
Spring Cloud Stream Kafka Streams binder allows you to configure this application id in multiple ways.
If you only have a single processor in the application, then you can set this at the binder level using the following property:
If you only have a single processor or `StreamListener` in the application, then you can set this at the binder level using the following property:
`spring.cloud.stream.kafka.streams.binder.applicationId`.
@@ -505,6 +712,33 @@ and
`spring.cloud.stream.kafka.streams.binder.functions.anotherProcess.applicationId`
In the case of `StreamListener`, you need to set this on the first input binding of the processor.
For example, imagine that you have the following two `StreamListener` based processors.
```
@StreamListener
@SendTo("output")
public KStream<String, String> process(@Input("input") KStream<Object, String> input) {
    ...
}

@StreamListener
@SendTo("anotherOutput")
public KStream<String, String> anotherProcess(@Input("anotherInput") KStream<Object, String> input) {
    ...
}
```
Then you must set the application id for these using the following binding properties:
`spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId`
and
`spring.cloud.stream.kafka.streams.bindings.anotherInput.consumer.applicationId`
This approach of setting the application id at the binding level also works for the function based model.
However, setting it per function at the binder level, as shown above, is much easier if you are using the functional model.
@@ -515,12 +749,14 @@ If the application does not provide an application ID, then in that case the bin
This is convenient in development scenarios as it avoids the need for explicitly providing the application ID.
The generated application ID in this manner will be static over application restarts.
In the case of the functional model, the generated application ID will be the function bean name followed by the literal `applicationID`, for example `process-applicationID` if `process` is the function bean name.
In the case of `StreamListener`, instead of using the function bean name, the generated application ID will use the containing class name, followed by the method name, followed by the literal `applicationId`.
====== Summary of setting Application ID
* By default, the binder will auto-generate the application ID per function method.
* By default, the binder will auto-generate the application ID per function or `StreamListener` method.
* If you have a single processor, then you can use `spring.kafka.streams.applicationId`, `spring.application.name` or `spring.cloud.stream.kafka.streams.binder.applicationId`.
* If you have multiple processors, then application ID can be set per function using the property - `spring.cloud.stream.kafka.streams.binder.functions.<function-name>.applicationId`.
In the case of `StreamListener`, this can be done using `spring.cloud.stream.kafka.streams.bindings.input.applicationId`, assuming that the input binding name is `input`.
==== Overriding the default binding names generated by the binder with the functional style
@@ -580,7 +816,7 @@ Keys are always deserialized using native Serdes.
For values, by default, deserialization on the inbound is natively performed by Kafka.
Please note that this is a major change on default behavior from previous versions of Kafka Streams binder where the deserialization was done by the framework.
Kafka Streams binder will try to infer matching `Serde` types by looking at the type signature of `java.util.function.Function|Consumer`.
Kafka Streams binder will try to infer matching `Serde` types by looking at the type signature of `java.util.function.Function|Consumer` or `StreamListener`.
Here is the order that it matches the Serdes.
* If the application provides a bean of type `Serde` and if the return type is parameterized with the actual type of the incoming key or value type, then it will use that `Serde` for inbound deserialization.
@@ -780,7 +1016,7 @@ It is always recommended to explicitly create a DLQ topic for each input binding
==== DLQ per input consumer binding
The property `spring.cloud.stream.kafka.streams.binder.deserializationExceptionHandler` is applicable for the entire application.
This implies that if there are multiple functions in the same application, this property is applied to all of them.
This implies that if there are multiple functions or `StreamListener` methods in the same application, this property is applied to all of them.
However, if you have multiple processors or multiple input bindings within a single processor, then you can use the finer-grained DLQ control that the binder provides per input consumer binding.
If you have the following processor,
@@ -825,7 +1061,7 @@ If you set a consumer binding's `dlqPartitions` property to a value greater than
A couple of things to keep in mind when using the exception handling feature in Kafka Streams binder.
* The property `spring.cloud.stream.kafka.streams.binder.deserializationExceptionHandler` is applicable for the entire application.
This implies that if there are multiple functions in the same application, this property is applied to all of them.
This implies that if there are multiple functions or `StreamListener` methods in the same application, this property is applied to all of them.
* The exception handling for deserialization works consistently with native deserialization and framework provided message conversion.
==== Handling Production Exceptions in the Binder
@@ -1868,7 +2104,7 @@ Default: `logAndFail`
applicationId::
Convenient way to set the application.id for the Kafka Streams application globally at the binder level.
If the application contains multiple functions, then the application id should be set differently.
If the application contains multiple functions or `StreamListener` methods, then the application id should be set differently.
See above where setting the application id is discussed in detail.
+
Default: application will generate a static application ID. See the application ID section for more details.
@@ -1932,7 +2168,7 @@ The following properties are available for Kafka Streams consumers and must be p
For convenience, if there are multiple input bindings and they all require a common value, that can be configured by using the prefix `spring.cloud.stream.kafka.streams.default.consumer.`.
applicationId::
Setting application.id per input binding.
Setting application.id per input binding. This is only preferred for `StreamListener` based processors; for function based processors, see the other approaches outlined above.
+
Default: See above.
@@ -2006,7 +2242,7 @@ In Kafka Streams, you can control of the number of threads a processor can creat
You can do this using the various `configuration` options described above at the binder, function, producer, or consumer level.
You can also use the `concurrency` property that core Spring Cloud Stream provides for this purpose.
When using this, you need to use it on the consumer.
When you have more than one input binding, set this on the first input binding.
When you have more than one input binding, either in a function or a `StreamListener`, set this on the first input binding.
For example, when you set `spring.cloud.stream.bindings.process-in-0.consumer.concurrency`, it will be translated as `num.stream.threads` by the binder.
If you have multiple processors and one processor defines binding level concurrency, but not the others, those ones with no binding level concurrency will default back to the binder wide property specified through
`spring.cloud.stream.kafka.streams.binder.configuration.num.stream.threads`.

View File

@@ -40,7 +40,7 @@ The Apache Kafka Binder implementation maps each destination to an Apache Kafka
The consumer group maps directly to the same Apache Kafka concept.
Partitioning also maps directly to Apache Kafka partitions as well.
The binder currently uses the Apache Kafka `kafka-clients` version `3.1.0`.
The binder currently uses the Apache Kafka `kafka-clients` version `2.3.1`.
This client can communicate with older brokers (see the Kafka documentation), but certain features may not be available.
For example, with versions earlier than 0.11.x.x, native headers are not supported.
Also, 0.11.x.x does not support the `autoAddPartitions` property.
@@ -364,6 +364,8 @@ Starting with version 3.0, when `spring.cloud.stream.binding.<name>.consumer.bat
Otherwise, the method will be called with one record at a time.
The size of the batch is controlled by Kafka consumer properties `max.poll.records`, `fetch.min.bytes`, `fetch.max.wait.ms`; refer to the Kafka documentation for more information.
Bear in mind that batch mode is not supported with `@StreamListener` - it only works with the newer functional programming model.
IMPORTANT: Retry within the binder is not supported when using batch mode, so `maxAttempts` will be overridden to 1.
You can configure a `SeekToCurrentBatchErrorHandler` (using a `ListenerContainerCustomizer`) to achieve similar functionality to retry in the binder.
You can also use a manual `AckMode` and call `Acknowledgment.nack(index, sleep)` to commit the offsets for a partial batch and have the remaining records redelivered.
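To illustrate batch mode with the functional model, here is a minimal sketch, assuming a hypothetical consumer bean named `batchConsumer` whose binding carries `String` payloads (imports omitted, as in the surrounding examples).
[source]
----
@Bean
public Consumer<List<String>> batchConsumer() {
    return records -> {
        // With spring.cloud.stream.bindings.batchConsumer-in-0.consumer.batch-mode=true,
        // the whole polled batch is delivered as a single List.
        records.forEach(record -> System.out.println("Received: " + record));
    };
}
----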

pom.xml
View File

@@ -2,12 +2,12 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<version>3.2.2</version>
<packaging>pom</packaging>
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-build</artifactId>
<version>4.0.0-SNAPSHOT</version>
<version>3.1.1</version>
<relativePath />
</parent>
<scm>
@@ -20,11 +20,11 @@
<tag>HEAD</tag>
</scm>
<properties>
<java.version>17</java.version>
<spring-kafka.version>3.0.0-M1</spring-kafka.version>
<spring-integration-kafka.version>6.0.0-SNAPSHOT</spring-integration-kafka.version>
<java.version>1.8</java.version>
<spring-kafka.version>2.8.2</spring-kafka.version>
<spring-integration-kafka.version>5.5.8</spring-integration-kafka.version>
<kafka.version>3.0.0</kafka.version>
<spring-cloud-stream.version>4.0.0-SNAPSHOT</spring-cloud-stream.version>
<spring-cloud-stream.version>3.2.2</spring-cloud-stream.version>
<maven-checkstyle-plugin.failsOnError>true</maven-checkstyle-plugin.failsOnError>
<maven-checkstyle-plugin.failsOnViolation>true</maven-checkstyle-plugin.failsOnViolation>
<maven-checkstyle-plugin.includeTestSourceDirectory>true</maven-checkstyle-plugin.includeTestSourceDirectory>
@@ -144,7 +144,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<!-- <version>1.7</version>-->
<version>1.7</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>

View File

@@ -4,7 +4,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<version>3.2.2</version>
</parent>
<artifactId>spring-cloud-starter-stream-kafka</artifactId>
<description>Spring Cloud Starter Stream Kafka</description>

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<version>3.2.2</version>
</parent>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
<description>Spring Cloud Stream Kafka Binder Core</description>

View File

@@ -28,9 +28,10 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import jakarta.validation.constraints.AssertTrue;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotNull;
import javax.validation.constraints.AssertTrue;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.consumer.ConsumerConfig;

View File

@@ -19,7 +19,7 @@ package org.springframework.cloud.stream.binder.kafka.properties;
import java.util.HashMap;
import java.util.Map;
import jakarta.validation.constraints.NotNull;
import javax.validation.constraints.NotNull;
import org.springframework.expression.Expression;

View File

@@ -10,7 +10,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<version>3.2.2</version>
</parent>
<properties>
@@ -73,6 +73,35 @@
<artifactId>kafka_2.13</artifactId>
<classifier>test</classifier>
</dependency>
<!-- Following dependency is only provided for testing and won't be packaged with the binder apps-->
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>${avro.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<version>${avro.version}</version>
<executions>
<execution>
<phase>generate-test-sources</phase>
<goals>
<goal>schema</goal>
</goals>
<configuration>
<outputDirectory>${project.basedir}/target/generated-test-sources</outputDirectory>
<testOutputDirectory>${project.basedir}/target/generated-test-sources</testOutputDirectory>
<testSourceDirectory>${project.basedir}/src/test/resources/avro</testSourceDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -211,7 +211,7 @@ public class InteractiveQueryService {
throwable = e;
}
throw new IllegalStateException(
"Error when retrieving state store.", throwable != null ? throwable : new Throwable("Kafka Streams is not ready."));
"Error when retrieving state store", throwable != null ? throwable : new Throwable("Kafka Streams is not ready."));
});
}

View File

@@ -0,0 +1,66 @@
/*
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams;
import org.apache.kafka.streams.kstream.KStream;
import org.springframework.cloud.stream.binding.StreamListenerParameterAdapter;
import org.springframework.core.MethodParameter;
import org.springframework.core.ResolvableType;
/**
* {@link StreamListenerParameterAdapter} for KStream.
*
* @author Marius Bogoevici
* @author Soby Chacko
*/
class KStreamStreamListenerParameterAdapter
implements StreamListenerParameterAdapter<KStream<?, ?>, KStream<?, ?>> {
private final KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate;
private final KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue;
KStreamStreamListenerParameterAdapter(
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
this.kafkaStreamsMessageConversionDelegate = kafkaStreamsMessageConversionDelegate;
this.KafkaStreamsBindingInformationCatalogue = KafkaStreamsBindingInformationCatalogue;
}
@Override
public boolean supports(Class bindingTargetType, MethodParameter methodParameter) {
return KafkaStreamsBinderUtils.supportsKStream(methodParameter, bindingTargetType);
}
@Override
@SuppressWarnings("unchecked")
public KStream adapt(KStream<?, ?> bindingTarget, MethodParameter parameter) {
ResolvableType resolvableType = ResolvableType.forMethodParameter(parameter);
final Class<?> valueClass = (resolvableType.getGeneric(1).getRawClass() != null)
? (resolvableType.getGeneric(1).getRawClass()) : Object.class;
if (this.KafkaStreamsBindingInformationCatalogue
.isUseNativeDecoding(bindingTarget)) {
return bindingTarget;
}
else {
return this.kafkaStreamsMessageConversionDelegate
.deserializeOnInbound(valueClass, bindingTarget);
}
}
}

View File

@@ -0,0 +1,58 @@
/*
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams;
import java.io.Closeable;
import java.io.IOException;
import org.apache.kafka.streams.kstream.KStream;
import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
/**
* {@link StreamListenerResultAdapter} for KStream.
*
* @author Marius Bogoevici
* @author Soby Chacko
*/
class KStreamStreamListenerResultAdapter implements
StreamListenerResultAdapter<KStream, KStreamBoundElementFactory.KStreamWrapper> {
@Override
public boolean supports(Class<?> resultType, Class<?> boundElement) {
return KStream.class.isAssignableFrom(resultType)
&& KStream.class.isAssignableFrom(boundElement);
}
@Override
@SuppressWarnings("unchecked")
public Closeable adapt(KStream streamListenerResult,
KStreamBoundElementFactory.KStreamWrapper boundElement) {
boundElement.wrap(streamListenerResult);
return new NoOpCloseable();
}
private static final class NoOpCloseable implements Closeable {
@Override
public void close() throws IOException {
}
}
}

View File

@@ -17,6 +17,7 @@
package org.springframework.cloud.stream.binder.kafka.streams;
import java.lang.reflect.Constructor;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -49,7 +50,10 @@ import org.springframework.cloud.stream.binder.BinderConfiguration;
import org.springframework.cloud.stream.binder.kafka.streams.function.FunctionDetectorCondition;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
import org.springframework.cloud.stream.binder.kafka.streams.serde.CompositeNonNativeSerde;
import org.springframework.cloud.stream.binder.kafka.streams.serde.MessageConverterDelegateSerde;
import org.springframework.cloud.stream.binding.BindingService;
import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
import org.springframework.cloud.stream.config.BinderProperties;
import org.springframework.cloud.stream.config.BindingServiceConfiguration;
import org.springframework.cloud.stream.config.BindingServiceProperties;
@@ -292,6 +296,37 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
}
}
@Bean
public KStreamStreamListenerResultAdapter kstreamStreamListenerResultAdapter() {
return new KStreamStreamListenerResultAdapter();
}
@Bean
public KStreamStreamListenerParameterAdapter kstreamStreamListenerParameterAdapter(
KafkaStreamsMessageConversionDelegate kstreamBoundMessageConversionDelegate,
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
return new KStreamStreamListenerParameterAdapter(
kstreamBoundMessageConversionDelegate,
KafkaStreamsBindingInformationCatalogue);
}
@Bean
public KafkaStreamsStreamListenerSetupMethodOrchestrator kafkaStreamsStreamListenerSetupMethodOrchestrator(
BindingServiceProperties bindingServiceProperties,
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
KeyValueSerdeResolver keyValueSerdeResolver,
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
KStreamStreamListenerParameterAdapter kafkaStreamListenerParameterAdapter,
Collection<StreamListenerResultAdapter> streamListenerResultAdapters,
ObjectProvider<CleanupConfig> cleanupConfig,
ObjectProvider<StreamsBuilderFactoryBeanConfigurer> customizerProvider, ConfigurableEnvironment environment) {
return new KafkaStreamsStreamListenerSetupMethodOrchestrator(
bindingServiceProperties, kafkaStreamsExtendedBindingProperties,
keyValueSerdeResolver, kafkaStreamsBindingInformationCatalogue,
kafkaStreamListenerParameterAdapter, streamListenerResultAdapters,
cleanupConfig.getIfUnique(), customizerProvider.getIfUnique(), environment);
}
@Bean
public KafkaStreamsMessageConversionDelegate messageConversionDelegate(
@Qualifier(IntegrationContextUtils.ARGUMENT_RESOLVER_MESSAGE_CONVERTER_BEAN_NAME)
@@ -303,6 +338,20 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
KafkaStreamsBindingInformationCatalogue, binderConfigurationProperties);
}
@Bean
public MessageConverterDelegateSerde messageConverterDelegateSerde(
@Qualifier(IntegrationContextUtils.ARGUMENT_RESOLVER_MESSAGE_CONVERTER_BEAN_NAME)
CompositeMessageConverter compositeMessageConverterFactory) {
return new MessageConverterDelegateSerde(compositeMessageConverterFactory);
}
@Bean
public CompositeNonNativeSerde compositeNonNativeSerde(
@Qualifier(IntegrationContextUtils.ARGUMENT_RESOLVER_MESSAGE_CONVERTER_BEAN_NAME)
CompositeMessageConverter compositeMessageConverterFactory) {
return new CompositeNonNativeSerde(compositeMessageConverterFactory);
}
@Bean
public KStreamBoundElementFactory kStreamBoundElementFactory(
BindingServiceProperties bindingServiceProperties,

View File

@@ -42,7 +42,7 @@ import org.springframework.util.CollectionUtils;
* A catalogue that provides binding information for Kafka Streams target types such as
* KStream. It also keeps a catalogue for the underlying {@link StreamsBuilderFactoryBean}
* and {@link StreamsConfig} associated with various
* Kafka Streams functions in the
* {@link org.springframework.cloud.stream.annotation.StreamListener} methods in the
* {@link org.springframework.context.ApplicationContext}.
*
* @author Soby Chacko

View File

@@ -51,6 +51,7 @@ import org.springframework.cloud.stream.binder.kafka.streams.function.KafkaStrea
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
import org.springframework.cloud.stream.binding.StreamListenerErrorMessages;
import org.springframework.cloud.stream.config.BindingProperties;
import org.springframework.cloud.stream.config.BindingServiceProperties;
import org.springframework.cloud.stream.function.FunctionConstants;
@@ -561,7 +562,7 @@ public class KafkaStreamsFunctionProcessor extends AbstractKafkaStreamsBinderPro
}
}
else {
//throw new IllegalStateException(StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
throw new IllegalStateException(StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
}
}
return arguments;

View File

@@ -0,0 +1,521 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams;
import java.lang.reflect.Method;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.springframework.beans.factory.BeanInitializationException;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsStateStoreProperties;
import org.springframework.cloud.stream.binding.StreamListenerErrorMessages;
import org.springframework.cloud.stream.binding.StreamListenerParameterAdapter;
import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
import org.springframework.cloud.stream.binding.StreamListenerSetupMethodOrchestrator;
import org.springframework.cloud.stream.config.BindingProperties;
import org.springframework.cloud.stream.config.BindingServiceProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.core.MethodParameter;
import org.springframework.core.ResolvableType;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
import org.springframework.kafka.config.StreamsBuilderFactoryBeanConfigurer;
import org.springframework.kafka.core.CleanupConfig;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
/**
* Kafka Streams specific implementation for {@link StreamListenerSetupMethodOrchestrator}
* that overrides the default mechanisms for invoking StreamListener adapters.
* <p>
* The orchestration primarily focus on the following areas:
* <p>
* 1. Allow multiple KStream output bindings (KStream branching) by allowing more than one
* output values on {@link SendTo} 2. Allow multiple inbound bindings for multiple KStream
* and or KTable/GlobalKTable types. 3. Each StreamListener method that it orchestrates
* gets its own {@link StreamsBuilderFactoryBean} and {@link StreamsConfig}
*
* @author Soby Chacko
* @author Lei Chen
* @author Gary Russell
*/
class KafkaStreamsStreamListenerSetupMethodOrchestrator extends AbstractKafkaStreamsBinderProcessor
implements StreamListenerSetupMethodOrchestrator {
private static final Log LOG = LogFactory
.getLog(KafkaStreamsStreamListenerSetupMethodOrchestrator.class);
private final StreamListenerParameterAdapter streamListenerParameterAdapter;
private final Collection<StreamListenerResultAdapter> streamListenerResultAdapters;
private final BindingServiceProperties bindingServiceProperties;
private final KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties;
private final KeyValueSerdeResolver keyValueSerdeResolver;
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
private final Map<Method, List<String>> registeredStoresPerMethod = new HashMap<>();
private final Map<Method, StreamsBuilderFactoryBean> methodStreamsBuilderFactoryBeanMap = new HashMap<>();
StreamsBuilderFactoryBeanConfigurer customizer;
private final ConfigurableEnvironment environment;
KafkaStreamsStreamListenerSetupMethodOrchestrator(
BindingServiceProperties bindingServiceProperties,
KafkaStreamsExtendedBindingProperties extendedBindingProperties,
KeyValueSerdeResolver keyValueSerdeResolver,
KafkaStreamsBindingInformationCatalogue bindingInformationCatalogue,
StreamListenerParameterAdapter streamListenerParameterAdapter,
Collection<StreamListenerResultAdapter> listenerResultAdapters,
CleanupConfig cleanupConfig,
StreamsBuilderFactoryBeanConfigurer customizer,
ConfigurableEnvironment environment) {
super(bindingServiceProperties, bindingInformationCatalogue, extendedBindingProperties, keyValueSerdeResolver, cleanupConfig);
this.bindingServiceProperties = bindingServiceProperties;
this.kafkaStreamsExtendedBindingProperties = extendedBindingProperties;
this.keyValueSerdeResolver = keyValueSerdeResolver;
this.kafkaStreamsBindingInformationCatalogue = bindingInformationCatalogue;
this.streamListenerParameterAdapter = streamListenerParameterAdapter;
this.streamListenerResultAdapters = listenerResultAdapters;
this.customizer = customizer;
this.environment = environment;
}
@Override
public boolean supports(Method method) {
return methodParameterSupports(method) && (methodReturnTypeSuppports(method)
|| Void.TYPE.equals(method.getReturnType()));
}
private boolean methodReturnTypeSuppports(Method method) {
Class<?> returnType = method.getReturnType();
if (returnType.equals(KStream.class) || (returnType.isArray()
&& returnType.getComponentType().equals(KStream.class))) {
return true;
}
return false;
}
private boolean methodParameterSupports(Method method) {
boolean supports = false;
for (int i = 0; i < method.getParameterCount(); i++) {
MethodParameter methodParameter = MethodParameter.forExecutable(method, i);
Class<?> parameterType = methodParameter.getParameterType();
if (parameterType.equals(KStream.class) || parameterType.equals(KTable.class)
|| parameterType.equals(GlobalKTable.class)) {
supports = true;
}
}
return supports;
}
@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public void orchestrateStreamListenerSetupMethod(StreamListener streamListener,
Method method, Object bean) {
String[] methodAnnotatedOutboundNames = getOutboundBindingTargetNames(method);
validateStreamListenerMethod(streamListener, method,
methodAnnotatedOutboundNames);
String methodAnnotatedInboundName = streamListener.value();
Object[] adaptedInboundArguments = adaptAndRetrieveInboundArguments(method,
methodAnnotatedInboundName, this.applicationContext,
this.streamListenerParameterAdapter);
try {
ReflectionUtils.makeAccessible(method);
if (Void.TYPE.equals(method.getReturnType())) {
method.invoke(bean, adaptedInboundArguments);
}
else {
Object result = method.invoke(bean, adaptedInboundArguments);
if (methodAnnotatedOutboundNames != null && methodAnnotatedOutboundNames.length > 0) {
if (result.getClass().isArray()) {
Assert.isTrue(
methodAnnotatedOutboundNames.length == ((Object[]) result).length,
"Result does not match with the number of declared outbounds");
}
else {
Assert.isTrue(methodAnnotatedOutboundNames.length == 1,
"Result does not match with the number of declared outbounds");
}
}
if (methodAnnotatedOutboundNames != null && methodAnnotatedOutboundNames.length > 0) {
methodAnnotatedInboundName = populateInboundIfMissing(method, methodAnnotatedInboundName);
final StreamsBuilderFactoryBean streamsBuilderFactoryBean = this.kafkaStreamsBindingInformationCatalogue
.getStreamsBuilderFactoryBeanPerBinding().get(methodAnnotatedInboundName);
if (result.getClass().isArray()) {
Object[] outboundKStreams = (Object[]) result;
int i = 0;
for (Object outboundKStream : outboundKStreams) {
final String methodAnnotatedOutboundName = methodAnnotatedOutboundNames[i++];
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactoryPerBinding(
methodAnnotatedOutboundName, streamsBuilderFactoryBean);
Object targetBean = this.applicationContext
.getBean(methodAnnotatedOutboundName);
kafkaStreamsBindingInformationCatalogue.addOutboundKStreamResolvable(targetBean, ResolvableType.forMethodReturnType(method));
adaptStreamListenerResult(outboundKStream, targetBean);
}
}
else {
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactoryPerBinding(
methodAnnotatedOutboundNames[0], streamsBuilderFactoryBean);
Object targetBean = this.applicationContext
.getBean(methodAnnotatedOutboundNames[0]);
kafkaStreamsBindingInformationCatalogue.addOutboundKStreamResolvable(targetBean, ResolvableType.forMethodReturnType(method));
adaptStreamListenerResult(result, targetBean);
}
}
}
}
catch (Exception ex) {
throw new BeanInitializationException(
"Cannot setup StreamListener for " + method, ex);
}
}
private String populateInboundIfMissing(Method method, String methodAnnotatedInboundName) {
if (!StringUtils.hasText(methodAnnotatedInboundName)) {
Object[] arguments = new Object[method.getParameterTypes().length];
if (arguments.length > 0) {
MethodParameter methodParameter = MethodParameter.forExecutable(method, 0);
if (methodParameter.hasParameterAnnotation(Input.class)) {
Input methodAnnotation = methodParameter
.getParameterAnnotation(Input.class);
methodAnnotatedInboundName = methodAnnotation.value();
}
}
}
return methodAnnotatedInboundName;
}
@SuppressWarnings("unchecked")
private void adaptStreamListenerResult(Object outboundKStream, Object targetBean) {
for (StreamListenerResultAdapter streamListenerResultAdapter : this.streamListenerResultAdapters) {
if (streamListenerResultAdapter.supports(
outboundKStream.getClass(), targetBean.getClass())) {
streamListenerResultAdapter.adapt(outboundKStream,
targetBean);
break;
}
}
}
@Override
@SuppressWarnings({"unchecked"})
public Object[] adaptAndRetrieveInboundArguments(Method method, String inboundName,
ApplicationContext applicationContext,
StreamListenerParameterAdapter... adapters) {
Object[] arguments = new Object[method.getParameterTypes().length];
for (int parameterIndex = 0; parameterIndex < arguments.length; parameterIndex++) {
MethodParameter methodParameter = MethodParameter.forExecutable(method,
parameterIndex);
Class<?> parameterType = methodParameter.getParameterType();
Object targetReferenceValue = null;
if (methodParameter.hasParameterAnnotation(Input.class)) {
targetReferenceValue = AnnotationUtils
.getValue(methodParameter.getParameterAnnotation(Input.class));
Input methodAnnotation = methodParameter
.getParameterAnnotation(Input.class);
inboundName = methodAnnotation.value();
}
else if (arguments.length == 1 && StringUtils.hasText(inboundName)) {
targetReferenceValue = inboundName;
}
if (targetReferenceValue != null) {
Assert.isInstanceOf(String.class, targetReferenceValue,
"Annotation value must be a String");
Object targetBean = applicationContext
.getBean((String) targetReferenceValue);
BindingProperties bindingProperties = this.bindingServiceProperties
.getBindingProperties(inboundName);
// Retrieve the StreamsConfig created for this method if available.
// Otherwise, create the StreamsBuilderFactory and get the underlying
// config.
if (!this.methodStreamsBuilderFactoryBeanMap.containsKey(method)) {
StreamsBuilderFactoryBean streamsBuilderFactoryBean = buildStreamsBuilderAndRetrieveConfig(method.getDeclaringClass().getSimpleName() + "-" + method.getName(),
applicationContext,
inboundName, null, customizer, this.environment, bindingProperties);
this.methodStreamsBuilderFactoryBeanMap.put(method, streamsBuilderFactoryBean);
}
try {
StreamsBuilderFactoryBean streamsBuilderFactoryBean = this.methodStreamsBuilderFactoryBeanMap
.get(method);
StreamsBuilder streamsBuilder = streamsBuilderFactoryBean.getObject();
final String applicationId = streamsBuilderFactoryBean.getStreamsConfiguration().getProperty(StreamsConfig.APPLICATION_ID_CONFIG);
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties
.getExtendedConsumerProperties(inboundName);
extendedConsumerProperties.setApplicationId(applicationId);
// get state store spec
KafkaStreamsStateStoreProperties spec = buildStateStoreSpec(method);
Serde<?> keySerde = this.keyValueSerdeResolver
.getInboundKeySerde(extendedConsumerProperties, ResolvableType.forMethodParameter(methodParameter));
LOG.info("Key Serde used for " + targetReferenceValue + ": " + keySerde.getClass().getName());
Serde<?> valueSerde = bindingServiceProperties.getConsumerProperties(inboundName).isUseNativeDecoding() ?
getValueSerde(inboundName, extendedConsumerProperties, ResolvableType.forMethodParameter(methodParameter)) : Serdes.ByteArray();
LOG.info("Value Serde used for " + targetReferenceValue + ": " + valueSerde.getClass().getName());
Topology.AutoOffsetReset autoOffsetReset = getAutoOffsetReset(inboundName, extendedConsumerProperties);
if (parameterType.isAssignableFrom(KStream.class)) {
KStream<?, ?> stream = getkStream(inboundName, spec,
bindingProperties, extendedConsumerProperties, streamsBuilder, keySerde, valueSerde,
autoOffsetReset, parameterIndex == 0);
KStreamBoundElementFactory.KStreamWrapper kStreamWrapper = (KStreamBoundElementFactory.KStreamWrapper) targetBean;
// wrap the proxy created during the initial target type binding
// with real object (KStream)
kStreamWrapper.wrap((KStream<Object, Object>) stream);
this.kafkaStreamsBindingInformationCatalogue.addKeySerde(stream, keySerde);
BindingProperties bindingProperties1 = this.kafkaStreamsBindingInformationCatalogue.getBindingProperties().get(kStreamWrapper);
this.kafkaStreamsBindingInformationCatalogue.registerBindingProperties(stream, bindingProperties1);
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactoryPerBinding(inboundName, streamsBuilderFactoryBean);
this.kafkaStreamsBindingInformationCatalogue.addConsumerPropertiesPerSbfb(streamsBuilderFactoryBean,
bindingServiceProperties.getConsumerProperties(inboundName));
for (StreamListenerParameterAdapter streamListenerParameterAdapter : adapters) {
if (streamListenerParameterAdapter.supports(stream.getClass(),
methodParameter)) {
arguments[parameterIndex] = streamListenerParameterAdapter
.adapt(stream, methodParameter);
break;
}
}
if (arguments[parameterIndex] == null
&& parameterType.isAssignableFrom(stream.getClass())) {
arguments[parameterIndex] = stream;
}
Assert.notNull(arguments[parameterIndex],
"Cannot convert argument " + parameterIndex + " of "
+ method + "from " + stream.getClass() + " to "
+ parameterType);
}
else {
handleKTableGlobalKTableInputs(arguments, parameterIndex, inboundName, parameterType, targetBean, streamsBuilderFactoryBean,
streamsBuilder, extendedConsumerProperties, keySerde, valueSerde, autoOffsetReset, parameterIndex == 0);
}
}
catch (Exception ex) {
throw new IllegalStateException(ex);
}
}
else {
throw new IllegalStateException(
StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
}
}
return arguments;
}
private StoreBuilder buildStateStore(KafkaStreamsStateStoreProperties spec) {
try {
Serde<?> keySerde = this.keyValueSerdeResolver
.getStateStoreKeySerde(spec.getKeySerdeString());
Serde<?> valueSerde = this.keyValueSerdeResolver
.getStateStoreValueSerde(spec.getValueSerdeString());
StoreBuilder builder;
switch (spec.getType()) {
case KEYVALUE:
builder = Stores.keyValueStoreBuilder(
Stores.persistentKeyValueStore(spec.getName()), keySerde,
valueSerde);
break;
case WINDOW:
builder = Stores
.windowStoreBuilder(
Stores.persistentWindowStore(spec.getName(),
Duration.ofMillis(spec.getRetention()), Duration.ofMillis(3), false),
keySerde, valueSerde);
break;
case SESSION:
builder = Stores.sessionStoreBuilder(Stores.persistentSessionStore(
spec.getName(), Duration.ofMillis(spec.getRetention())), keySerde, valueSerde);
break;
default:
throw new UnsupportedOperationException(
"state store type (" + spec.getType() + ") is not supported!");
}
if (spec.isCacheEnabled()) {
builder = builder.withCachingEnabled();
}
if (spec.isLoggingDisabled()) {
builder = builder.withLoggingDisabled();
}
return builder;
}
catch (Exception ex) {
LOG.error("failed to build state store exception : " + ex);
throw ex;
}
}
private KStream<?, ?> getkStream(String inboundName,
KafkaStreamsStateStoreProperties storeSpec,
BindingProperties bindingProperties,
KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties, StreamsBuilder streamsBuilder,
Serde<?> keySerde, Serde<?> valueSerde,
Topology.AutoOffsetReset autoOffsetReset, boolean firstBuild) {
if (storeSpec != null) {
StoreBuilder storeBuilder = buildStateStore(storeSpec);
streamsBuilder.addStateStore(storeBuilder);
if (LOG.isInfoEnabled()) {
LOG.info("state store " + storeBuilder.name() + " added to topology");
}
}
return getKStream(inboundName, bindingProperties, kafkaStreamsConsumerProperties, streamsBuilder,
keySerde, valueSerde, autoOffsetReset, firstBuild);
}
private void validateStreamListenerMethod(StreamListener streamListener,
Method method, String[] methodAnnotatedOutboundNames) {
String methodAnnotatedInboundName = streamListener.value();
if (methodAnnotatedOutboundNames != null) {
for (String s : methodAnnotatedOutboundNames) {
if (StringUtils.hasText(s)) {
Assert.isTrue(isDeclarativeOutput(method, s),
"Method must be declarative");
}
}
}
if (StringUtils.hasText(methodAnnotatedInboundName)) {
int methodArgumentsLength = method.getParameterTypes().length;
for (int parameterIndex = 0; parameterIndex < methodArgumentsLength; parameterIndex++) {
MethodParameter methodParameter = MethodParameter.forExecutable(method,
parameterIndex);
Assert.isTrue(
isDeclarativeInput(methodAnnotatedInboundName, methodParameter),
"Method must be declarative");
}
}
}
@SuppressWarnings("unchecked")
private boolean isDeclarativeOutput(Method m, String targetBeanName) {
boolean declarative;
Class<?> returnType = m.getReturnType();
if (returnType.isArray()) {
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
declarative = this.streamListenerResultAdapters.stream()
.anyMatch((slpa) -> slpa.supports(returnType.getComponentType(),
targetBeanClass));
return declarative;
}
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
declarative = this.streamListenerResultAdapters.stream()
.anyMatch((slpa) -> slpa.supports(returnType, targetBeanClass));
return declarative;
}
@SuppressWarnings("unchecked")
private boolean isDeclarativeInput(String targetBeanName,
MethodParameter methodParameter) {
if (!methodParameter.getParameterType().isAssignableFrom(Object.class)
&& this.applicationContext.containsBean(targetBeanName)) {
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
if (targetBeanClass != null) {
boolean supports = KafkaStreamsBinderUtils.supportsKStream(methodParameter, targetBeanClass);
if (!supports) {
supports = KTable.class.isAssignableFrom(targetBeanClass)
&& KTable.class.isAssignableFrom(methodParameter.getParameterType());
if (!supports) {
supports = GlobalKTable.class.isAssignableFrom(targetBeanClass)
&& GlobalKTable.class.isAssignableFrom(methodParameter.getParameterType());
}
}
return supports;
}
}
return false;
}
private static String[] getOutboundBindingTargetNames(Method method) {
SendTo sendTo = AnnotationUtils.findAnnotation(method, SendTo.class);
if (sendTo != null) {
Assert.isTrue(!ObjectUtils.isEmpty(sendTo.value()),
StreamListenerErrorMessages.ATLEAST_ONE_OUTPUT);
Assert.isTrue(sendTo.value().length >= 1,
"At least one outbound destination need to be provided.");
return sendTo.value();
}
return null;
}
@SuppressWarnings({"unchecked"})
private KafkaStreamsStateStoreProperties buildStateStoreSpec(Method method) {
if (!this.registeredStoresPerMethod.containsKey(method)) {
KafkaStreamsStateStore spec = AnnotationUtils.findAnnotation(method,
KafkaStreamsStateStore.class);
if (spec != null) {
Assert.isTrue(!ObjectUtils.isEmpty(spec.name()), "name cannot be empty");
Assert.isTrue(spec.name().length() >= 1, "name cannot be empty.");
this.registeredStoresPerMethod.put(method, new ArrayList<>());
this.registeredStoresPerMethod.get(method).add(spec.name());
KafkaStreamsStateStoreProperties props = new KafkaStreamsStateStoreProperties();
props.setName(spec.name());
props.setType(spec.type());
props.setLength(spec.lengthMs());
props.setKeySerdeString(spec.keySerde());
props.setRetention(spec.retentionMs());
props.setValueSerdeString(spec.valueSerde());
props.setCacheEnabled(spec.cache());
props.setLoggingDisabled(!spec.logging());
return props;
}
}
return null;
}
}
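For orientation, here is a minimal, self-contained sketch of what the WINDOW branch of buildStateStore above produces. The store name, sizes, and String serdes are assumptions made for this example rather than values taken from the repository:

import java.time.Duration;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowStore;

class WindowStoreSketch {

    // Mirrors the WINDOW case above: retention comes from the spec's retentionMs,
    // the window size from its lengthMs, and duplicates are not retained.
    static StoreBuilder<WindowStore<String, String>> windowStore(String name,
            long retentionMs, long lengthMs) {
        return Stores.windowStoreBuilder(
                Stores.persistentWindowStore(name,
                        Duration.ofMillis(retentionMs),
                        Duration.ofMillis(lengthMs),
                        false),
                Serdes.String(), Serdes.String());
    }

    public static void main(String[] args) {
        StoreBuilder<WindowStore<String, String>> builder =
                windowStore("mystate", 600_000L, 300_000L).withCachingEnabled();
        System.out.println(builder.name()); // prints "mystate"
    }
}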

View File

@@ -39,7 +39,8 @@ import org.springframework.kafka.streams.KafkaStreamsMicrometerListener;
* This {@link SmartLifecycle} class ensures that the bean created from it is started very
* late through the bootstrap process by setting the phase value closer to
* Integer.MAX_VALUE. This is to guarantee that the {@link StreamsBuilderFactoryBean} on a
* function with multiple bindings is only started after all the binding phases have completed successfully.
* {@link org.springframework.cloud.stream.annotation.StreamListener} method with multiple
* bindings is only started after all the binding phases have completed successfully.
*
* @author Soby Chacko
*/

View File

@@ -0,0 +1,90 @@
/*
* Copyright 2017-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.annotations;
import org.apache.kafka.streams.kstream.KStream;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
/**
* Bindable interface for {@link KStream} input and output.
*
* This interface can be used as a bindable interface with
* {@link org.springframework.cloud.stream.annotation.EnableBinding} when both the input and
* output types are a single KStream. In other scenarios where multiple types are required,
* other similar bindable interfaces can be created and used. For example, multiple KStreams
* may be required on the outbound side in the case of KStream branching, or multiple input
* types may be required, either as multiple KStreams or as a combination of KStreams and
* KTables. In those cases, new bindable interfaces that match those requirements must be
* created. Here are some examples.
*
* <pre class="code">
* interface KStreamBranchProcessor {
* &#064;Input("input")
* KStream&lt;?, ?&gt; input();
*
* &#064;Output("output-1")
* KStream&lt;?, ?&gt; output1();
*
* &#064;Output("output-2")
* KStream&lt;?, ?&gt; output2();
*
* &#064;Output("output-3")
* KStream&lt;?, ?&gt; output3();
*
* ......
*
* }
*</pre>
*
* <pre class="code">
* interface KStreamKtableProcessor {
* &#064;Input("input-1")
* KStream&lt;?, ?&gt; input1();
*
* &#064;Input("input-2")
* KTable&lt;?, ?&gt; input2();
*
* &#064;Output("output")
* KStream&lt;?, ?&gt; output();
*
* ......
*
* }
*</pre>
*
* @author Marius Bogoevici
* @author Soby Chacko
*/
public interface KafkaStreamsProcessor {
/**
* Input binding.
* @return {@link Input} binding for {@link KStream} type.
*/
@Input("input")
KStream<?, ?> input();
/**
* Output binding.
* @return {@link Output} binding for {@link KStream} type.
*/
@Output("output")
KStream<?, ?> output();
}
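A minimal, hypothetical application that binds this interface, written in the same StreamListener style used by the tests later in this changeset; the uppercase logic and the application class name are illustrative only:

import org.apache.kafka.streams.kstream.KStream;

import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.messaging.handler.annotation.SendTo;

@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
public class UppercaseApplication {

    // Reads from the "input" binding and sends the transformed stream to "output".
    @StreamListener("input")
    @SendTo("output")
    public KStream<String, String> process(KStream<String, String> input) {
        return input.mapValues(value -> value.toUpperCase());
    }
}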

View File

@@ -0,0 +1,115 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsStateStoreProperties;
/**
* Annotation for declaring a Kafka Streams state store.
*
* This annotation can be used to inject a state store specification into the KStream building
* process so that the desired store can be built by the StreamsBuilder and added to the topology
* for later use by processors. This is particularly useful when the Streams DSL needs to be
* combined with the low-level Processor API. In those cases, if a writable state store is needed
* in a processor, it must be created using this annotation. Here is an example.
*
* <pre class="code">
* &#064;StreamListener("input")
* &#064;KafkaStreamsStateStore(name="mystate", type= KafkaStreamsStateStoreProperties.StoreType.WINDOW,
* size=300000)
* public void process(KStream&lt;Object, Product&gt; input) {
* ......
* }
* </pre>
*
* With that, you should be able to read/write this state store in your
* processor/transformer code.
*
* <pre class="code">
* new Processor&lt;Object, Product&gt;() {
* WindowStore&lt;Object, String&gt; state;
* &#064;Override
* public void init(ProcessorContext processorContext) {
* state = (WindowStore)processorContext.getStateStore("mystate");
* ......
* }
* }
* </pre>
*
* @author Lei Chen
*/
@Target({ ElementType.TYPE, ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Retention(RetentionPolicy.RUNTIME)
public @interface KafkaStreamsStateStore {
/**
* Provides name of the state store.
* @return name of state store.
*/
String name() default "";
/**
* State store type.
* @return {@link KafkaStreamsStateStoreProperties.StoreType} of state store.
*/
KafkaStreamsStateStoreProperties.StoreType type() default KafkaStreamsStateStoreProperties.StoreType.KEYVALUE;
/**
* Serde used for key.
* @return key serde of state store.
*/
String keySerde() default "org.apache.kafka.common.serialization.Serdes$StringSerde";
/**
* Serde used for value.
* @return value serde of state store.
*/
String valueSerde() default "org.apache.kafka.common.serialization.Serdes$StringSerde";
/**
* Length in milliseconds of the window (for windowed stores).
* @return window length in milliseconds (for windowed stores).
*/
long lengthMs() default 0;
/**
* Retention period for windowed store windows.
* @return the maximum period of time in milliseconds to keep each window in this
* store (for windowed stores).
*/
long retentionMs() default 0;
/**
* Whether caching is enabled or not.
* @return whether caching should be enabled on the created store.
*/
boolean cache() default false;
/**
* Whether logging is enabled or not.
* @return whether logging should be enabled on the created store.
*/
boolean logging() default true;
}
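To complement the Javadoc above, a hedged sketch of how the declared store is then referenced by name when wiring a transformer into the topology; the Product type, the bindings, and the transformer logic are assumptions made for illustration:

// Assumes the usual imports from org.apache.kafka.streams (KeyValue, kstream.Transformer,
// processor.ProcessorContext, state.WindowStore) and the Spring Cloud Stream annotations.
@StreamListener("input")
@SendTo("output")
@KafkaStreamsStateStore(name = "mystate",
        type = KafkaStreamsStateStoreProperties.StoreType.WINDOW,
        lengthMs = 300000, retentionMs = 600000)
public KStream<Object, Product> process(KStream<Object, Product> input) {
    // The store name passed to transform() must match the annotation's name attribute.
    return input.transform(() -> new Transformer<Object, Product, KeyValue<Object, Product>>() {

        private WindowStore<String, String> state;

        @Override
        @SuppressWarnings("unchecked")
        public void init(ProcessorContext context) {
            this.state = (WindowStore<String, String>) context.getStateStore("mystate");
        }

        @Override
        public KeyValue<Object, Product> transform(Object key, Product value) {
            this.state.put(String.valueOf(key), String.valueOf(value), System.currentTimeMillis());
            return KeyValue.pair(key, value);
        }

        @Override
        public void close() {
        }
    }, "mystate");
}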

View File

@@ -0,0 +1,161 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.properties;
/**
* Properties for Kafka Streams state store.
*
* @author Lei Chen
*/
public class KafkaStreamsStateStoreProperties {
/**
* Enumeration for store type.
*/
public enum StoreType {
/**
* Key value store.
*/
KEYVALUE("keyvalue"),
/**
* Window store.
*/
WINDOW("window"),
/**
* Session store.
*/
SESSION("session");
private final String type;
StoreType(final String type) {
this.type = type;
}
@Override
public String toString() {
return this.type;
}
}
/**
* Name for this state store.
*/
private String name;
/**
* Type for this state store.
*/
private StoreType type;
/**
* Size/length of this state store in ms. Only applicable for window store.
*/
private long length;
/**
* Retention period for this state store in ms.
*/
private long retention;
/**
* Key serde class specified per state store.
*/
private String keySerdeString;
/**
* Value serde class specified per state store.
*/
private String valueSerdeString;
/**
* Whether caching is enabled on this state store.
*/
private boolean cacheEnabled;
/**
* Whether logging is disabled on this state store.
*/
private boolean loggingDisabled;
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public StoreType getType() {
return this.type;
}
public void setType(StoreType type) {
this.type = type;
}
public long getLength() {
return this.length;
}
public void setLength(long length) {
this.length = length;
}
public long getRetention() {
return this.retention;
}
public void setRetention(long retention) {
this.retention = retention;
}
public String getKeySerdeString() {
return this.keySerdeString;
}
public void setKeySerdeString(String keySerdeString) {
this.keySerdeString = keySerdeString;
}
public String getValueSerdeString() {
return this.valueSerdeString;
}
public void setValueSerdeString(String valueSerdeString) {
this.valueSerdeString = valueSerdeString;
}
public boolean isCacheEnabled() {
return this.cacheEnabled;
}
public void setCacheEnabled(boolean cacheEnabled) {
this.cacheEnabled = cacheEnabled;
}
public boolean isLoggingDisabled() {
return this.loggingDisabled;
}
public void setLoggingDisabled(boolean loggingDisabled) {
this.loggingDisabled = loggingDisabled;
}
}

View File

@@ -0,0 +1,37 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.serde;
import org.springframework.messaging.converter.CompositeMessageConverter;
/**
* This class provides the same functionality as {@link MessageConverterDelegateSerde} and is deprecated.
* It is kept for backward compatibility reasons and will be removed in version 3.1.
*
* @author Soby Chacko
* @since 2.1
*
* @deprecated in favor of {@link MessageConverterDelegateSerde}
*/
@Deprecated
public class CompositeNonNativeSerde extends MessageConverterDelegateSerde {
public CompositeNonNativeSerde(CompositeMessageConverter compositeMessageConverter) {
super(compositeMessageConverter);
}
}

View File

@@ -0,0 +1,228 @@
/*
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.serde;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serializer;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.converter.CompositeMessageConverter;
import org.springframework.messaging.converter.MessageConverter;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.util.Assert;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;
/**
* A {@link Serde} implementation that wraps the list of {@link MessageConverter}s from
* {@link CompositeMessageConverter}.
*
* The primary motivation for this class is to provide an Avro-based {@link Serde} that is
* compatible with the schema registry that Spring Cloud Stream provides. When using the
* schema registry support from Spring Cloud Stream in a Kafka Streams binder based
* application, applications can deserialize the incoming Kafka Streams records using
* the built-in Avro {@link MessageConverter}. However, this same message conversion
* approach does not work downstream in other operations of the Kafka Streams topology,
* as some of them need a {@link Serde} instance that can talk to the Spring Cloud Stream
* provided Schema Registry. This implementation solves that problem.
*
* Only Avro and JSON based converters are exposed as binder provided {@link Serde}
* implementations currently.
*
* Users of this class must call the
* {@link MessageConverterDelegateSerde#configure(Map, boolean)} method to configure the
* {@link Serde} object. At the very least, the configuration map must include a key called
* "valueClass" to indicate the type of the target object for deserialization. If a
* content type other than JSON is needed (currently, only Avro is available besides
* JSON), it must be included in the configuration map under the key "contentType".
* For example:
*
* <pre class="code">
* Map&lt;String, Object&gt; config = new HashMap&lt;&gt;();
* config.put("valueClass", Foo.class);
* config.put("contentType", "application/avro");
* </pre>
*
* Then use the above map when calling the configure method.
*
* This class is only intended to be used when writing a Spring Cloud Stream Kafka Streams
* application that uses Spring Cloud Stream schema registry for schema evolution.
*
* An instance of this class is provided as a bean by the binder configuration and
* applications can typically autowire that bean. This is the expected usage pattern
* of this class.
*
* @param <T> type of the object to marshall
* @author Soby Chacko
* @since 3.0
* @deprecated in favor of other schema registry providers instead of Spring Cloud Schema Registry. See its motivation above.
*/
@Deprecated
public class MessageConverterDelegateSerde<T> implements Serde<T> {
private static final String VALUE_CLASS_HEADER = "valueClass";
private static final String AVRO_FORMAT = "avro";
private static final MimeType DEFAULT_AVRO_MIME_TYPE = new MimeType("application",
"*+" + AVRO_FORMAT);
private final MessageConverterDelegateDeserializer<T> messageConverterDelegateDeserializer;
private final MessageConverterDelegateSerializer<T> messageConverterDelegateSerializer;
public MessageConverterDelegateSerde(
CompositeMessageConverter compositeMessageConverter) {
this.messageConverterDelegateDeserializer = new MessageConverterDelegateDeserializer<>(
compositeMessageConverter);
this.messageConverterDelegateSerializer = new MessageConverterDelegateSerializer<>(
compositeMessageConverter);
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
this.messageConverterDelegateDeserializer.configure(configs, isKey);
this.messageConverterDelegateSerializer.configure(configs, isKey);
}
@Override
public void close() {
// No-op
}
@Override
public Serializer<T> serializer() {
return this.messageConverterDelegateSerializer;
}
@Override
public Deserializer<T> deserializer() {
return this.messageConverterDelegateDeserializer;
}
private static MimeType resolveMimeType(Map<String, ?> configs) {
if (configs.containsKey(MessageHeaders.CONTENT_TYPE)) {
String contentType = (String) configs.get(MessageHeaders.CONTENT_TYPE);
if (DEFAULT_AVRO_MIME_TYPE.equals(MimeTypeUtils.parseMimeType(contentType))) {
return DEFAULT_AVRO_MIME_TYPE;
}
else if (contentType.contains("avro")) {
return MimeTypeUtils.parseMimeType("application/avro");
}
else {
return new MimeType("application", "json", StandardCharsets.UTF_8);
}
}
else {
return new MimeType("application", "json", StandardCharsets.UTF_8);
}
}
/**
* Custom {@link Deserializer} that uses the {@link org.springframework.cloud.stream.converter.CompositeMessageConverterFactory}.
*
* @param <U> parameterized target type for deserialization
*/
private static class MessageConverterDelegateDeserializer<U> implements Deserializer<U> {
private final MessageConverter messageConverter;
private MimeType mimeType;
private Class<?> valueClass;
MessageConverterDelegateDeserializer(
CompositeMessageConverter compositeMessageConverter) {
this.messageConverter = compositeMessageConverter;
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
Assert.isTrue(configs.containsKey(VALUE_CLASS_HEADER),
"Deserializers must provide a configuration for valueClass.");
final Object valueClass = configs.get(VALUE_CLASS_HEADER);
Assert.isTrue(valueClass instanceof Class,
"Deserializers must provide a valid value for valueClass.");
this.valueClass = (Class<?>) valueClass;
this.mimeType = resolveMimeType(configs);
}
@SuppressWarnings("unchecked")
@Override
public U deserialize(String topic, byte[] data) {
Message<?> message = MessageBuilder.withPayload(data)
.setHeader(MessageHeaders.CONTENT_TYPE, this.mimeType.toString())
.build();
U messageConverted = (U) this.messageConverter.fromMessage(message,
this.valueClass);
Assert.notNull(messageConverted, "Deserialization failed.");
return messageConverted;
}
@Override
public void close() {
// No-op
}
}
/**
* Custom {@link Serializer} that uses the {@link org.springframework.cloud.stream.converter.CompositeMessageConverterFactory}.
*
* @param <V> parameterized type for serialization
*/
private static class MessageConverterDelegateSerializer<V> implements Serializer<V> {
private final MessageConverter messageConverter;
private MimeType mimeType;
MessageConverterDelegateSerializer(
CompositeMessageConverter compositeMessageConverter) {
this.messageConverter = compositeMessageConverter;
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
this.mimeType = resolveMimeType(configs);
}
@Override
public byte[] serialize(String topic, V data) {
Message<?> message = MessageBuilder.withPayload(data).build();
Map<String, Object> headers = new HashMap<>(message.getHeaders());
headers.put(MessageHeaders.CONTENT_TYPE, this.mimeType.toString());
MessageHeaders messageHeaders = new MessageHeaders(headers);
final Object payload = this.messageConverter
.toMessage(message.getPayload(), messageHeaders).getPayload();
return (byte[]) payload;
}
@Override
public void close() {
// No-op
}
}
}
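A hedged usage sketch for this Serde, assuming the binder-provided MessageConverterDelegateSerde bean has been autowired and that Foo is a hypothetical domain type; the grouping step only illustrates where a configured Serde is typically handed to Kafka Streams operations:

// Assumes the usual java.util and org.apache.kafka.streams.kstream imports, plus:
// @Autowired MessageConverterDelegateSerde<Foo> fooSerde;  and  KStream<String, Foo> input;
Map<String, Object> config = new HashMap<>();
config.put("valueClass", Foo.class);
config.put("contentType", "application/avro"); // omit this entry to fall back to JSON
fooSerde.configure(config, false);             // false = value serde, not a key serde

// Hand the configured serde to downstream operations that need it, for example:
KTable<String, Foo> latest = input
        .groupByKey(Grouped.with(Serdes.String(), fooSerde))
        .reduce((previous, current) -> current,
                Materialized.with(Serdes.String(), fooSerde));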

View File

@@ -224,7 +224,6 @@ public class KafkaStreamsFunctionCompositionTests {
try (ConfigurableApplicationContext context = app.run(
"--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.kafka.streams.binder.applicationId=my-app-id",
"--spring.cloud.stream.function.definition=fooBiFunc|anotherFooFunc|yetAnotherFooFunc|lastFunctionInChain",
"--spring.cloud.stream.function.bindings.fooBiFuncanotherFooFuncyetAnotherFooFunclastFunctionInChain-in-0=input1",
"--spring.cloud.stream.function.bindings.fooBiFuncanotherFooFuncyetAnotherFooFunclastFunctionInChain-in-1=input2",
@@ -267,7 +266,6 @@ public class KafkaStreamsFunctionCompositionTests {
try (ConfigurableApplicationContext context = app.run(
"--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.kafka.streams.binder.applicationId=my-app-id-xyz",
"--spring.cloud.stream.function.definition=curriedFunc|anotherFooFunc|yetAnotherFooFunc|lastFunctionInChain",
"--spring.cloud.stream.function.bindings.curriedFuncanotherFooFuncyetAnotherFooFunclastFunctionInChain-in-0=input1",
"--spring.cloud.stream.function.bindings.curriedFuncanotherFooFuncyetAnotherFooFunclastFunctionInChain-in-1=input2",

View File

@@ -19,7 +19,6 @@ package org.springframework.cloud.stream.binder.kafka.streams;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.function.Function;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -42,6 +41,7 @@ import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
@@ -49,6 +49,9 @@ import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
@@ -61,9 +64,9 @@ import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.internal.verification.VerificationModeFactory.times;
/**
@@ -147,23 +150,22 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
QueryableStoreType<ReadOnlyKeyValueStore<Object, Object>> storeType = QueryableStoreTypes.keyValueStore();
final StringSerializer serializer = new StringSerializer();
try {
interactiveQueryService.getHostInfo("foo", "foobarApp-key", serializer);
interactiveQueryService.getHostInfo("foo", "fooKey", serializer);
}
catch (Exception ignored) {
}
Mockito.verify(mockKafkaStreams, times(3))
.queryMetadataForKey("foo", "foobarApp-key", serializer);
.queryMetadataForKey("foo", "fooKey", serializer);
}
@Test
public void testKstreamBinderWithPojoInputAndStringOuput() {
@Ignore
public void testKstreamBinderWithPojoInputAndStringOuput() throws Exception {
SpringApplication app = new SpringApplication(ProductCountApplication.class);
app.setWebApplicationType(WebApplicationType.NONE);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.function.bindings.process-in-0=input",
"--spring.cloud.stream.function.bindings.process-out-0=output",
"--spring.cloud.stream.bindings.input.destination=foos",
"--spring.cloud.stream.bindings.output.destination=counts-id",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
@@ -224,25 +226,27 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
assertThat(hostInfo.host() + ":" + hostInfo.port())
.isEqualTo(embeddedKafka.getBrokersAsString());
assertThatThrownBy(() -> interactiveQueryService
.getHostInfo("prod-id-count-store-foo", 123, new IntegerSerializer()))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("Error when retrieving state store.");
HostInfo hostInfoFoo = interactiveQueryService
.getHostInfo("prod-id-count-store-foo", 123, new IntegerSerializer());
assertThat(hostInfoFoo).isNull();
final List<HostInfo> hostInfos = interactiveQueryService.getAllHostsInfo("prod-id-count-store");
assertThat(hostInfos.size()).isEqualTo(1);
final HostInfo hostInfo1 = hostInfos.get(0);
assertThat(hostInfo1.host() + ":" + hostInfo1.port())
.isEqualTo(embeddedKafka.getBrokersAsString());
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@Bean
public Function<KStream<Object, Product>, KStream<?, String>> process() {
@StreamListener("input")
@SendTo("output")
public KStream<?, String> process(KStream<Object, Product> input) {
return input -> input.filter((key, product) -> product.getId() == 123)
return input.filter((key, product) -> product.getId() == 123)
.map((key, value) -> new KeyValue<>(value.id, value))
.groupByKey(Grouped.with(new Serdes.IntegerSerde(),
new JsonSerde<>(Product.class)))

View File

@@ -31,6 +31,7 @@ import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.beans.DirectFieldAccessor;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
@@ -38,6 +39,7 @@ import org.springframework.cloud.stream.binder.kafka.streams.KeyValueSerdeResolv
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
import org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
@@ -141,19 +143,18 @@ public class KafkaStreamsBinderBootstrapTest {
assertThat(streamsConfiguration3.containsKey("spring.json.value.type.method")).isFalse();
applicationContext.getBean(KeyValueSerdeResolver.class);
//TODO: In Kafka Streams 3.1, taskTopology field is removed. Re-evaluate this testing strategy.
// String configuredSerdeTypeResolver = (String) new DirectFieldAccessor(input2SBFB.getKafkaStreams())
// .getPropertyValue("taskTopology.processorNodes[0].valDeserializer.typeResolver.arg$2");
//
// assertThat(this.getClass().getName() + ".determineType").isEqualTo(configuredSerdeTypeResolver);
//
// String configuredKeyDeserializerFieldName = ((String) new DirectFieldAccessor(input2SBFB.getKafkaStreams())
// .getPropertyValue("taskTopology.processorNodes[0].keyDeserializer.typeMapper.classIdFieldName"));
// assertThat(DefaultJackson2JavaTypeMapper.KEY_DEFAULT_CLASSID_FIELD_NAME).isEqualTo(configuredKeyDeserializerFieldName);
//
// String configuredValueDeserializerFieldName = ((String) new DirectFieldAccessor(input2SBFB.getKafkaStreams())
// .getPropertyValue("taskTopology.processorNodes[0].valDeserializer.typeMapper.classIdFieldName"));
// assertThat(DefaultJackson2JavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME).isEqualTo(configuredValueDeserializerFieldName);
String configuredSerdeTypeResolver = (String) new DirectFieldAccessor(input2SBFB.getKafkaStreams())
.getPropertyValue("taskTopology.processorNodes[0].valDeserializer.typeResolver.arg$2");
assertThat(this.getClass().getName() + ".determineType").isEqualTo(configuredSerdeTypeResolver);
String configuredKeyDeserializerFieldName = ((String) new DirectFieldAccessor(input2SBFB.getKafkaStreams())
.getPropertyValue("taskTopology.processorNodes[0].keyDeserializer.typeMapper.classIdFieldName"));
assertThat(DefaultJackson2JavaTypeMapper.KEY_DEFAULT_CLASSID_FIELD_NAME).isEqualTo(configuredKeyDeserializerFieldName);
String configuredValueDeserializerFieldName = ((String) new DirectFieldAccessor(input2SBFB.getKafkaStreams())
.getPropertyValue("taskTopology.processorNodes[0].valDeserializer.typeMapper.classIdFieldName"));
assertThat(DefaultJackson2JavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME).isEqualTo(configuredValueDeserializerFieldName);
applicationContext.close();
}

View File

@@ -0,0 +1,271 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.integration;
import java.time.Duration;
import java.util.Arrays;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.SpyBean;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.cloud.stream.binder.kafka.utils.DlqPartitionFunction;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.PropertySource;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
/**
* @author Soby Chacko
*/
@RunWith(SpringRunner.class)
@ContextConfiguration
@DirtiesContext
public abstract class DeserializationErrorHandlerByKafkaTests {
@ClassRule
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
"abc-DeserializationErrorHandlerByKafkaTests-In",
"xyz-DeserializationErrorHandlerByKafkaTests-In",
"DeserializationErrorHandlerByKafkaTests-out",
"error.abc-DeserializationErrorHandlerByKafkaTests-In.group",
"error.xyz-DeserializationErrorHandlerByKafkaTests-In.group",
"error.word1.groupx",
"error.word2.groupx");
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
.getEmbeddedKafka();
@SpyBean
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
private static Consumer<String, String> consumer;
@BeforeClass
public static void setUp() {
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
embeddedKafka.getBrokersAsString());
System.setProperty("server.port", "0");
System.setProperty("spring.jmx.enabled", "false");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false",
embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromEmbeddedTopics(consumer, "DeserializationErrorHandlerByKafkaTests-out", "DeserializationErrorHandlerByKafkaTests-out");
}
@AfterClass
public static void tearDown() {
consumer.close();
System.clearProperty("spring.cloud.stream.kafka.streams.binder.brokers");
System.clearProperty("server.port");
System.clearProperty("spring.jmx.enabled");
}
@SpringBootTest(properties = {
"spring.cloud.stream.bindings.input.destination=abc-DeserializationErrorHandlerByKafkaTests-In",
"spring.cloud.stream.bindings.output.destination=DeserializationErrorHandlerByKafkaTests-Out",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deser-kafka-dlq",
"spring.cloud.stream.bindings.input.group=group",
"spring.cloud.stream.kafka.streams.binder.deserializationExceptionHandler=sendToDlq",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde="
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
public static class DeserializationByKafkaAndDlqTests
extends DeserializationErrorHandlerByKafkaTests {
@Test
@Ignore
public void test() {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("abc-DeserializationErrorHandlerByKafkaTests-In");
template.sendDefault(1, null, "foobar");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
"false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
Consumer<String, String> consumer1 = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.abc-DeserializationErrorHandlerByKafkaTests-In.group");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
"error.abc-DeserializationErrorHandlerByKafkaTests-In.group");
assertThat(cr.value()).isEqualTo("foobar");
assertThat(cr.partition()).isEqualTo(0); // custom partition function
// Ensuring that the deserialization was indeed done by Kafka natively
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
any(KStream.class));
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
}
@SpringBootTest(properties = {
"spring.cloud.stream.bindings.input.destination=xyz-DeserializationErrorHandlerByKafkaTests-In",
"spring.cloud.stream.bindings.output.destination=DeserializationErrorHandlerByKafkaTests-Out",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deser-kafka-dlq",
"spring.cloud.stream.bindings.input.group=group",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.deserializationExceptionHandler=sendToDlq",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde="
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
public static class DeserializationByKafkaAndDlqPerBindingTests
extends DeserializationErrorHandlerByKafkaTests {
@Test
public void test() {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("xyz-DeserializationErrorHandlerByKafkaTests-In");
template.sendDefault(1, null, "foobar");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
"false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
Consumer<String, String> consumer1 = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.xyz-DeserializationErrorHandlerByKafkaTests-In.group");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
"error.xyz-DeserializationErrorHandlerByKafkaTests-In.group");
assertThat(cr.value()).isEqualTo("foobar");
assertThat(cr.partition()).isEqualTo(0); // custom partition function
// Ensuring that the deserialization was indeed done by Kafka natively
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
any(KStream.class));
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
}
@SpringBootTest(properties = {
"spring.cloud.stream.bindings.input.destination=word1,word2",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deser-kafka-dlq-multi-input",
"spring.cloud.stream.bindings.input.group=groupx",
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde="
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
// @checkstyle:on
public static class DeserializationByKafkaAndDlqTestsWithMultipleInputs
extends DeserializationErrorHandlerByKafkaTests {
@Test
public void test() {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("word1");
template.sendDefault("foobar");
template.setDefaultTopic("word2");
template.sendDefault("foobar");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobarx",
"false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
Consumer<String, String> consumer1 = cf.createConsumer();
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.word1.groupx",
"error.word2.groupx");
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1,
"error.word1.groupx");
assertThat(cr1.value()).isEqualTo("foobar");
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1,
"error.word2.groupx");
assertThat(cr2.value()).isEqualTo("foobar");
// Ensuring that the deserialization was indeed done by Kafka natively
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
any(KStream.class));
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
@PropertySource("classpath:/org/springframework/cloud/stream/binder/kstream/integTest-1.properties")
public static class WordCountProcessorApplication {
@StreamListener("input")
@SendTo("output")
public KStream<?, String> process(KStream<Object, String> input) {
return input
.flatMapValues(
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.of(Duration.ofMillis(5000))).count(Materialized.as("foo-WordCounts-x"))
.toStream().map((key, value) -> new KeyValue<>(null,
"Count for " + key.key() + " : " + value));
}
@Bean
public DlqPartitionFunction partitionFunction() {
return (group, rec, ex) -> 0;
}
}
}

View File

@@ -0,0 +1,286 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.integration;
import java.time.Duration;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.SpyBean;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
/**
* @author Soby Chacko
*/
@RunWith(SpringRunner.class)
@ContextConfiguration
@DirtiesContext
public abstract class DeserializtionErrorHandlerByBinderTests {
@ClassRule
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
"foos", "goos",
"counts-id", "error.foos.foobar-group", "error.goos.foobar-group", "error.foos1.fooz-group",
"error.foos2.fooz-group");
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
.getEmbeddedKafka();
@SpyBean
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
private static Consumer<Integer, String> consumer;
@BeforeClass
public static void setUp() throws Exception {
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
embeddedKafka.getBrokersAsString());
System.setProperty("server.port", "0");
System.setProperty("spring.jmx.enabled", "false");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("kafka-streams-dlq-tests", "false",
embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
@AfterClass
public static void tearDown() {
consumer.close();
System.clearProperty("spring.cloud.stream.kafka.streams.binder.brokers");
System.clearProperty("server.port");
System.clearProperty("spring.jmx.enabled");
}
@SpringBootTest(properties = {
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
"spring.cloud.stream.bindings.input.destination=foos",
"spring.cloud.stream.bindings.output.destination=counts-id",
"spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
+ "=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
"spring.cloud.stream.kafka.streams.binder.deserializationExceptionHandler=sendToDlq",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id"
+ "=deserializationByBinderAndDlqTests",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.dlqPartitions=1",
"spring.cloud.stream.bindings.input.group=foobar-group" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
public static class DeserializationByBinderAndDlqTests
extends DeserializtionErrorHandlerByBinderTests {
@Test
@Ignore
public void test() {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos");
template.sendDefault(1, 7, "hello");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
"false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
Consumer<String, String> consumer1 = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
"error.foos.foobar-group");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
"error.foos.foobar-group");
assertThat(cr.value()).isEqualTo("hello");
assertThat(cr.partition()).isEqualTo(0);
// Ensuring that the deserialization was indeed done by the binder
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
any(KStream.class));
}
}
@SpringBootTest(properties = {
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
"spring.cloud.stream.bindings.input.destination=goos",
"spring.cloud.stream.bindings.output.destination=counts-id",
"spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
+ "=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.deserializationExceptionHandler=sendToDlq",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id"
+ "=deserializationByBinderAndDlqTests",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.dlqPartitions=1",
"spring.cloud.stream.bindings.input.group=foobar-group" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
public static class DeserializationByBinderAndDlqSetOnConsumerBindingTests
extends DeserializtionErrorHandlerByBinderTests {
@Test
public void test() {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("goos");
template.sendDefault(1, 7, "hello");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
"false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
Consumer<String, String> consumer1 = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
"error.goos.foobar-group");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
"error.goos.foobar-group");
assertThat(cr.value()).isEqualTo("hello");
assertThat(cr.partition()).isEqualTo(0);
// Ensuring that the deserialization was indeed done by the binder
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
any(KStream.class));
}
}
@SpringBootTest(properties = {
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
"spring.cloud.stream.bindings.input.destination=foos1,foos2",
"spring.cloud.stream.bindings.output.destination=counts-id",
"spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id"
+ "=deserializationByBinderAndDlqTestsWithMultipleInputs",
"spring.cloud.stream.bindings.input.group=fooz-group" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
public static class DeserializationByBinderAndDlqTestsWithMultipleInputs
extends DeserializtionErrorHandlerByBinderTests {
@Test
@SuppressWarnings("unchecked")
public void test() {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos1");
template.sendDefault("hello");
template.setDefaultTopic("foos2");
template.sendDefault("hello");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar1",
"false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
Consumer<String, String> consumer1 = cf.createConsumer();
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.foos1.fooz-group",
"error.foos2.fooz-group");
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1,
"error.foos1.fooz-group");
assertThat(cr1.value().equals("hello")).isTrue();
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1,
"error.foos2.fooz-group");
assertThat(cr2.value().equals("hello")).isTrue();
// Ensuring that the deserialization was indeed done by the binder
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
any(KStream.class));
}
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@StreamListener("input")
@SendTo("output")
public KStream<Integer, Long> process(KStream<Object, Product> input) {
return input.filter((key, product) -> product.getId() == 123)
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(new JsonSerde<>(Product.class),
new JsonSerde<>(Product.class)))
.windowedBy(TimeWindows.of(Duration.ofMillis(5000)))
.count(Materialized.as("id-count-store-x")).toStream()
.map((key, value) -> new KeyValue<>(key.key().id, value));
}
}
static class Product {
Integer id;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
}

View File

@@ -20,7 +20,6 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -40,7 +39,12 @@ import org.springframework.boot.actuate.health.CompositeHealthContributor;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.Status;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsBinderHealthIndicator;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.KafkaStreamsCustomizer;
@@ -52,6 +56,7 @@ import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
@@ -202,8 +207,6 @@ public class KafkaStreamsBinderHealthIndicatorTests {
SpringApplication app = new SpringApplication(KStreamApplication.class);
app.setWebApplicationType(WebApplicationType.NONE);
return app.run("--server.port=0", "--spring.jmx.enabled=false",
"--spring.cloud.stream.function.bindings.process-in-0=input",
"--spring.cloud.stream.function.bindings.process-out-0=output",
"--spring.cloud.stream.bindings.input.destination=in",
"--spring.cloud.stream.bindings.output.destination=out",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
@@ -222,11 +225,6 @@ public class KafkaStreamsBinderHealthIndicatorTests {
SpringApplication app = new SpringApplication(AnotherKStreamApplication.class);
app.setWebApplicationType(WebApplicationType.NONE);
return app.run("--server.port=0", "--spring.jmx.enabled=false",
"--spring.cloud.function.definition=process;process2",
"--spring.cloud.stream.function.bindings.process-in-0=input",
"--spring.cloud.stream.function.bindings.process-out-0=output",
"--spring.cloud.stream.function.bindings.process2-in-0=input2",
"--spring.cloud.stream.function.bindings.process2-out-0=output2",
"--spring.cloud.stream.bindings.input.destination=in",
"--spring.cloud.stream.bindings.output.destination=out",
"--spring.cloud.stream.bindings.input2.destination=in2",
@@ -244,12 +242,14 @@ public class KafkaStreamsBinderHealthIndicatorTests {
+ embeddedKafka.getBrokersAsString());
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
public static class KStreamApplication {
@Bean
public Function<KStream<Object, Product>, KStream<Object, Product>> process() {
return input -> input.filter((key, product) -> {
@StreamListener("input")
@SendTo("output")
public KStream<Object, Product> process(KStream<Object, Product> input) {
return input.filter((key, product) -> {
if (product.getId() != 123) {
throw new IllegalArgumentException();
}
@@ -259,12 +259,14 @@ public class KafkaStreamsBinderHealthIndicatorTests {
}
@EnableBinding({ KafkaStreamsProcessor.class, KafkaStreamsProcessorX.class })
@EnableAutoConfiguration
public static class AnotherKStreamApplication {
@Bean
public Function<KStream<Object, Product>, KStream<Object, Product>> process() {
return input -> input.filter((key, product) -> {
@StreamListener("input")
@SendTo("output")
public KStream<Object, Product> process(KStream<Object, Product> input) {
return input.filter((key, product) -> {
if (product.getId() != 123) {
throw new IllegalArgumentException();
}
@@ -272,9 +274,10 @@ public class KafkaStreamsBinderHealthIndicatorTests {
});
}
@Bean
public Function<KStream<Object, Product>, KStream<Object, Product>> process2() {
return input -> input.filter((key, product) -> {
@StreamListener("input2")
@SendTo("output2")
public KStream<Object, Product> process2(KStream<Object, Product> input) {
return input.filter((key, product) -> {
if (product.getId() != 123) {
throw new IllegalArgumentException();
}
@@ -297,6 +300,16 @@ public class KafkaStreamsBinderHealthIndicatorTests {
}
public interface KafkaStreamsProcessorX {
@Input("input2")
KStream<?, ?> input();
@Output("output2")
KStream<?, ?> output();
}
public static class Product {
Integer id;
@@ -310,4 +323,5 @@ public class KafkaStreamsBinderHealthIndicatorTests {
}
}
}
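The run arguments removed earlier in this file are what the functional model on main uses to activate both processors and rename their generated bindings; the same settings in properties form, for reference:

    spring.cloud.function.definition=process;process2
    spring.cloud.stream.function.bindings.process-in-0=input
    spring.cloud.stream.function.bindings.process-out-0=output
    spring.cloud.stream.function.bindings.process2-in-0=input2
    spring.cloud.stream.function.bindings.process2-out-0=output2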

View File

@@ -20,7 +20,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -38,6 +37,10 @@ import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.core.CleanupConfig;
@@ -47,6 +50,7 @@ import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
@@ -96,8 +100,6 @@ public class KafkaStreamsBinderMultipleInputTopicsTest {
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.function.bindings.process-in-0=input",
"--spring.cloud.stream.function.bindings.process-out-0=output",
"--spring.cloud.stream.bindings.input.destination=words1,words2",
"--spring.cloud.stream.bindings.output.destination=counts",
"--spring.cloud.stream.bindings.output.contentType=application/json",
@@ -144,13 +146,21 @@ public class KafkaStreamsBinderMultipleInputTopicsTest {
assertThat(wordCounts.contains("{\"word\":\"foobar2\",\"count\":1}")).isTrue();
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
static class WordCountProcessorApplication {
@Bean
public Function<KStream<Object, String>, KStream<?, WordCount>> process() {
@StreamListener
@SendTo("output")
public KStream<?, WordCount> process(
@Input("input") KStream<Object, String> input) {
return input -> input
input.map((k, v) -> {
System.out.println(k);
System.out.println(v);
return new KeyValue<>(k, v);
});
return input
.flatMapValues(
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))

View File

@@ -18,7 +18,6 @@ package org.springframework.cloud.stream.binder.kafka.streams.integration;
import java.time.Duration;
import java.util.Map;
import java.util.function.Function;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -37,8 +36,10 @@ import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
@@ -46,6 +47,7 @@ import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
@@ -87,8 +89,6 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
app.setWebApplicationType(WebApplicationType.NONE);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.function.bindings.process-in-0=input",
"--spring.cloud.stream.function.bindings.process-out-0=output",
"--spring.cloud.stream.bindings.input.destination=foos",
"--spring.cloud.stream.bindings.output.destination=counts-id",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
@@ -122,19 +122,24 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
assertThat(cr.value()).isEqualTo(1L);
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@Bean
public Function<KStream<Object, Product>, KStream<Integer, Long>> process() {
return input -> input.filter((key, product) -> product.getId() == 123)
@StreamListener("input")
@SendTo("output")
public KStream<Integer, Long> process(KStream<Object, Product> input) {
return input.filter((key, product) -> product.getId() == 123)
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(new JsonSerde<>(Product.class),
new JsonSerde<>(Product.class)))
.windowedBy(TimeWindows.of(Duration.ofMillis(5000)))
.count(Materialized.as("id-count-store-x")).toStream()
.map((key, value) -> new KeyValue<>(key.key().id, value));
.map((key, value) -> {
return new KeyValue<>(key.key().id, value);
});
}
}
public static class Product {

View File

@@ -0,0 +1,186 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.integration;
import java.time.Duration;
import java.util.Arrays;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.SpyBean;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.StopWatch;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
/**
* @author Soby Chacko
*/
@RunWith(SpringRunner.class)
@ContextConfiguration
@DirtiesContext
public abstract class KafkaStreamsNativeEncodingDecodingTests {
@ClassRule
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
"decode-counts", "decode-counts-1");
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
.getEmbeddedKafka();
@SpyBean
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
private static Consumer<String, String> consumer;
@BeforeClass
public static void setUp() {
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
embeddedKafka.getBrokersAsString());
System.setProperty("server.port", "0");
System.setProperty("spring.jmx.enabled", "false");
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromEmbeddedTopics(consumer, "decode-counts", "decode-counts-1");
}
@AfterClass
public static void tearDown() {
consumer.close();
System.clearProperty("spring.cloud.stream.kafka.streams.binder.brokers");
System.clearProperty("server.port");
System.clearProperty("spring.jmx.enabled");
}
@SpringBootTest(properties = {
"spring.cloud.stream.bindings.input.destination=decode-words-1",
"spring.cloud.stream.bindings.output.destination=decode-counts-1",
"spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
+ "=NativeEncodingDecodingEnabledTests-abc" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
public static class NativeEncodingDecodingEnabledTests
extends KafkaStreamsNativeEncodingDecodingTests {
@Test
public void test() throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("decode-words-1");
template.sendDefault("foobar");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
"decode-counts-1");
assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
any(KStream.class));
}
}
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {
"spring.cloud.stream.bindings.input.destination=decode-words",
"spring.cloud.stream.bindings.output.destination=decode-counts",
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
"spring.cloud.stream.kafka.streams.bindings.input3.consumer.applicationId"
+ "=hello-NativeEncodingDecodingEnabledTests-xyz" })
public static class NativeEncodingDecodingDisabledTests
extends KafkaStreamsNativeEncodingDecodingTests {
@Test
public void test() {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("decode-words");
template.sendDefault("foobar");
StopWatch stopWatch = new StopWatch();
stopWatch.start();
System.out.println("Starting: ");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
"decode-counts");
stopWatch.stop();
System.out.println("Total time: " + stopWatch.getTotalTimeSeconds());
assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
verify(conversionDelegate).serializeOnOutbound(any(KStream.class));
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
any(KStream.class));
}
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
public static class WordCountProcessorApplication {
@StreamListener("input")
@SendTo("output")
public KStream<?, String> process(KStream<Object, String> input) {
return input
.flatMapValues(
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.of(Duration.ofSeconds(5))).count(Materialized.as("foo-WordCounts-x"))
.toStream().map((key, value) -> new KeyValue<>(null,
"Count for " + key.key() + " : " + value));
}
}
}
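For the Kafka Streams binder, native Serde-based encoding and decoding is the default; the "disabled" variant of this test opts back into the binder's own message conversion per binding, which is why KafkaStreamsMessageConversionDelegate is expected to be invoked only there. The two toggles it exercises, in properties form:

    spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false
    spring.cloud.stream.bindings.output.producer.useNativeEncoding=false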

View File

@@ -18,8 +18,6 @@ package org.springframework.cloud.stream.binder.kafka.streams.integration;
import java.time.Duration;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.kstream.KStream;
@@ -34,6 +32,11 @@ import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsStateStoreProperties;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
@@ -64,7 +67,6 @@ public class KafkaStreamsStateStoreIntegrationTests {
app.setWebApplicationType(WebApplicationType.NONE);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.function.bindings.process-in-0=input",
"--spring.cloud.stream.bindings.input.destination=foobar",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
@@ -87,14 +89,41 @@ public class KafkaStreamsStateStoreIntegrationTests {
}
}
@Test
public void testKstreamStateStoreBuilderBeansDefinedInApplication() throws Exception {
SpringApplication app = new SpringApplication(StateStoreBeanApplication.class);
app.setWebApplicationType(WebApplicationType.NONE);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.bindings.input3.destination=foobar",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
"--spring.cloud.stream.kafka.streams.bindings.input3.consumer.applicationId"
+ "=KafkaStreamsStateStoreIntegrationTests-xyzabc-123",
"--spring.cloud.stream.kafka.streams.binder.brokers="
+ embeddedKafka.getBrokersAsString());
try {
Thread.sleep(2000);
receiveAndValidateFoo(context, StateStoreBeanApplication.class);
}
catch (Exception e) {
throw e;
}
finally {
context.close();
}
}
@Test
public void testSameStateStoreIsCreatedOnlyOnceWhenMultipleInputBindingsArePresent() throws Exception {
SpringApplication app = new SpringApplication(ProductCountApplicationWithMultipleInputBindings.class);
app.setWebApplicationType(WebApplicationType.NONE);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.function.bindings.process-in-0=input1",
"--spring.cloud.stream.function.bindings.process-in-1=input2",
"--spring.cloud.stream.bindings.input1.destination=foobar",
"--spring.cloud.stream.bindings.input2.destination=hello-foobar",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
@@ -142,12 +171,22 @@ public class KafkaStreamsStateStoreIntegrationTests {
assertThat(state.persistent()).isTrue();
assertThat(productCount.processed).isTrue();
}
else if (clazz.isAssignableFrom(StateStoreBeanApplication.class)) {
StateStoreBeanApplication productCount = context
.getBean(StateStoreBeanApplication.class);
WindowStore<Object, String> state = productCount.state;
assertThat(state != null).isTrue();
assertThat(state.name()).isEqualTo("mystate");
assertThat(state.persistent()).isTrue();
assertThat(productCount.processed).isTrue();
}
else {
fail("Expected assertions did not happen");
fail("Expected assertiond did not happen");
}
}
@EnableBinding(KafkaStreamsProcessorX.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@@ -155,10 +194,46 @@ public class KafkaStreamsStateStoreIntegrationTests {
boolean processed;
@Bean
public Consumer<KStream<Object, Product>> process() {
@StreamListener("input")
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000, retentionMs = 300000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void process(KStream<Object, Product> input) {
return input -> input.process(() -> new Processor<Object, Product>() {
input.process(() -> new Processor<Object, Product>() {
@Override
public void init(ProcessorContext processorContext) {
state = (WindowStore) processorContext.getStateStore("mystate");
}
@Override
public void process(Object s, Product product) {
processed = true;
}
@Override
public void close() {
if (state != null) {
state.close();
}
}
}, "mystate");
}
}
@EnableBinding(KafkaStreamsProcessorZ.class)
@EnableAutoConfiguration
public static class StateStoreBeanApplication {
WindowStore<Object, String> state;
boolean processed;
@StreamListener("input3")
@SuppressWarnings({"unchecked" })
public void process(KStream<Object, Product> input) {
input.process(() -> new Processor<Object, Product>() {
@Override
public void init(ProcessorContext processorContext) {
@@ -188,6 +263,8 @@ public class KafkaStreamsStateStoreIntegrationTests {
}
}
@EnableBinding(KafkaStreamsProcessorY.class)
@EnableAutoConfiguration
public static class ProductCountApplicationWithMultipleInputBindings {
@@ -195,41 +272,33 @@ public class KafkaStreamsStateStoreIntegrationTests {
boolean processed;
@Bean
public BiConsumer<KStream<Object, Product>, KStream<Object, Product>> process() {
@StreamListener
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000, retentionMs = 300000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void process(@Input("input1")KStream<Object, Product> input, @Input("input2")KStream<Object, Product> input2) {
return (input, input2) -> {
input.process(() -> new Processor<Object, Product>() {
input.process(() -> new Processor<Object, Product>() {
@Override
public void init(ProcessorContext processorContext) {
state = (WindowStore) processorContext.getStateStore("mystate");
}
@Override
public void init(ProcessorContext processorContext) {
state = (WindowStore) processorContext.getStateStore("mystate");
@Override
public void process(Object s, Product product) {
processed = true;
}
@Override
public void close() {
if (state != null) {
state.close();
}
}
}, "mystate");
@Override
public void process(Object s, Product product) {
processed = true;
}
@Override
public void close() {
if (state != null) {
state.close();
}
}
}, "mystate");
//simple use of input2, we are not using input2 for anything other than triggering some test behavior.
input2.foreach((key, value) -> { });
};
}
@Bean
public StoreBuilder mystore() {
return Stores.windowStoreBuilder(
Stores.persistentWindowStore("mystate",
Duration.ofMillis(3), Duration.ofMillis(3), false), Serdes.String(),
Serdes.String());
//simple use of input2, we are not using input2 for anything other than triggering some test behavior.
input2.foreach((key, value) -> { });
}
}
@@ -246,4 +315,25 @@ public class KafkaStreamsStateStoreIntegrationTests {
}
}
interface KafkaStreamsProcessorX {
@Input("input")
KStream<?, ?> input();
}
interface KafkaStreamsProcessorY {
@Input("input1")
KStream<?, ?> input1();
@Input("input2")
KStream<?, ?> input2();
}
interface KafkaStreamsProcessorZ {
@Input("input3")
KStream<?, ?> input3();
}
}
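Both application classes rely on a window store named "mystate". On main that store is contributed as a StoreBuilder bean, which the binder detects and adds to the topology (the scenario the new testKstreamStateStoreBuilderBeansDefinedInApplication test covers), instead of going through the deprecated @KafkaStreamsStateStore annotation. A sketch of such a bean, reusing the values from the removed hunk:

    @Bean
    public StoreBuilder<WindowStore<String, String>> mystore() {
        // registered StoreBuilder beans are picked up by the binder and added to the topology
        return Stores.windowStoreBuilder(
                Stores.persistentWindowStore("mystate",
                        Duration.ofMillis(3), Duration.ofMillis(3), false),
                Serdes.String(), Serdes.String());
    }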

View File

@@ -18,7 +18,6 @@ package org.springframework.cloud.stream.binder.kafka.streams.integration;
import java.time.Duration;
import java.util.Map;
import java.util.function.Function;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -36,8 +35,10 @@ import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.integration.test.util.TestUtils;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
import org.springframework.kafka.core.CleanupConfig;
@@ -48,6 +49,7 @@ import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.handler.annotation.SendTo;
import static org.assertj.core.api.Assertions.assertThat;
@@ -89,8 +91,6 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
app.setWebApplicationType(WebApplicationType.NONE);
ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.function.bindings.process-in-0=input",
"--spring.cloud.stream.function.bindings.process-out-0=output",
"--spring.cloud.stream.bindings.input.destination=foos",
"--spring.cloud.stream.bindings.output.destination=counts-id",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
@@ -105,7 +105,7 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
receiveAndValidateFoo();
// Assertions on StreamBuilderFactoryBean
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context
.getBean("&stream-builder-process", StreamsBuilderFactoryBean.class);
.getBean("&stream-builder-ProductCountApplication-process", StreamsBuilderFactoryBean.class);
CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean,
"cleanupConfig", CleanupConfig.class);
assertThat(cleanup.cleanupOnStart()).isFalse();
@@ -128,12 +128,15 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
public static class ProductCountApplication {
@Bean
public Function<KStream<Object, Product>, KStream<Integer, String>> process() {
return input -> input.filter((key, product) -> product.getId() == 123)
@StreamListener("input")
@SendTo("output")
public KStream<Integer, String> process(KStream<Object, Product> input) {
return input.filter((key, product) -> product.getId() == 123)
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(new JsonSerde<>(Product.class),
new JsonSerde<>(Product.class)))

View File

@@ -0,0 +1,95 @@
/*
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.integration;
import org.apache.kafka.streams.kstream.KStream;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.stereotype.Component;
import static org.assertj.core.api.Assertions.assertThat;
public class MultiProcessorsWithSameNameAndBindingTests {
@ClassRule
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
"counts");
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
.getEmbeddedKafka();
@Test
public void testBinderStartsSuccessfullyWhenTwoProcessorsWithSameNamesAndBindingsPresent() {
SpringApplication app = new SpringApplication(
MultiProcessorsWithSameNameAndBindingTests.WordCountProcessorApplication.class);
app.setWebApplicationType(WebApplicationType.NONE);
try (ConfigurableApplicationContext context = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.bindings.input.destination=words",
"--spring.cloud.stream.bindings.input-1.destination=words",
"--spring.cloud.stream.bindings.output.destination=counts",
"--spring.cloud.stream.bindings.output.contentType=application/json",
"--spring.cloud.stream.kafka.streams.binder.brokers="
+ embeddedKafka.getBrokersAsString())) {
StreamsBuilderFactoryBean streamsBuilderFactoryBean1 = context
.getBean("&stream-builder-Foo-process", StreamsBuilderFactoryBean.class);
assertThat(streamsBuilderFactoryBean1).isNotNull();
StreamsBuilderFactoryBean streamsBuilderFactoryBean2 = context
.getBean("&stream-builder-Bar-process", StreamsBuilderFactoryBean.class);
assertThat(streamsBuilderFactoryBean2).isNotNull();
}
}
@EnableBinding(KafkaStreamsProcessorX.class)
@EnableAutoConfiguration
static class WordCountProcessorApplication {
@Component
static class Foo {
@StreamListener
public void process(@Input("input-1") KStream<Object, String> input) {
}
}
//Second class with a stub processor that has the same name as above ("process")
@Component
static class Bar {
@StreamListener
public void process(@Input("input-1") KStream<Object, String> input) {
}
}
}
interface KafkaStreamsProcessorX {
@Input("input-1")
KStream<?, ?> input1();
}
}
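The bean names asserted here follow the pattern &stream-builder-<EnclosingClass>-<method>, which is how the binder keeps two processors with the same method name apart. A short sketch of looking one of them up, assuming an injected ApplicationContext; the & prefix addresses the StreamsBuilderFactoryBean itself rather than the object it creates:

    StreamsBuilderFactoryBean fooFactory = context.getBean(
            "&stream-builder-Foo-process", StreamsBuilderFactoryBean.class);
    // e.g. check whether the underlying KafkaStreams instance has been started
    boolean running = fooFactory.isRunning();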

View File

@@ -0,0 +1,184 @@
/*
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.integration;
import java.io.IOException;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
import com.example.Sensor;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.function.context.converter.avro.AvroSchemaMessageConverter;
import org.springframework.cloud.function.context.converter.avro.AvroSchemaServiceManagerImpl;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.cloud.stream.binder.kafka.streams.integration.utils.TestAvroSerializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.Message;
import org.springframework.messaging.converter.MessageConverter;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.util.MimeTypeUtils;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Soby Chacko
*/
public class PerRecordAvroContentTypeTests {
@ClassRule
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
"received-sensors");
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
.getEmbeddedKafka();
private static Consumer<String, byte[]> consumer;
@BeforeClass
public static void setUp() throws Exception {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("avro-ct-test",
"false", embeddedKafka);
// Receive the data as byte[]
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
ByteArrayDeserializer.class);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, byte[]> cf = new DefaultKafkaConsumerFactory<>(
consumerProps);
consumer = cf.createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "received-sensors");
}
@AfterClass
public static void tearDown() {
consumer.close();
}
@Test
public void testPerRecordAvroConentTypeAndVerifySerialization() throws Exception {
SpringApplication app = new SpringApplication(SensorCountAvroApplication.class);
app.setWebApplicationType(WebApplicationType.NONE);
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
"--spring.cloud.stream.bindings.input.destination=sensors",
"--spring.cloud.stream.bindings.output.destination=received-sensors",
"--spring.cloud.stream.bindings.output.contentType=application/avro",
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=per-record-avro-contentType-test",
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
"--spring.cloud.stream.kafka.streams.binder.brokers="
+ embeddedKafka.getBrokersAsString())) {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
// Use a custom avro test serializer
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
TestAvroSerializer.class);
DefaultKafkaProducerFactory<Integer, Sensor> pf = new DefaultKafkaProducerFactory<>(
senderProps);
try {
KafkaTemplate<Integer, Sensor> template = new KafkaTemplate<>(pf, true);
Random random = new Random();
Sensor sensor = new Sensor();
sensor.setId(UUID.randomUUID().toString() + "-v1");
sensor.setAcceleration(random.nextFloat() * 10);
sensor.setVelocity(random.nextFloat() * 100);
sensor.setTemperature(random.nextFloat() * 50);
// Send with avro content type set.
Message<?> message = MessageBuilder.withPayload(sensor)
.setHeader("contentType", "application/avro").build();
template.setDefaultTopic("sensors");
template.send(message);
// Serialized byte[] ^^ is received by the binding process and deserialized
// using the avro converter.
// Then finally, the data will be output to a return topic as byte[]
// (using the same avro converter).
// Receive the byte[] from return topic
ConsumerRecord<String, byte[]> cr = KafkaTestUtils
.getSingleRecord(consumer, "received-sensors");
final byte[] value = cr.value();
// Convert the byte[] received back to avro object and verify that it is
// the same as the one we sent ^^.
AvroSchemaMessageConverter avroSchemaMessageConverter = new AvroSchemaMessageConverter(new AvroSchemaServiceManagerImpl());
Message<?> receivedMessage = MessageBuilder.withPayload(value)
.setHeader("contentType",
MimeTypeUtils.parseMimeType("application/avro"))
.build();
Sensor messageConverted = (Sensor) avroSchemaMessageConverter
.fromMessage(receivedMessage, Sensor.class);
assertThat(messageConverted).isEqualTo(sensor);
}
finally {
pf.destroy();
}
}
}
@EnableBinding(KafkaStreamsProcessor.class)
@EnableAutoConfiguration
static class SensorCountAvroApplication {
@StreamListener
@SendTo("output")
public KStream<?, Sensor> process(@Input("input") KStream<Object, Sensor> input) {
// return the same Sensor object unchanged so that we can do test
// verifications
return input.map(KeyValue::new);
}
@Bean
public MessageConverter sensorMessageConverter() throws IOException {
return new AvroSchemaMessageConverter(new AvroSchemaServiceManagerImpl());
}
}
}

View File

@@ -0,0 +1,63 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.binder.kafka.streams.integration.utils;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.common.serialization.Serializer;
import org.springframework.cloud.function.context.converter.avro.AvroSchemaMessageConverter;
import org.springframework.cloud.function.context.converter.avro.AvroSchemaServiceManagerImpl;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.support.MessageBuilder;
/**
* Custom avro serializer intended to be used for testing only.
*
* @param <S> Target type to serialize
* @author Soby Chacko
*/
public class TestAvroSerializer<S> implements Serializer<S> {
public TestAvroSerializer() {
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
}
@Override
public byte[] serialize(String topic, S data) {
AvroSchemaMessageConverter avroSchemaMessageConverter = new AvroSchemaMessageConverter(new AvroSchemaServiceManagerImpl());
Message<?> message = MessageBuilder.withPayload(data).build();
Map<String, Object> headers = new HashMap<>(message.getHeaders());
headers.put(MessageHeaders.CONTENT_TYPE, "application/avro");
MessageHeaders messageHeaders = new MessageHeaders(headers);
final Object payload = avroSchemaMessageConverter
.toMessage(message.getPayload(), messageHeaders).getPayload();
return (byte[]) payload;
}
@Override
public void close() {
}
}

View File

@@ -0,0 +1,11 @@
{
"namespace" : "com.example",
"type" : "record",
"name" : "Sensor",
"fields" : [
{"name":"id","type":"string"},
{"name":"temperature", "type":"float", "default":0.0},
{"name":"acceleration", "type":"float","default":0.0},
{"name":"velocity","type":"float","default":0.0}
]
}

View File

@@ -0,0 +1,6 @@
spring.cloud.stream.bindings.input.destination=DeserializationErrorHandlerByKafkaTests-In
spring.cloud.stream.bindings.output.destination=DeserializationErrorHandlerByKafkaTests-Out
spring.cloud.stream.bindings.output.contentType=application/json
spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000
spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde
spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde

View File

@@ -10,7 +10,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<version>3.2.2</version>
</parent>
<dependencies>

View File

@@ -679,7 +679,7 @@ public class KafkaMessageChannelBinder extends
concurrency = extendedConsumerProperties.getConcurrency();
}
resetOffsetsForAutoRebalance(extendedConsumerProperties, consumerFactory, containerProperties);
containerProperties.setAuthExceptionRetryInterval(this.configurationProperties.getAuthorizationExceptionRetryInterval());
containerProperties.setAuthorizationExceptionRetryInterval(this.configurationProperties.getAuthorizationExceptionRetryInterval());
@SuppressWarnings("rawtypes")
final ConcurrentMessageListenerContainer<?, ?> messageListenerContainer = new ConcurrentMessageListenerContainer(
consumerFactory, containerProperties) {
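The interval wired into the container here comes from the binder-level configuration. A hedged properties sketch of setting it, assuming the property name follows the getter above and accepts a Boot duration value:

    # keep retrying the listener container instead of stopping it on AuthorizationException
    spring.cloud.stream.kafka.binder.authorization-exception-retry-interval=30s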

View File

@@ -28,6 +28,7 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.annotation.StreamMessageConverter;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.kafka.KafkaBinderMetrics;
import org.springframework.cloud.stream.binder.kafka.KafkaBindingRebalanceListener;
@@ -140,6 +141,7 @@ public class KafkaBinderConfiguration {
}
@Bean
@StreamMessageConverter
@ConditionalOnMissingBean(KafkaNullConverter.class)
MessageConverter kafkaNullConverter() {
return new KafkaNullConverter();

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,14 +18,12 @@ package org.springframework.cloud.stream.binder.kafka.integration;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.binder.MeterBinder;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -35,14 +33,19 @@ import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.FilteredClassLoader;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.Binding;
import org.springframework.cloud.stream.binder.PollableMessageSource;
import org.springframework.cloud.stream.binding.BindingService;
import org.springframework.cloud.stream.config.ConsumerEndpointCustomizer;
import org.springframework.cloud.stream.config.ListenerContainerCustomizer;
import org.springframework.cloud.stream.config.MessageSourceCustomizer;
import org.springframework.cloud.stream.config.ProducerMessageHandlerCustomizer;
import org.springframework.cloud.stream.messaging.Processor;
import org.springframework.cloud.stream.messaging.Sink;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter;
import org.springframework.integration.kafka.inbound.KafkaMessageSource;
import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler;
@@ -60,18 +63,13 @@ import static org.assertj.core.api.Assertions.assertThat;
* @author Oleg Zhurakousky
* @author Jon Schneider
* @author Gary Russell
* @author Soby Chacko
*
* @since 2.0
*/
@RunWith(SpringRunner.class)
// @checkstyle:off
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE,
properties = {
"spring.cloud.stream.bindings.input.group=" + KafkaBinderActuatorTests.TEST_CONSUMER_GROUP,
"spring.cloud.stream.function.bindings.process-in-0=input",
"spring.cloud.stream.pollable-source=input"}
)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = "spring.cloud.stream.bindings.input.group="
+ KafkaBinderActuatorTests.TEST_CONSUMER_GROUP)
// @checkstyle:on
@DirtiesContext
public class KafkaBinderActuatorTests {
@@ -102,22 +100,17 @@ public class KafkaBinderActuatorTests {
@Test
public void testKafkaBinderMetricsExposed() {
this.kafkaTemplate.send("input", null, "foo".getBytes());
this.kafkaTemplate.send(Sink.INPUT, null, "foo".getBytes());
this.kafkaTemplate.flush();
assertThat(this.meterRegistry.get("spring.cloud.stream.binder.kafka.offset")
.tag("group", TEST_CONSUMER_GROUP).tag("topic", "input").gauge()
.tag("group", TEST_CONSUMER_GROUP).tag("topic", Sink.INPUT).gauge()
.value()).isGreaterThan(0);
}
@Test
@Ignore
public void testKafkaBinderMetricsWhenNoMicrometer() {
new ApplicationContextRunner().withUserConfiguration(KafkaMetricsTestConfig.class)
.withPropertyValues(
"spring.cloud.stream.bindings.input.group", KafkaBinderActuatorTests.TEST_CONSUMER_GROUP,
"spring.cloud.stream.function.bindings.process-in-0", "input",
"spring.cloud.stream.pollable-source", "input")
.withClassLoader(new FilteredClassLoader("io.micrometer.core"))
.run(context -> {
assertThat(context.getBeanNamesForType(MeterRegistry.class))
@@ -155,8 +148,8 @@ public class KafkaBinderActuatorTests {
});
}
@EnableBinding({ Processor.class, PMS.class })
@EnableAutoConfiguration
@Configuration
public static class KafkaMetricsTestConfig {
@Bean
@@ -179,18 +172,19 @@ public class KafkaBinderActuatorTests {
return (handler, destinationName) -> handler.setBeanName("setByCustomizer:" + destinationName);
}
@Bean
public Consumer<String> process() {
@StreamListener(Sink.INPUT)
public void process(@SuppressWarnings("unused") String payload) throws InterruptedException {
// Artificial slow listener to emulate consumer lag
return s -> {
try {
Thread.sleep(1000);
}
catch (InterruptedException e) {
//no-op
}
};
Thread.sleep(1000);
}
}
public interface PMS {
@Input
PollableMessageSource source();
}
}
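The gauge asserted in testKafkaBinderMetricsExposed is the unconsumed-record (lag) metric that KafkaBinderMetrics registers per consumer group and topic. A sketch of reading it from any injected MeterRegistry; the group and topic values below are placeholders:

    double lag = meterRegistry.get("spring.cloud.stream.binder.kafka.offset")
            .tag("group", "my-consumer-group")   // placeholder group name
            .tag("topic", "input")
            .gauge()
            .value();
    // a value greater than zero means the group still has records to consume on the topic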

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.common.TopicPartition;
@@ -34,6 +33,10 @@ import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.Binder;
import org.springframework.cloud.stream.binder.BinderFactory;
import org.springframework.cloud.stream.binder.ConsumerProperties;
@@ -44,9 +47,10 @@ import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerPro
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.SubscribableChannel;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
@@ -58,11 +62,6 @@ import static org.assertj.core.api.Assertions.assertThat;
*/
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {
"spring.cloud.stream.function.definition=process;processCustom",
"spring.cloud.stream.function.bindings.process-in-0=standard-in",
"spring.cloud.stream.function.bindings.process-out-0=standard-out",
"spring.cloud.stream.function.bindings.processCustom-in-0=custom-in",
"spring.cloud.stream.function.bindings.processCustom-out-0=custom-out",
"spring.cloud.stream.kafka.bindings.standard-out.producer.configuration.key.serializer=FooSerializer.class",
"spring.cloud.stream.kafka.default.producer.configuration.key.serializer=BarSerializer.class",
"spring.cloud.stream.kafka.default.producer.configuration.value.serializer=BarSerializer.class",
@@ -168,19 +167,22 @@ public class KafkaBinderExtendedPropertiesTest {
Boolean.TRUE);
}
@EnableBinding(CustomBindingForExtendedPropertyTesting.class)
@EnableAutoConfiguration
@Configuration
public static class KafkaMetricsTestConfig {
@Bean
public Function<String, String> process() {
return payload -> payload;
@StreamListener("standard-in")
@SendTo("standard-out")
public String process(String payload) {
return payload;
}
@StreamListener("custom-in")
@SendTo("custom-out")
public String processCustom(String payload) {
return payload;
}
@Bean
public Function<String, String> processCustom() {
return payload -> payload;
}
@Bean
public RebalanceListener rebalanceListener() {
return new RebalanceListener();
@@ -188,6 +190,22 @@ public class KafkaBinderExtendedPropertiesTest {
}
interface CustomBindingForExtendedPropertyTesting {
@Input("standard-in")
SubscribableChannel standardIn();
@Output("standard-out")
MessageChannel standardOut();
@Input("custom-in")
SubscribableChannel customIn();
@Output("custom-out")
MessageChannel customOut();
}
public static class RebalanceListener implements KafkaBindingRebalanceListener {
private final Map<String, Boolean> bindings = new HashMap<>();
@@ -197,18 +215,23 @@ public class KafkaBinderExtendedPropertiesTest {
@Override
public void onPartitionsRevokedBeforeCommit(String bindingName,
Consumer<?, ?> consumer, Collection<TopicPartition> partitions) {
}
@Override
public void onPartitionsRevokedAfterCommit(String bindingName,
Consumer<?, ?> consumer, Collection<TopicPartition> partitions) {
}
@Override
public void onPartitionsAssigned(String bindingName, Consumer<?, ?> consumer,
Collection<TopicPartition> partitions, boolean initial) {
this.bindings.put(bindingName, initial);
this.latch.countDown();
}
}
}
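A single KafkaBindingRebalanceListener bean, like the one registered above, receives callbacks for every consumer binding. A minimal sketch that seeks to the beginning on the first assignment, assuming the remaining callbacks keep their default no-op implementations; the seek itself is illustrative and not taken from the test:

    @Bean
    public KafkaBindingRebalanceListener rebalanceListener() {
        return new KafkaBindingRebalanceListener() {

            @Override
            public void onPartitionsAssigned(String bindingName, Consumer<?, ?> consumer,
                    Collection<TopicPartition> partitions, boolean initial) {
                if (initial) {
                    // illustrative: replay from the earliest offset on the first assignment only
                    consumer.seekToBeginning(partitions);
                }
            }
        };
    }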

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2016-2021 the original author or authors.
* Copyright 2016-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,7 +18,6 @@ package org.springframework.cloud.stream.binder.kafka.integration;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -28,12 +27,12 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.KafkaNull;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
@@ -48,19 +47,21 @@ import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Aldo Sinanaj
* @author Gary Russell
* @author Soby Chacko
*/
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {
"spring.kafka.consumer.auto-offset-reset=earliest",
"spring.cloud.stream.function.bindings.inputListen-in-0=kafkaNullInput"})
"spring.kafka.consumer.auto-offset-reset=earliest" })
@DirtiesContext
@Ignore
public class KafkaNullConverterTest {
private static final String KAFKA_BROKERS_PROPERTY = "spring.kafka.bootstrap-servers";
@Autowired
private ApplicationContext context;
private MessageChannel kafkaNullOutput;
@Autowired
private MessageChannel kafkaNullInput;
@Autowired
private KafkaNullConverterTestConfig config;
@@ -80,11 +81,8 @@ public class KafkaNullConverterTest {
}
@Test
@Ignore
public void testKafkaNullConverterOutput() throws InterruptedException {
final StreamBridge streamBridge = context.getBean(StreamBridge.class);
streamBridge.send("kafkaNullOutput", new GenericMessage<>(KafkaNull.INSTANCE));
this.kafkaNullOutput.send(new GenericMessage<>(KafkaNull.INSTANCE));
assertThat(this.config.countDownLatchOutput.await(10, TimeUnit.SECONDS)).isTrue();
assertThat(this.config.outputPayload).isNull();
@@ -92,17 +90,14 @@ public class KafkaNullConverterTest {
@Test
public void testKafkaNullConverterInput() throws InterruptedException {
final MessageChannel kafkaNullInput = context.getBean("kafkaNullInput", MessageChannel.class);
kafkaNullInput.send(new GenericMessage<>(KafkaNull.INSTANCE));
this.kafkaNullInput.send(new GenericMessage<>(KafkaNull.INSTANCE));
assertThat(this.config.countDownLatchInput.await(10, TimeUnit.SECONDS)).isTrue();
assertThat(this.config.inputPayload).isNull();
}
@EnableAutoConfiguration
@Configuration
@TestConfiguration
@EnableBinding(KafkaNullTestChannels.class)
public static class KafkaNullConverterTestConfig {
final CountDownLatch countDownLatchOutput = new CountDownLatch(1);
@@ -119,13 +114,22 @@ public class KafkaNullConverterTest {
countDownLatchOutput.countDown();
}
@Bean
public Consumer<byte[]> inputListen() {
return in -> {
this.inputPayload = in;
countDownLatchInput.countDown();
};
@StreamListener("kafkaNullInput")
public void inputListen(@Payload(required = false) byte[] payload) {
this.inputPayload = payload;
countDownLatchInput.countDown();
}
}
public interface KafkaNullTestChannels {
@Input
MessageChannel kafkaNullInput();
@Output
MessageChannel kafkaNullOutput();
}
}
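The output half of this test demonstrates the tombstone pattern: publishing KafkaNull.INSTANCE makes the binder produce a record with a null value. A sketch of doing the same from application code, assuming an output binding named kafkaNullOutput and the binder's default messageKeyExpression, so the kafka_messageKey header becomes the record key:

    // send a tombstone (null value) for a given key through the binder
    streamBridge.send("kafkaNullOutput", MessageBuilder
            .withPayload(KafkaNull.INSTANCE)
            .setHeader(KafkaHeaders.MESSAGE_KEY, "entity-42".getBytes())
            .build());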

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2019-2021 the original author or authors.
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -35,12 +35,11 @@ import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.binder.BinderFactory;
import org.springframework.cloud.stream.binder.kafka.KafkaMessageChannelBinder;
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.context.ApplicationContext;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
@@ -59,7 +58,6 @@ import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Gary Russell
* @author Soby Chacko
* @since 2.1.4
*
*/
@@ -82,7 +80,7 @@ public class ProducerOnlyTransactionTests {
private Sender sender;
@Autowired
private ApplicationContext context;
private MessageChannel output;
@BeforeClass
public static void setup() {
@@ -97,8 +95,7 @@ public class ProducerOnlyTransactionTests {
@Test
public void testProducerTx() {
final StreamBridge streamBridge = context.getBean(StreamBridge.class);
this.sender.DoInTransaction(streamBridge);
this.sender.DoInTransaction(this.output);
assertThat(this.sender.isInTx()).isTrue();
Map<String, Object> props = KafkaTestUtils.consumerProps("consumeTx", "false",
embeddedKafka.getEmbeddedKafka());
@@ -112,9 +109,9 @@ public class ProducerOnlyTransactionTests {
assertThat(record.value()).isEqualTo("foo".getBytes());
}
@EnableBinding(Source.class)
@EnableAutoConfiguration
@EnableTransactionManagement
@Configuration
public static class Config {
@Bean
@@ -143,9 +140,9 @@ public class ProducerOnlyTransactionTests {
private boolean isInTx;
@Transactional
public void DoInTransaction(StreamBridge streamBridge) {
public void DoInTransaction(MessageChannel output) {
this.isInTx = TransactionSynchronizationManager.isActualTransactionActive();
streamBridge.send("output", new GenericMessage<>("foo".getBytes()));
output.send(new GenericMessage<>("foo"));
}
public boolean isInTx() {
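One side of this hunk sends through a bound `Source` channel, the other through `StreamBridge`; in both cases the point of the test is that a plain `@Transactional` method participates in a producer-only Kafka transaction when the binder's transactional producer factory backs the transaction manager. A hedged, standalone sketch of the `StreamBridge` variant follows; the bean name, binding name (`output`), and payload handling are assumptions, not part of the diff.

```java
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.messaging.support.GenericMessage;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

// Assumes spring.cloud.stream.kafka.binder.transaction.transaction-id-prefix is set
// and a KafkaTransactionManager built from the binder's transactional producer
// factory is registered, as in the test configuration above.
@Component
public class TransactionalSender {

	private final StreamBridge streamBridge;

	public TransactionalSender(StreamBridge streamBridge) {
		this.streamBridge = streamBridge;
	}

	@Transactional
	public void send(byte[] payload) {
		// both sends commit (or roll back) atomically with the surrounding transaction
		streamBridge.send("output", new GenericMessage<>(payload));
		streamBridge.send("output", new GenericMessage<>(payload));
	}

}
```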

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,8 +16,6 @@
package org.springframework.cloud.stream.binder.kafka.integration.topic.configs;
import java.util.function.Function;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -25,21 +23,24 @@ import org.junit.runner.RunWith;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.SubscribableChannel;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
/**
* @author Heiko Does
* @author Soby Chacko
*/
@RunWith(SpringRunner.class)
@SpringBootTest(
classes = BaseKafkaBinderTopicPropertiesUpdateTest.TopicAutoConfigsTestConfig.class,
webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {
"spring.cloud.stream.function.bindings.process-in-0=standard-in",
"spring.cloud.stream.function.bindings.process-out-0=standard-out",
"spring.cloud.stream.kafka.bindings.standard-out.producer.topic.properties.retention.ms=9001",
"spring.cloud.stream.kafka.default.producer.topic.properties.retention.ms=-1",
"spring.cloud.stream.kafka.bindings.standard-in.consumer.topic.properties.retention.ms=9001",
@@ -64,12 +65,24 @@ public abstract class BaseKafkaBinderTopicPropertiesUpdateTest {
System.clearProperty(KAFKA_BROKERS_PROPERTY);
}
@EnableBinding(CustomBindingForTopicPropertiesUpdateTesting.class)
@EnableAutoConfiguration
public static class TopicAutoConfigsTestConfig {
@Bean
public Function<String, String> process() {
return payload -> payload;
@StreamListener("standard-in")
@SendTo("standard-out")
public String process(String payload) {
return payload;
}
}
interface CustomBindingForTopicPropertiesUpdateTesting {
@Input("standard-in")
SubscribableChannel standardIn();
@Output("standard-out")
MessageChannel standardOut();
}
}
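The properties in this hunk remap the generated functional binding names (`process-in-0`/`process-out-0`) to `standard-in`/`standard-out`, and the per-binding `topic.properties.retention.ms` settings then attach to those names when the binder provisions the topics. A minimal runnable sketch under the same property values is shown below; the class name and the idea of passing the properties as command-line arguments are assumptions for illustration.

```java
import java.util.function.Function;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;

@SpringBootApplication
public class TopicPropertiesDemo {

	public static void main(String[] args) {
		SpringApplication.run(TopicPropertiesDemo.class,
				// remap the generated binding names to the custom ones used by the test
				"--spring.cloud.stream.function.bindings.process-in-0=standard-in",
				"--spring.cloud.stream.function.bindings.process-out-0=standard-out",
				// per-binding topic-level settings applied when the binder provisions the topics
				"--spring.cloud.stream.kafka.bindings.standard-in.consumer.topic.properties.retention.ms=9001",
				"--spring.cloud.stream.kafka.bindings.standard-out.producer.topic.properties.retention.ms=9001");
	}

	@Bean
	public Function<String, String> process() {
		return payload -> payload; // pass-through, as in the test
	}

}
```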

View File

@@ -1,5 +1,5 @@
/*
* Copyright 2019-2021 the original author or authors.
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@ import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import kafka.server.KafkaConfig;
import org.junit.AfterClass;
@@ -34,10 +33,13 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.config.ListenerContainerCustomizer;
import org.springframework.context.ApplicationContext;
import org.springframework.cloud.stream.messaging.Processor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
@@ -47,6 +49,8 @@ import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.kafka.transaction.KafkaAwareTransactionManager;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.SubscribableChannel;
import org.springframework.messaging.support.GenericMessage;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.backoff.FixedBackOff;
@@ -57,7 +61,6 @@ import static org.mockito.Mockito.mock;
/**
* @author Gary Russell
* @author Soby Chacko
* @since 3.0
*
*/
@@ -66,11 +69,6 @@ import static org.mockito.Mockito.mock;
"spring.kafka.consumer.properties.isolation.level=read_committed",
"spring.kafka.consumer.enable-auto-commit=false",
"spring.kafka.consumer.auto-offset-reset=earliest",
"spring.cloud.function.definition=listenIn;listenIn2",
"spring.cloud.stream.function.bindings.listenIn-in-0=input",
"spring.cloud.stream.function.bindings.listenIn-out-0=output",
"spring.cloud.stream.function.bindings.listenIn2-in-0=input2",
"spring.cloud.stream.function.bindings.listenIn2-out-0=output2",
"spring.cloud.stream.bindings.input.destination=consumer.producer.txIn",
"spring.cloud.stream.bindings.input.group=consumer.producer.tx",
"spring.cloud.stream.bindings.input.consumer.max-attempts=1",
@@ -93,9 +91,6 @@ public class ConsumerProducerTransactionTests {
@Autowired
private Config config;
@Autowired
private ApplicationContext context;
@BeforeClass
public static void setup() {
System.setProperty(KAFKA_BROKERS_PROPERTY,
@@ -120,22 +115,26 @@ public class ConsumerProducerTransactionTests {
public void externalTM() {
assertThat(this.config.input2Container.getContainerProperties().getTransactionManager())
.isSameAs(this.config.tm);
final MessageChannel output2 = context.getBean("output2", MessageChannel.class);
Object handler = KafkaTestUtils.getPropertyValue(output2, "dispatcher.handlers", Set.class)
Object handler = KafkaTestUtils.getPropertyValue(this.config.output2, "dispatcher.handlers", Set.class)
.iterator().next();
assertThat(KafkaTestUtils.getPropertyValue(handler, "delegate.kafkaTemplate.producerFactory"))
.isSameAs(this.config.pf);
}
@EnableBinding(TwoProcessors.class)
@EnableAutoConfiguration
@Configuration
public static class Config {
final List<String> outs = new ArrayList<>();
final CountDownLatch latch = new CountDownLatch(2);
@Autowired
private MessageChannel output;
@Autowired
MessageChannel output2;
AbstractMessageListenerContainer<?, ?> input2Container;
ProducerFactory pf;
@@ -148,19 +147,16 @@ public class ConsumerProducerTransactionTests {
this.latch.countDown();
}
@Bean
public Function<String, String> listenIn() {
return in -> {
if (in.equals("two")) {
throw new RuntimeException("fail");
}
return in.toUpperCase();
};
@StreamListener(Processor.INPUT)
public void listenIn(String in) {
this.output.send(new GenericMessage<>(in.toUpperCase()));
if (in.equals("two")) {
throw new RuntimeException("fail");
}
}
@Bean
public Function<String, String> listenIn2() {
return in -> in;
@StreamListener("input2")
public void listenIn2(String in) {
}
@Bean
@@ -191,6 +187,17 @@ public class ConsumerProducerTransactionTests {
this.tm = mock;
return mock;
}
}
}
}
public interface TwoProcessors extends Processor {
@Input
SubscribableChannel input2();
@Output
MessageChannel output2();
}
}
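On its functional side, this test binds two functions through `spring.cloud.function.definition` and relies on the binder's `transaction-id-prefix` for consume-process-produce transactions, with downstream consumers reading `read_committed`. A condensed, hypothetical standalone version is sketched below; the transaction-id prefix, class name, and the omission of destination/group properties are assumptions made for the sketch, not taken from the diff.

```java
import java.util.function.Function;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;

@SpringBootApplication
public class ConsumerProducerTxDemo {

	public static void main(String[] args) {
		SpringApplication.run(ConsumerProducerTxDemo.class,
				"--spring.cloud.function.definition=listenIn;listenIn2",
				// enables transactional producers in the Kafka binder
				"--spring.cloud.stream.kafka.binder.transaction.transaction-id-prefix=tx-",
				// downstream consumers only see committed records
				"--spring.kafka.consumer.properties.isolation.level=read_committed");
	}

	@Bean
	public Function<String, String> listenIn() {
		return in -> {
			if ("two".equals(in)) {
				// the failure rolls back the transaction; "TWO" is never visible downstream
				throw new RuntimeException("fail");
			}
			return in.toUpperCase();
		};
	}

	@Bean
	public Function<String, String> listenIn2() {
		return in -> in;
	}

}
```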