Compare commits
176 Commits
v2.1.0.RC2...v3.0.0.M4
110
.mvn/wrapper/MavenWrapperDownloader.java
vendored
Executable file
@@ -0,0 +1,110 @@
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/

import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;

public class MavenWrapperDownloader {

    /**
     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
     */
    private static final String DEFAULT_DOWNLOAD_URL =
            "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar";

    /**
     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
     * use instead of the default one.
     */
    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
            ".mvn/wrapper/maven-wrapper.properties";

    /**
     * Path where the maven-wrapper.jar will be saved to.
     */
    private static final String MAVEN_WRAPPER_JAR_PATH =
            ".mvn/wrapper/maven-wrapper.jar";

    /**
     * Name of the property which should be used to override the default download url for the wrapper.
     */
    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";

    public static void main(String args[]) {
        System.out.println("- Downloader started");
        File baseDirectory = new File(args[0]);
        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());

        // If the maven-wrapper.properties exists, read it and check if it contains a custom
        // wrapperUrl parameter.
        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
        String url = DEFAULT_DOWNLOAD_URL;
        if(mavenWrapperPropertyFile.exists()) {
            FileInputStream mavenWrapperPropertyFileInputStream = null;
            try {
                mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
                Properties mavenWrapperProperties = new Properties();
                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
            } catch (IOException e) {
                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
            } finally {
                try {
                    if(mavenWrapperPropertyFileInputStream != null) {
                        mavenWrapperPropertyFileInputStream.close();
                    }
                } catch (IOException e) {
                    // Ignore ...
                }
            }
        }
        System.out.println("- Downloading from: : " + url);

        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
        if(!outputFile.getParentFile().exists()) {
            if(!outputFile.getParentFile().mkdirs()) {
                System.out.println(
                        "- ERROR creating output direcrory '" + outputFile.getParentFile().getAbsolutePath() + "'");
            }
        }
        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
        try {
            downloadFileFromURL(url, outputFile);
            System.out.println("Done");
            System.exit(0);
        } catch (Throwable e) {
            System.out.println("- Error downloading");
            e.printStackTrace();
            System.exit(1);
        }
    }

    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
        URL website = new URL(urlString);
        ReadableByteChannel rbc;
        rbc = Channels.newChannel(website.openStream());
        FileOutputStream fos = new FileOutputStream(destination);
        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
        fos.close();
        rbc.close();
    }

}
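For reference only (not part of the diff): on Java 7+ the body of `downloadFileFromURL` above is equivalent to a single `Files.copy` call; a minimal sketch, assuming the same imports plus `java.nio.file.*`:

[source, java]
----
private static void downloadFileFromURL(String urlString, File destination) throws Exception {
    // Streams the URL contents straight to the target file, replacing any previous jar
    try (InputStream in = new URL(urlString).openStream()) {
        Files.copy(in, destination.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
}
----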
BIN
.mvn/wrapper/maven-wrapper.jar
vendored
Normal file → Executable file
Binary file not shown.
2
.mvn/wrapper/maven-wrapper.properties
vendored
Normal file → Executable file
@@ -1 +1 @@
-distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.3.3/apache-maven-3.3.3-bin.zip
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
@@ -21,7 +21,7 @@
 <repository>
     <id>spring-snapshots</id>
     <name>Spring Snapshots</name>
-    <url>http://repo.spring.io/libs-snapshot-local</url>
+    <url>https://repo.spring.io/libs-snapshot-local</url>
     <snapshots>
         <enabled>true</enabled>
     </snapshots>
@@ -29,7 +29,7 @@
 <repository>
     <id>spring-milestones</id>
     <name>Spring Milestones</name>
-    <url>http://repo.spring.io/libs-milestone-local</url>
+    <url>https://repo.spring.io/libs-milestone-local</url>
     <snapshots>
         <enabled>false</enabled>
     </snapshots>
@@ -37,7 +37,7 @@
 <repository>
     <id>spring-releases</id>
     <name>Spring Releases</name>
-    <url>http://repo.spring.io/release</url>
+    <url>https://repo.spring.io/release</url>
     <snapshots>
         <enabled>false</enabled>
     </snapshots>
@@ -47,7 +47,7 @@
 <pluginRepository>
     <id>spring-snapshots</id>
     <name>Spring Snapshots</name>
-    <url>http://repo.spring.io/libs-snapshot-local</url>
+    <url>https://repo.spring.io/libs-snapshot-local</url>
     <snapshots>
         <enabled>true</enabled>
     </snapshots>
@@ -55,7 +55,7 @@
 <pluginRepository>
     <id>spring-milestones</id>
     <name>Spring Milestones</name>
-    <url>http://repo.spring.io/libs-milestone-local</url>
+    <url>https://repo.spring.io/libs-milestone-local</url>
     <snapshots>
         <enabled>false</enabled>
     </snapshots>
4
LICENSE
@@ -1,6 +1,6 @@
 Apache License
 Version 2.0, January 2004
-http://www.apache.org/licenses/
+https://www.apache.org/licenses/

 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

@@ -192,7 +192,7 @@
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

-    http://www.apache.org/licenses/LICENSE-2.0
+    https://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
184
README.adoc
@@ -1,4 +1,8 @@
// Do not edit this file (e.g. go instead to src/main/asciidoc)
////
DO NOT EDIT THIS FILE. IT WAS GENERATED.
Manual changes to this file will be lost when it is generated again.
Edit the files in the src/main/asciidoc/ directory instead.
////

:jdkversion: 1.8
:github-tag: master
@@ -128,7 +132,7 @@ If set to `true`, the binder creates new topics automatically.
If set to `false`, the binder relies on the topics being already configured.
In the latter case, if the topics do not exist, the binder fails to start.
+
NOTE: This setting is independent of the `auto.topic.create.enable` setting of the broker and does not influence it.
NOTE: This setting is independent of the `auto.create.topics.enable` setting of the broker and does not influence it.
If the server is set to auto-create topics, they may be created as part of the metadata retrieval request, with default broker settings.
+
Default: `true`.
@@ -158,26 +162,20 @@ Default: none.
[[kafka-consumer-properties]]
==== Kafka Consumer Properties

NOTE: To avoid repetition, Spring Cloud Stream supports setting values for all channels, in the format of `spring.cloud.stream.default.<property>=<value>`.

The following properties are available for Kafka consumers only and
must be prefixed with `spring.cloud.stream.kafka.bindings.<channelName>.consumer.`.

admin.configuration::
A `Map` of Kafka topic properties used when provisioning topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.admin.configuration.message.format.version=0.9.0.0`
+
Default: none.
Since version 2.1.1, this property is deprecated in favor of `topic.properties`, and support for it will be removed in a future version.

admin.replicas-assignment::
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
Used when provisioning new topics.
See the `NewTopic` Javadocs in the `kafka-clients` jar.
+
Default: none.
Since version 2.1.1, this property is deprecated in favor of `topic.replicas-assignment`, and support for it will be removed in a future version.

admin.replication-factor::
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
Ignored if `replicas-assignments` is present.
+
Default: none (the binder-wide default of 1 is used).
Since version 2.1.1, this property is deprecated in favor of `topic.replication-factor`, and support for it will be removed in a future version.

autoRebalanceEnabled::
When `true`, topic partitions is automatically rebalanced between the members of a consumer group.
@@ -245,6 +243,8 @@ Default: null (If not specified, messages that result in errors are forwarded to
dlqProducerProperties::
Using this, DLQ-specific producer properties can be set.
All the properties available through kafka producer properties can be set through this property.
When native decoding is enabled on the consumer (i.e., useNativeDecoding: true) , the application must provide corresponding key/value serializers for DLQ.
This must be provided in the form of `dlqProducerProperties.configuration.key.serializer` and `dlqProducerProperties.configuration.value.serializer`.
+
Default: Default Kafka producer properties.
standardHeaders::
@@ -270,30 +270,54 @@ Note, the time taken to detect new topics that match the pattern is controlled b
This can be configured using the `configuration` property above.
+
Default: `false`
topic.properties::
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.topic.properties.message.format.version=0.9.0.0`
+
Default: none.
topic.replicas-assignment::
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
Used when provisioning new topics.
See the `NewTopic` Javadocs in the `kafka-clients` jar.
+
Default: none.
topic.replication-factor::
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
Ignored if `replicas-assignments` is present.
+
Default: none (the binder-wide default of 1 is used).
pollTimeout::
Timeout used for polling in pollable consumers.
+
Default: 5 seconds.

==== Consuming Batches

Starting with version 3.0, when `spring.cloud.stream.binding.<name>.consumer.batch-mode` is set to `true`, all of the records received by polling the Kafka `Consumer` will be presented as a `List<?>` to the listener method.
Otherwise, the method will be called with one record at a time.
The size of the batch is controlled by Kafka consumer properties `max.poll.records`, `min.fetch.bytes`, `fetch.max.wait.ms`; refer to the Kafka documentation for more information.

IMPORTANT: Retry within the binder is not supported when using batch mode, so `maxAttempts` will be overridden to 1.
You can configure a `SeekToCurrentBatchErrorHandler` (using a `ListenerContainerCustomizer`) to achieve similar functionality to retry in the binder.
You can also use a manual `AckMode` and call `Ackowledgment.nack(index, sleep)` to commit the offsets for a partial batch and have the remaining records redelivered.
Refer to the https://docs.spring.io/spring-kafka/docs/2.3.0.BUILD-SNAPSHOT/reference/html/#committing-offsets[Spring for Apache Kafka documentation] for more information about these techniques.
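A minimal sketch of such a batch listener (the classic `Sink` binding and the `Person` payload type are assumptions, not part of the original document):

====
[source, java]
----
@EnableBinding(Sink.class)
public class BatchConsumer {

    // With the binding's consumer batch-mode property set to true,
    // each poll is delivered as a single List of converted payloads.
    @StreamListener(Sink.INPUT)
    public void handle(List<Person> people) {
        people.forEach(System.out::println);
    }

}
----
====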

[[kafka-producer-properties]]
==== Kafka Producer Properties

NOTE: To avoid repetition, Spring Cloud Stream supports setting values for all channels, in the format of `spring.cloud.stream.default.<property>=<value>`.

The following properties are available for Kafka producers only and
must be prefixed with `spring.cloud.stream.kafka.bindings.<channelName>.producer.`.

admin.configuration::
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.admin.configuration.message.format.version=0.9.0.0`
+
Default: none.
Since version 2.1.1, this property is deprecated in favor of `topic.properties`, and support for it will be removed in a future version.

admin.replicas-assignment::
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
Used when provisioning new topics.
See `NewTopic` javadocs in the `kafka-clients` jar.
+
Default: none.
Since version 2.1.1, this property is deprecated in favor of `topic.replicas-assignment`, and support for it will be removed in a future version.

admin.replication-factor::
The replication factor to use when provisioning new topics. Overrides the binder-wide setting.
Ignored if `replicas-assignments` is present.
+
Default: none (the binder-wide default of 1 is used).
Since version 2.1.1, this property is deprecated in favor of `topic.replication-factor`, and support for it will be removed in a future version.

bufferSize::
Upper limit, in bytes, of how much data the Kafka producer attempts to batch before sending.
@@ -303,6 +327,13 @@ sync::
Whether the producer is synchronous.
+
Default: `false`.
sendTimeoutExpression::
A SpEL expression evaluated against the outgoing message used to evaluate the time to wait for ack when synchronous publish is enabled -- for example, `headers['mySendTimeout']`.
The value of the timeout is in milliseconds.
With versions before 3.0, the payload could not be used unless native encoding was being used because, by the time this expression was evaluated, the payload was already in the form of a `byte[]`.
Now, the expression is evaluated before the payload is converted.
+
Default: `none`.
batchTimeout::
How long the producer waits to allow more messages to accumulate in the same batch before sending the messages.
(Normally, the producer does not wait at all and simply sends all the messages that accumulated while the previous send was in progress.) A non-zero value may increase throughput at the expense of latency.
@@ -310,7 +341,8 @@ How long the producer waits to allow more messages to accumulate in the same bat
Default: `0`.
messageKeyExpression::
A SpEL expression evaluated against the outgoing message used to populate the key of the produced Kafka message -- for example, `headers['myKey']`.
The payload cannot be used because, by the time this expression is evaluated, the payload is already in the form of a `byte[]`.
With versions before 3.0, the payload could not be used unless native encoding was being used because, by the time this expression was evaluated, the payload was already in the form of a `byte[]`.
Now, the expression is evaluated before the payload is converted.
+
Default: `none`.
headerPatterns::
@@ -326,6 +358,35 @@ configuration::
Map with a key/value pair containing generic Kafka producer properties.
+
Default: Empty map.
topic.properties::
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.output.producer.topic.properties.message.format.version=0.9.0.0`
+
topic.replicas-assignment::
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
Used when provisioning new topics.
See the `NewTopic` Javadocs in the `kafka-clients` jar.
+
Default: none.
topic.replication-factor::
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
Ignored if `replicas-assignments` is present.
+
Default: none (the binder-wide default of 1 is used).
useTopicHeader::
Set to `true` to override the default binding destination (topic name) with the value of the `KafkaHeaders.TOPIC` message header in the outbound message.
If the header is not present, the default binding destination is used.
Default: `false`.
+
recordMetadataChannel::
The bean name of a `MessageChannel` to which successful send results should be sent; the bean must exist in the application context.
The message sent to the channel is the sent message (after conversion, if any) with an additional header `KafkaHeaders.RECORD_METADATA`.
The header contains a `RecordMetadata` object provided by the Kafka client; it includes the partition and offset where the record was written in the topic.

`ResultMetadata meta = sendResultMsg.getHeaders().get(KafkaHeaders.RECORD_METADATA, RecordMetadata.class)`

Failed sends go the producer error channel (if configured); see <<kafka-error-channels>>.
Default: null
+

NOTE: The Kafka binder uses the `partitionCount` setting of the producer as a hint to create a topic with the given partition count (in conjunction with the `minPartitionCount`, the maximum of the two being the value being used).
Exercise caution when configuring both `minPartitionCount` for a binder and `partitionCount` for an application, as the larger value is used.
@@ -333,6 +394,13 @@ If a topic already exists with a smaller partition count and `autoAddPartitions`
If a topic already exists with a smaller partition count and `autoAddPartitions` is enabled, new partitions are added.
If a topic already exists with a larger number of partitions than the maximum of (`minPartitionCount` or `partitionCount`), the existing partition count is used.

compression::
Set the `compression.type` producer property.
Supported values are `none`, `gzip`, `snappy` and `lz4`.
If you override the `kafka-clients` jar to 2.1.0 (or later), as discussed in the https://docs.spring.io/spring-kafka/docs/2.2.x/reference/html/deps-for-21x.html[Spring for Apache Kafka documentation], and wish to use `zstd` compression, use `spring.cloud.stream.kafka.bindings.<binding-name>.producer.configuration.compression.type=zstd`.
+
Default: `none`.

==== Usage examples

In this section, we show the use of the preceding properties for specific scenarios.
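A short, hypothetical illustration of the `useTopicHeader` and `recordMetadataChannel` producer properties described above; the binding name `output` and the channel bean name `results` are assumptions, and the binding would need `useTopicHeader: true` and `recordMetadataChannel: results` set. A minimal sketch:

====
[source, java]
----
@Bean
public MessageChannel results() {
    // target for successful send results (recordMetadataChannel: results)
    return new DirectChannel();
}

@ServiceActivator(inputChannel = "results")
public void sendResult(Message<?> sent) {
    RecordMetadata meta = sent.getHeaders()
            .get(KafkaHeaders.RECORD_METADATA, RecordMetadata.class);
    System.out.println("Written to partition " + meta.partition() + ", offset " + meta.offset());
}

// With useTopicHeader: true, the KafkaHeaders.TOPIC header overrides the binding destination
public void sendTo(MessageChannel output, String topic, String payload) {
    output.send(MessageBuilder.withPayload(payload)
            .setHeader(KafkaHeaders.TOPIC, topic)
            .build());
}
----
====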
@@ -368,7 +436,7 @@ public class ManuallyAcknowdledgingConsumer {
===== Example: Security Configuration

Apache Kafka 0.9 supports secure connections between client and brokers.
To take advantage of this feature, follow the guidelines in the http://kafka.apache.org/090/documentation.html#security_configclients[Apache Kafka Documentation] as well as the Kafka 0.9 http://docs.confluent.io/2.0.0/kafka/security.html[security guidelines from the Confluent documentation].
To take advantage of this feature, follow the guidelines in the https://kafka.apache.org/090/documentation.html#security_configclients[Apache Kafka Documentation] as well as the Kafka 0.9 https://docs.confluent.io/2.0.0/kafka/security.html[security guidelines from the Confluent documentation].
Use the `spring.cloud.stream.kafka.binder.configuration` option to set security properties for all clients created by the binder.

For example, to set `security.protocol` to `SASL_SSL`, set the following property:
@@ -380,7 +448,7 @@ spring.cloud.stream.kafka.binder.configuration.security.protocol=SASL_SSL

All the other security properties can be set in a similar manner.

When using Kerberos, follow the instructions in the http://kafka.apache.org/090/documentation.html#security_sasl_clientconfig[reference documentation] for creating and referencing the JAAS configuration.
When using Kerberos, follow the instructions in the https://kafka.apache.org/090/documentation.html#security_sasl_clientconfig[reference documentation] for creating and referencing the JAAS configuration.

Spring Cloud Stream supports passing JAAS configuration information to the application by using a JAAS configuration file and using Spring Boot properties.

@@ -493,6 +561,50 @@ public class Application {
}
----

[[kafka-transactional-binder]]
=== Transactional Binder

Enable transactions by setting `spring.cloud.stream.kafka.binder.transaction.transactionIdPrefix` to a non-empty value, e.g. `tx-`.
When used in a processor application, the consumer starts the transaction; any records sent on the consumer thread participate in the same transaction.
When the listener exits normally, the listener container will send the offset to the transaction and commit it.
A common producer factory is used for all producer bindings configured using `spring.cloud.stream.kafka.binder.transaction.producer.*` properties; individual binding Kafka producer properties are ignored.

If you wish to use transactions in a source application, or from some arbitrary thread for producer-only transaction (e.g. `@Scheduled` method), you must get a reference to the transactional producer factory and define a `KafkaTransactionManager` bean using it.

====
[source, java]
----
@Bean
public PlatformTransactionManager transactionManager(BinderFactory binders) {
    ProducerFactory<byte[], byte[]> pf = ((KafkaMessageChannelBinder) binders.getBinder(null,
        MessageChannel.class)).getTransactionalProducerFactory();
    return new KafkaTransactionManager<>(pf);
}
----
====

Notice that we get a reference to the binder using the `BinderFactory`; use `null` in the first argument when there is only one binder configured.
If more than one binder is configured, use the binder name to get the reference.
Once we have a reference to the binder, we can obtain a reference to the `ProducerFactory` and create a transaction manager.

Then you would use normal Spring transaction support, e.g. `TransactionTemplate` or `@Transactional`, for example:

====
[source, java]
----
public static class Sender {

    @Transactional
    public void doInTransaction(MessageChannel output, List<String> stuffToSend) {
        stuffToSend.forEach(stuff -> output.send(new GenericMessage<>(stuff)));
    }

}
----
====
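Equivalently, a producer-only transaction can be run programmatically with a `TransactionTemplate` built from the same transaction manager; a minimal sketch, not part of the original document (the `output` channel name is an assumption):

====
[source, java]
----
@Bean
public ApplicationRunner producerOnlyTx(PlatformTransactionManager tm, MessageChannel output) {
    TransactionTemplate template = new TransactionTemplate(tm);
    return args -> template.execute(status -> {
        // both sends are committed (or aborted) together
        output.send(new GenericMessage<>("first"));
        output.send(new GenericMessage<>("second"));
        return null;
    });
}
----
====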

If you wish to synchronize producer-only transactions with those from some other transaction manager, use a `ChainedTransactionManager`.

[[kafka-error-channels]]
=== Error Channels

@@ -623,7 +735,7 @@ source control.

The projects that require middleware generally include a
`docker-compose.yml`, so consider using
http://compose.docker.io/[Docker Compose] to run the middeware servers
https://compose.docker.io/[Docker Compose] to run the middeware servers
in Docker containers.

=== Documentation
@@ -632,13 +744,13 @@ There is a "full" profile that will generate documentation.

=== Working with the code
If you don't have an IDE preference we would recommend that you use
http://www.springsource.com/developer/sts[Spring Tools Suite] or
http://eclipse.org[Eclipse] when working with the code. We use the
http://eclipse.org/m2e/[m2eclipe] eclipse plugin for maven support. Other IDEs and tools
https://www.springsource.com/developer/sts[Spring Tools Suite] or
https://eclipse.org[Eclipse] when working with the code. We use the
https://eclipse.org/m2e/[m2eclipe] eclipse plugin for maven support. Other IDEs and tools
should also work without issue.

==== Importing into eclipse with m2eclipse
We recommend the http://eclipse.org/m2e/[m2eclipe] eclipse plugin when working with
We recommend the https://eclipse.org/m2e/[m2eclipe] eclipse plugin when working with
eclipse. If you don't already have m2eclipse installed it is available from the "eclipse
marketplace".

@@ -691,7 +803,7 @@ added after the original pull request but before a merge.
`eclipse-code-formatter.xml` file from the
https://github.com/spring-cloud/build/tree/master/eclipse-coding-conventions.xml[Spring
Cloud Build] project. If using IntelliJ, you can use the
http://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter
https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter
Plugin] to import the same file.
* Make sure all new `.java` files to have a simple Javadoc class comment with at least an
`@author` tag identifying you, and preferably at least a paragraph on what the class is
@@ -704,7 +816,7 @@ added after the original pull request but before a merge.
* A few unit tests would help a lot as well -- someone has to do it.
* If no-one else is using your branch, please rebase it against the current master (or
other target branch in the main project).
* When writing a commit message please follow http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions],
* When writing a commit message please follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions],
if you are fixing an existing issue please add `Fixes gh-XXXX` at the end of the commit
message (where XXXX is the issue number).
43
docs/pom.xml
@@ -1,13 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <artifactId>spring-cloud-stream-binder-kafka-docs</artifactId>
  <parent>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
-   <version>2.1.0.RC2</version>
+   <version>3.0.0.M4</version>
  </parent>
  <packaging>pom</packaging>
  <name>spring-cloud-stream-binder-kafka-docs</name>
@@ -15,35 +15,50 @@
  <properties>
    <docs.main>spring-cloud-stream-binder-kafka</docs.main>
    <main.basedir>${basedir}/..</main.basedir>
    <maven.plugin.plugin.version>3.4</maven.plugin.plugin.version>
  </properties>

  <build>
    <plugins>
      <plugin>
        <artifactId>maven-deploy-plugin</artifactId>
        <version>2.8.2</version>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
    </plugins>
  </build>
  <profiles>
    <profile>
      <id>docs</id>
      <build>
        <plugins>
          <plugin>
            <groupId>pl.project13.maven</groupId>
            <artifactId>git-commit-id-plugin</artifactId>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-dependency-plugin</artifactId>
          </plugin>
          <plugin>
            <groupId>org.asciidoctor</groupId>
            <artifactId>asciidoctor-maven-plugin</artifactId>
            <inherited>false</inherited>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-resources-plugin</artifactId>
          </plugin>
          <plugin>
            <groupId>com.agilejava.docbkx</groupId>
            <artifactId>docbkx-maven-plugin</artifactId>
            <groupId>org.asciidoctor</groupId>
            <artifactId>asciidoctor-maven-plugin</artifactId>
            <version>${asciidoctor-maven-plugin.version}</version>
            <configuration>
              <sourceDirectory>${project.build.directory}/refdocs/</sourceDirectory>
              <attributes>
                <spring-cloud-stream-version>${project.version}</spring-cloud-stream-version>
              </attributes>
            </configuration>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <inherited>false</inherited>
          </plugin>
          <plugin>
            <groupId>org.codehaus.mojo</groupId>
            <artifactId>build-helper-maven-plugin</artifactId>
            <inherited>false</inherited>
          </plugin>
        </plugins>
      </build>
@@ -25,10 +25,8 @@ spring.cloud.stream.bindings.input.group=so8400replay
spring.cloud.stream.bindings.input.destination=error.so8400out.so8400

spring.cloud.stream.bindings.output.destination=so8400out
spring.cloud.stream.bindings.output.producer.partitioned=true

spring.cloud.stream.bindings.parkingLot.destination=so8400in.parkingLot
spring.cloud.stream.bindings.parkingLot.producer.partitioned=true

spring.cloud.stream.kafka.binder.configuration.auto.offset.reset=earliest

@@ -40,7 +40,7 @@ function check_if_anything_to_sync() {

function retrieve_current_branch() {
    # Code getting the name of the current branch. For master we want to publish as we did until now
-   # http://stackoverflow.com/questions/1593051/how-to-programmatically-determine-the-current-checked-out-git-branch
+   # https://stackoverflow.com/questions/1593051/how-to-programmatically-determine-the-current-checked-out-git-branch
    # If there is a branch already passed will reuse it - otherwise will try to find it
    CURRENT_BRANCH=${BRANCH}
    if [[ -z "${CURRENT_BRANCH}" ]] ; then
@@ -147,7 +147,7 @@ function copy_docs_for_current_version() {
        COMMIT_CHANGES="yes"
    else
        echo -e "Current branch is [${CURRENT_BRANCH}]"
-       # http://stackoverflow.com/questions/29300806/a-bash-script-to-check-if-a-string-is-present-in-a-comma-separated-list-of-strin
+       # https://stackoverflow.com/questions/29300806/a-bash-script-to-check-if-a-string-is-present-in-a-comma-separated-list-of-strin
        if [[ ",${WHITELISTED_BRANCHES_VALUE}," = *",${CURRENT_BRANCH},"* ]] ; then
            mkdir -p ${ROOT_FOLDER}/${CURRENT_BRANCH}
            echo -e "Branch [${CURRENT_BRANCH}] is whitelisted! Will copy the current docs to the [${CURRENT_BRANCH}] folder"
Binary file not shown.
After: image added (117 KiB)
File diff suppressed because it is too large
@@ -49,7 +49,6 @@ spring:
      output:
        destination: partitioned.topic
        producer:
-         partitioned: true
          partition-key-expression: headers['partitionKey']
          partition-count: 12
----
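For illustration only (not part of this change), a sender matching the configuration above would populate the `partitionKey` header that `partition-key-expression` evaluates; the `output` channel name is an assumption:

====
[source, java]
----
output.send(MessageBuilder.withPayload(payload)
        .setHeader("partitionKey", key)   // consumed by partition-key-expression: headers['partitionKey']
        .build());
----
====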
@@ -10,7 +10,7 @@

[[spring-cloud-stream-binder-kafka-reference]]
= Spring Cloud Stream Kafka Binder Reference Guide
Sabby Anandan, Marius Bogoevici, Eric Bottard, Mark Fisher, Ilayaperumal Gopinathan, Gunnar Hillert, Mark Pollack, Patrick Peralta, Glenn Renfro, Thomas Risberg, Dave Syer, David Turanski, Janne Valkealahti, Benjamin Klein, Henryk Konsek, Gary Russell
Sabby Anandan, Marius Bogoevici, Eric Bottard, Mark Fisher, Ilayaperumal Gopinathan, Gunnar Hillert, Mark Pollack, Patrick Peralta, Glenn Renfro, Thomas Risberg, Dave Syer, David Turanski, Janne Valkealahti, Benjamin Klein, Henryk Konsek, Gary Russell, Arnaud Jardiné, Soby Chacko
:doctype: book
:toc:
:toclevels: 4
@@ -21,16 +21,20 @@ Sabby Anandan, Marius Bogoevici, Eric Bottard, Mark Fisher, Ilayaperumal Gopinat
:spring-cloud-stream-binder-kafka-repo: snapshot
:github-tag: master
:spring-cloud-stream-binder-kafka-docs-version: current
:spring-cloud-stream-binder-kafka-docs: http://docs.spring.io/spring-cloud-stream-binder-kafka/docs/{spring-cloud-stream-binder-kafka-docs-version}/reference
:spring-cloud-stream-binder-kafka-docs-current: http://docs.spring.io/spring-cloud-stream-binder-kafka/docs/current-SNAPSHOT/reference/html/
:spring-cloud-stream-binder-kafka-docs: https://docs.spring.io/spring-cloud-stream-binder-kafka/docs/{spring-cloud-stream-binder-kafka-docs-version}/reference
:spring-cloud-stream-binder-kafka-docs-current: https://docs.spring.io/spring-cloud-stream-binder-kafka/docs/current-SNAPSHOT/reference/html/
:github-repo: spring-cloud/spring-cloud-stream-binder-kafka
:github-raw: http://raw.github.com/{github-repo}/{github-tag}
:github-code: http://github.com/{github-repo}/tree/{github-tag}
:github-wiki: http://github.com/{github-repo}/wiki
:github-master-code: http://github.com/{github-repo}/tree/master
:github-raw: https://raw.github.com/{github-repo}/{github-tag}
:github-code: https://github.com/{github-repo}/tree/{github-tag}
:github-wiki: https://github.com/{github-repo}/wiki
:github-master-code: https://github.com/{github-repo}/tree/master
:sc-ext: java
// ======================================================================================

*{spring-cloud-stream-version}*

= Reference Guide
include::overview.adoc[]
171
mvnw
vendored
@@ -8,7 +8,7 @@
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
@@ -54,38 +54,16 @@ case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
#
|
||||
# Look for the Apple JDKs first to preserve the existing behaviour, and then look
|
||||
# for the new JDKs provided by Oracle.
|
||||
#
|
||||
if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then
|
||||
#
|
||||
# Apple JDKs
|
||||
#
|
||||
export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then
|
||||
#
|
||||
# Apple JDKs
|
||||
#
|
||||
export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then
|
||||
#
|
||||
# Oracle JDKs
|
||||
#
|
||||
export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then
|
||||
#
|
||||
# Apple JDKs
|
||||
#
|
||||
export JAVA_HOME=`/usr/libexec/java_home`
|
||||
fi
|
||||
;;
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
@@ -130,7 +108,7 @@ if $cygwin ; then
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For Migwn, ensure paths are in UNIX format before anything is touched
|
||||
# For Mingw, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
@@ -184,27 +162,28 @@ fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
local basedir=$(pwd)
|
||||
local wdir=$(pwd)
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
wdir=$(cd "$wdir/.."; pwd)
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
@@ -216,30 +195,92 @@ concat_lines() {
|
||||
fi
|
||||
}
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)}
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
##########################################################################################
|
||||
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
|
||||
# This allows using the maven wrapper in projects that prohibit checking in binary data.
|
||||
##########################################################################################
|
||||
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found .mvn/wrapper/maven-wrapper.jar"
|
||||
fi
|
||||
else
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
|
||||
fi
|
||||
jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
|
||||
while IFS="=" read key value; do
|
||||
case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
|
||||
esac
|
||||
done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Downloading from: $jarUrl"
|
||||
fi
|
||||
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
|
||||
|
||||
if command -v wget > /dev/null; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found wget ... using wget"
|
||||
fi
|
||||
wget "$jarUrl" -O "$wrapperJarPath"
|
||||
elif command -v curl > /dev/null; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found curl ... using curl"
|
||||
fi
|
||||
curl -o "$wrapperJarPath" "$jarUrl"
|
||||
else
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Falling back to using Java to download"
|
||||
fi
|
||||
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
|
||||
if [ -e "$javaClass" ]; then
|
||||
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo " - Compiling MavenWrapperDownloader.java ..."
|
||||
fi
|
||||
# Compiling the Java class
|
||||
("$JAVA_HOME/bin/javac" "$javaClass")
|
||||
fi
|
||||
if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
|
||||
# Running the downloader
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo " - Running MavenWrapperDownloader.java ..."
|
||||
fi
|
||||
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
##########################################################################################
|
||||
# End of extension
|
||||
##########################################################################################
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
fi
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# Provide a "standardized" way to retrieve the CLI args that will
|
||||
# work with both Windows and non-Windows executions.
|
||||
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
|
||||
export MAVEN_CMD_LINE_ARGS
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
echo "Running version check"
|
||||
VERSION=$( sed '\!<parent!,\!</parent!d' `dirname $0`/pom.xml | grep '<version' | head -1 | sed -e 's/.*<version>//' -e 's!</version>.*$!!' )
|
||||
echo "The found version is [${VERSION}]"
|
||||
|
||||
if echo $VERSION | egrep -q 'M|RC'; then
|
||||
echo Activating \"milestone\" profile for version=\"$VERSION\"
|
||||
echo $MAVEN_ARGS | grep -q milestone || MAVEN_ARGS="$MAVEN_ARGS -Pmilestone"
|
||||
else
|
||||
echo Deactivating \"milestone\" profile for version=\"$VERSION\"
|
||||
echo $MAVEN_ARGS | grep -q milestone && MAVEN_ARGS=$(echo $MAVEN_ARGS | sed -e 's/-Pmilestone//')
|
||||
fi
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} ${MAVEN_ARGS} "$@"
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
|
||||
306
mvnw.cmd
vendored
Normal file → Executable file
@@ -1,145 +1,161 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM http://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
set MAVEN_CMD_LINE_ARGS=%*
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
|
||||
set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar""
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS%
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM https://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM set title of command window
|
||||
title %0
|
||||
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
|
||||
FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
|
||||
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
|
||||
)
|
||||
|
||||
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
|
||||
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
|
||||
if exist %WRAPPER_JAR% (
|
||||
echo Found %WRAPPER_JAR%
|
||||
) else (
|
||||
echo Couldn't find %WRAPPER_JAR%, downloading it ...
|
||||
echo Downloading from: %DOWNLOAD_URL%
|
||||
powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
|
||||
echo Finished downloading %WRAPPER_JAR%
|
||||
)
|
||||
@REM End of extension
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
|
||||
67
pom.xml
@@ -1,21 +1,25 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.1.0.RC2</version>
|
||||
<version>3.0.0.M4</version>
|
||||
<packaging>pom</packaging>
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-build</artifactId>
|
||||
<version>2.1.0.RC2</version>
|
||||
<version>2.2.0.M5</version>
|
||||
<relativePath />
|
||||
</parent>
|
||||
<properties>
|
||||
<java.version>1.8</java.version>
|
||||
<spring-kafka.version>2.2.0.RELEASE</spring-kafka.version>
|
||||
<spring-integration-kafka.version>3.1.0.RELEASE</spring-integration-kafka.version>
|
||||
<kafka.version>2.0.0</kafka.version>
|
||||
<spring-cloud-stream.version>2.1.0.RC2</spring-cloud-stream.version>
|
||||
<spring-kafka.version>2.3.0.RC1</spring-kafka.version>
|
||||
<spring-integration-kafka.version>3.2.0.RC1</spring-integration-kafka.version>
|
||||
<kafka.version>2.3.0</kafka.version>
|
||||
<spring-cloud-schema-registry.version>1.0.0.M1</spring-cloud-schema-registry.version>
|
||||
<spring-cloud-stream.version>3.0.0.M4</spring-cloud-stream.version>
|
||||
<maven-checkstyle-plugin.failsOnError>true</maven-checkstyle-plugin.failsOnError>
|
||||
<maven-checkstyle-plugin.failsOnViolation>true</maven-checkstyle-plugin.failsOnViolation>
|
||||
<maven-checkstyle-plugin.includeTestSourceDirectory>true</maven-checkstyle-plugin.includeTestSourceDirectory>
|
||||
</properties>
|
||||
<modules>
|
||||
<module>spring-cloud-stream-binder-kafka</module>
|
||||
@@ -110,8 +114,8 @@
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-schema</artifactId>
|
||||
<version>${spring-cloud-stream.version}</version>
|
||||
<artifactId>spring-cloud-schema-registry-client</artifactId>
|
||||
<version>${spring-cloud-schema-registry.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
@@ -145,31 +149,6 @@
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-tools</artifactId>
|
||||
<version>${spring-cloud-stream.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>checkstyle-validation</id>
|
||||
<phase>validate</phase>
|
||||
<configuration>
|
||||
<configLocation>checkstyle.xml</configLocation>
|
||||
<headerLocation>checkstyle-header.txt</headerLocation>
|
||||
<suppressionsLocation>checkstyle-suppressions.xml</suppressionsLocation>
|
||||
<encoding>UTF-8</encoding>
|
||||
<consoleOutput>true</consoleOutput>
|
||||
<failsOnError>true</failsOnError>
|
||||
<includeTestSourceDirectory>true</includeTestSourceDirectory>
|
||||
</configuration>
|
||||
<goals>
|
||||
<goal>check</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
@@ -181,7 +160,7 @@
|
||||
<repository>
|
||||
<id>spring-snapshots</id>
|
||||
<name>Spring Snapshots</name>
|
||||
<url>http://repo.spring.io/libs-snapshot-local</url>
|
||||
<url>https://repo.spring.io/libs-snapshot-local</url>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
@@ -192,7 +171,7 @@
|
||||
<repository>
|
||||
<id>spring-milestones</id>
|
||||
<name>Spring Milestones</name>
|
||||
<url>http://repo.spring.io/libs-milestone-local</url>
|
||||
<url>https://repo.spring.io/libs-milestone-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -200,7 +179,7 @@
|
||||
<repository>
|
||||
<id>spring-releases</id>
|
||||
<name>Spring Releases</name>
|
||||
<url>http://repo.spring.io/release</url>
|
||||
<url>https://repo.spring.io/release</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -210,7 +189,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-snapshots</id>
|
||||
<name>Spring Snapshots</name>
|
||||
<url>http://repo.spring.io/libs-snapshot-local</url>
|
||||
<url>https://repo.spring.io/libs-snapshot-local</url>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
@@ -221,7 +200,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-milestones</id>
|
||||
<name>Spring Milestones</name>
|
||||
<url>http://repo.spring.io/libs-milestone-local</url>
|
||||
<url>https://repo.spring.io/libs-milestone-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -229,7 +208,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-releases</id>
|
||||
<name>Spring Releases</name>
|
||||
<url>http://repo.spring.io/libs-release-local</url>
|
||||
<url>https://repo.spring.io/libs-release-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -237,4 +216,12 @@
|
||||
</pluginRepositories>
|
||||
</profile>
|
||||
</profiles>
|
||||
<reporting>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</reporting>
|
||||
</project>
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.1.0.RC2</version>
|
||||
<version>3.0.0.M4</version>
|
||||
</parent>
|
||||
<artifactId>spring-cloud-starter-stream-kafka</artifactId>
|
||||
<description>Spring Cloud Starter Stream Kafka</description>
|
||||
<url>http://projects.spring.io/spring-cloud</url>
|
||||
<url>https://projects.spring.io/spring-cloud</url>
|
||||
<organization>
|
||||
<name>Pivotal Software, Inc.</name>
|
||||
<url>http://www.spring.io</url>
|
||||
<url>https://www.spring.io</url>
|
||||
</organization>
|
||||
<properties>
|
||||
<main.basedir>${basedir}/../..</main.basedir>
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
provides: spring-cloud-starter-stream-kafka
|
||||
@@ -1,18 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.1.0.RC2</version>
|
||||
<version>3.0.0.M4</version>
|
||||
</parent>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
|
||||
<description>Spring Cloud Stream Kafka Binder Core</description>
|
||||
<url>http://projects.spring.io/spring-cloud</url>
|
||||
<url>https://projects.spring.io/spring-cloud</url>
|
||||
<organization>
|
||||
<name>Pivotal Software, Inc.</name>
|
||||
<url>http://www.spring.io</url>
|
||||
<url>https://www.spring.io</url>
|
||||
</organization>
|
||||
|
||||
<dependencies>
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -54,7 +54,8 @@ public class JaasLoginModuleConfiguration {
|
||||
|
||||
public void setControlFlag(String controlFlag) {
|
||||
Assert.notNull(controlFlag, "cannot be null");
|
||||
this.controlFlag = KafkaJaasLoginModuleInitializer.ControlFlag.valueOf(controlFlag.toUpperCase());
|
||||
this.controlFlag = KafkaJaasLoginModuleInitializer.ControlFlag
|
||||
.valueOf(controlFlag.toUpperCase());
|
||||
}
|
||||
|
||||
public Map<String, String> getOptions() {
|
||||
@@ -64,4 +65,5 @@ public class JaasLoginModuleConfiguration {
|
||||
public void setOptions(Map<String, String> options) {
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,8 +16,6 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.properties;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
@@ -25,38 +23,17 @@ import java.util.Map;
|
||||
*
|
||||
* @author Gary Russell
|
||||
* @since 2.0
|
||||
*
|
||||
* @deprecated in favor of {@link KafkaTopicProperties}
|
||||
*/
|
||||
public class KafkaAdminProperties {
|
||||
|
||||
private Short replicationFactor;
|
||||
|
||||
private Map<Integer, List<Integer>> replicasAssignments = new HashMap<>();
|
||||
|
||||
private Map<String, String> configuration = new HashMap<>();
|
||||
|
||||
public Short getReplicationFactor() {
|
||||
return this.replicationFactor;
|
||||
}
|
||||
|
||||
public void setReplicationFactor(Short replicationFactor) {
|
||||
this.replicationFactor = replicationFactor;
|
||||
}
|
||||
|
||||
public Map<Integer, List<Integer>> getReplicasAssignments() {
|
||||
return this.replicasAssignments;
|
||||
}
|
||||
|
||||
public void setReplicasAssignments(Map<Integer, List<Integer>> replicasAssignments) {
|
||||
this.replicasAssignments = replicasAssignments;
|
||||
}
|
||||
@Deprecated
|
||||
public class KafkaAdminProperties extends KafkaTopicProperties {
|
||||
|
||||
public Map<String, String> getConfiguration() {
|
||||
return this.configuration;
|
||||
return getProperties();
|
||||
}
|
||||
|
||||
public void setConfiguration(Map<String, String> configuration) {
|
||||
this.configuration = configuration;
|
||||
setProperties(configuration);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -25,6 +25,8 @@ import javax.validation.constraints.AssertTrue;
|
||||
import javax.validation.constraints.Min;
|
||||
import javax.validation.constraints.NotNull;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.producer.ProducerConfig;
|
||||
|
||||
@@ -40,8 +42,8 @@ import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Configuration properties for the Kafka binder.
|
||||
* The properties in this class are prefixed with <b>spring.cloud.stream.kafka.binder</b>.
|
||||
* Configuration properties for the Kafka binder. The properties in this class are
|
||||
* prefixed with <b>spring.cloud.stream.kafka.binder</b>.
|
||||
*
|
||||
* @author David Turanski
|
||||
* @author Ilayaperumal Gopinathan
|
||||
@@ -49,12 +51,15 @@ import org.springframework.util.StringUtils;
|
||||
* @author Soby Chacko
|
||||
* @author Gary Russell
|
||||
* @author Rafal Zukowski
|
||||
* @author Aldo Sinanaj
|
||||
*/
|
||||
@ConfigurationProperties(prefix = "spring.cloud.stream.kafka.binder")
|
||||
public class KafkaBinderConfigurationProperties {
|
||||
|
||||
private static final String DEFAULT_KAFKA_CONNECTION_STRING = "localhost:9092";
|
||||
|
||||
private final Log logger = LogFactory.getLog(getClass());
|
||||
|
||||
private final Transaction transaction = new Transaction();
|
||||
|
||||
private final KafkaProperties kafkaProperties;
|
||||
@@ -126,11 +131,11 @@ public class KafkaBinderConfigurationProperties {
|
||||
private JaasLoginModuleConfiguration jaas;
|
||||
|
||||
/**
|
||||
* The bean name of a custom header mapper to use instead of a {@link org.springframework.kafka.support.DefaultKafkaHeaderMapper}.
|
||||
* The bean name of a custom header mapper to use instead of a
|
||||
* {@link org.springframework.kafka.support.DefaultKafkaHeaderMapper}.
|
||||
*/
|
||||
private String headerMapperBeanName;
|
||||
|
||||
|
||||
public KafkaBinderConfigurationProperties(KafkaProperties kafkaProperties) {
|
||||
Assert.notNull(kafkaProperties, "'kafkaProperties' cannot be null");
|
||||
this.kafkaProperties = kafkaProperties;
|
||||
@@ -327,9 +332,8 @@ public class KafkaBinderConfigurationProperties {
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an array of host values to a comma-separated String.
|
||||
* It will append the default port value, if not already specified.
|
||||
*
|
||||
* Converts an array of host values to a comma-separated String. It will append the
|
||||
* default port value, if not already specified.
|
||||
* @param hosts host string
|
||||
* @param defaultPort port
|
||||
* @return formatted connection string
|
||||
@@ -373,10 +377,6 @@ public class KafkaBinderConfigurationProperties {
|
||||
return this.requiredAcks;
|
||||
}
|
||||
|
||||
public void setRequiredAcks(int requiredAcks) {
|
||||
this.requiredAcks = String.valueOf(requiredAcks);
|
||||
}
|
||||
|
||||
public void setRequiredAcks(String requiredAcks) {
|
||||
this.requiredAcks = requiredAcks;
|
||||
}
|
||||
@@ -525,15 +525,19 @@ public class KafkaBinderConfigurationProperties {
|
||||
Map<String, Object> consumerConfiguration = new HashMap<>();
|
||||
consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties());
|
||||
// Copy configured binder properties that apply to consumers
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) {
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration
|
||||
.entrySet()) {
|
||||
if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) {
|
||||
consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
|
||||
consumerConfiguration.put(configurationEntry.getKey(),
|
||||
configurationEntry.getValue());
|
||||
}
|
||||
}
|
||||
consumerConfiguration.putAll(this.consumerProperties);
|
||||
filterStreamManagedConfiguration(consumerConfiguration);
|
||||
// Override Spring Boot bootstrap server setting if left to default with the value
|
||||
// configured in the binder
|
||||
return getConfigurationWithBootstrapServer(consumerConfiguration, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
return getConfigurationWithBootstrapServer(consumerConfiguration,
|
||||
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -546,18 +550,41 @@ public class KafkaBinderConfigurationProperties {
|
||||
Map<String, Object> producerConfiguration = new HashMap<>();
|
||||
producerConfiguration.putAll(this.kafkaProperties.buildProducerProperties());
|
||||
// Copy configured binder properties that apply to producers
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) {
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration
|
||||
.entrySet()) {
|
||||
if (ProducerConfig.configNames().contains(configurationEntry.getKey())) {
|
||||
producerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
|
||||
producerConfiguration.put(configurationEntry.getKey(),
|
||||
configurationEntry.getValue());
|
||||
}
|
||||
}
|
||||
producerConfiguration.putAll(this.producerProperties);
|
||||
// Override Spring Boot bootstrap server setting if left to default with the value
|
||||
// configured in the binder
|
||||
return getConfigurationWithBootstrapServer(producerConfiguration, ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
return getConfigurationWithBootstrapServer(producerConfiguration,
|
||||
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
}
|
||||
|
||||
private Map<String, Object> getConfigurationWithBootstrapServer(Map<String, Object> configuration, String bootstrapServersConfig) {
|
||||
private void filterStreamManagedConfiguration(Map<String, Object> configuration) {
|
||||
if (configuration.containsKey(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG)
|
||||
&& configuration.get(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG).equals(true)) {
|
||||
logger.warn(constructIgnoredConfigMessage(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG) +
|
||||
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG + "=true is not supported by the Kafka binder");
|
||||
configuration.remove(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG);
|
||||
}
|
||||
if (configuration.containsKey(ConsumerConfig.GROUP_ID_CONFIG)) {
|
||||
logger.warn(constructIgnoredConfigMessage(ConsumerConfig.GROUP_ID_CONFIG) +
|
||||
"Use spring.cloud.stream.default.group or spring.cloud.stream.binding.<name>.group to specify " +
|
||||
"the group instead of " + ConsumerConfig.GROUP_ID_CONFIG);
|
||||
configuration.remove(ConsumerConfig.GROUP_ID_CONFIG);
|
||||
}
|
||||
}
|
||||
|
||||
private String constructIgnoredConfigMessage(String config) {
|
||||
return String.format("Ignoring provided value(s) for '%s'. ", config);
|
||||
}
|
||||
|
||||
private Map<String, Object> getConfigurationWithBootstrapServer(
|
||||
Map<String, Object> configuration, String bootstrapServersConfig) {
|
||||
if (ObjectUtils.isEmpty(configuration.get(bootstrapServersConfig))) {
|
||||
configuration.put(bootstrapServersConfig, getKafkaConnectionString());
|
||||
}
|
||||
@@ -567,7 +594,8 @@ public class KafkaBinderConfigurationProperties {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<String> bootStrapServers = (List<String>) configuration
|
||||
.get(bootstrapServersConfig);
|
||||
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
|
||||
if (bootStrapServers.size() == 1
|
||||
&& bootStrapServers.get(0).equals("localhost:9092")) {
|
||||
configuration.put(bootstrapServersConfig, getKafkaConnectionString());
|
||||
}
|
||||
}
|
||||
@@ -615,9 +643,10 @@ public class KafkaBinderConfigurationProperties {
|
||||
}
|
||||
|
||||
/**
|
||||
* An combination of {@link ProducerProperties} and {@link KafkaProducerProperties}
|
||||
* so that common and kafka-specific properties can be set for the transactional
|
||||
* An combination of {@link ProducerProperties} and {@link KafkaProducerProperties} so
|
||||
* that common and kafka-specific properties can be set for the transactional
|
||||
* producer.
|
||||
*
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class CombinedProducerProperties {
|
||||
@@ -642,8 +671,10 @@ public class KafkaBinderConfigurationProperties {
|
||||
return this.producerProperties.getPartitionSelectorExpression();
|
||||
}
|
||||
|
||||
public void setPartitionSelectorExpression(Expression partitionSelectorExpression) {
|
||||
this.producerProperties.setPartitionSelectorExpression(partitionSelectorExpression);
|
||||
public void setPartitionSelectorExpression(
|
||||
Expression partitionSelectorExpression) {
|
||||
this.producerProperties
|
||||
.setPartitionSelectorExpression(partitionSelectorExpression);
|
||||
}
|
||||
|
||||
public @Min(value = 1, message = "Partition count should be greater than zero.") int getPartitionCount() {
|
||||
@@ -662,11 +693,13 @@ public class KafkaBinderConfigurationProperties {
|
||||
this.producerProperties.setRequiredGroups(requiredGroups);
|
||||
}
|
||||
|
||||
public @AssertTrue(message = "Partition key expression and partition key extractor class properties are mutually exclusive.") boolean isValidPartitionKeyProperty() {
|
||||
public @AssertTrue(message = "Partition key expression and partition key extractor class properties "
|
||||
+ "are mutually exclusive.") boolean isValidPartitionKeyProperty() {
|
||||
return this.producerProperties.isValidPartitionKeyProperty();
|
||||
}
|
||||
|
||||
public @AssertTrue(message = "Partition selector class and partition selector expression properties are mutually exclusive.") boolean isValidPartitionSelectorProperty() {
|
||||
public @AssertTrue(message = "Partition selector class and partition selector expression "
|
||||
+ "properties are mutually exclusive.") boolean isValidPartitionSelectorProperty() {
|
||||
return this.producerProperties.isValidPartitionSelectorProperty();
|
||||
}
|
||||
|
||||
@@ -699,7 +732,8 @@ public class KafkaBinderConfigurationProperties {
|
||||
}
|
||||
|
||||
public void setPartitionKeyExtractorName(String partitionKeyExtractorName) {
|
||||
this.producerProperties.setPartitionKeyExtractorName(partitionKeyExtractorName);
|
||||
this.producerProperties
|
||||
.setPartitionKeyExtractorName(partitionKeyExtractorName);
|
||||
}
|
||||
|
||||
public String getPartitionSelectorName() {
|
||||
@@ -766,17 +800,28 @@ public class KafkaBinderConfigurationProperties {
|
||||
this.kafkaProducerProperties.setConfiguration(configuration);
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public KafkaAdminProperties getAdmin() {
|
||||
return this.kafkaProducerProperties.getAdmin();
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public void setAdmin(KafkaAdminProperties admin) {
|
||||
this.kafkaProducerProperties.setAdmin(admin);
|
||||
}
|
||||
|
||||
public KafkaTopicProperties getTopic() {
|
||||
return this.kafkaProducerProperties.getTopic();
|
||||
}
|
||||
|
||||
public void setTopic(KafkaTopicProperties topic) {
|
||||
this.kafkaProducerProperties.setTopic(topic);
|
||||
}
|
||||
|
||||
public KafkaProducerProperties getExtension() {
|
||||
return this.kafkaProducerProperties;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -45,4 +45,5 @@ public class KafkaBindingProperties implements BinderSpecificPropertiesProvider
|
||||
public void setProducer(KafkaProducerProperties producer) {
|
||||
this.producer = producer;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2016-2018 the original author or authors.
|
||||
* Copyright 2016-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,6 +19,7 @@ package org.springframework.cloud.stream.binder.kafka.properties;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.boot.context.properties.DeprecatedConfigurationProperty;
|
||||
|
||||
/**
|
||||
* Extended consumer properties for Kafka binder.
|
||||
@@ -27,6 +28,7 @@ import java.util.Map;
|
||||
* @author Ilayaperumal Gopinathan
|
||||
* @author Soby Chacko
|
||||
* @author Gary Russell
|
||||
* @author Aldo Sinanaj
|
||||
*
|
||||
* <p>
|
||||
* Thanks to Laszlo Szabo for providing the initial patch for generic property support.
|
||||
@@ -38,6 +40,7 @@ public class KafkaConsumerProperties {
|
||||
* Enumeration for starting consumer offset.
|
||||
*/
|
||||
public enum StartOffset {
|
||||
|
||||
/**
|
||||
* Starting from earliest offset.
|
||||
*/
|
||||
@@ -56,12 +59,14 @@ public class KafkaConsumerProperties {
|
||||
public long getReferencePoint() {
|
||||
return this.referencePoint;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard headers for the message.
|
||||
*/
|
||||
public enum StandardHeaders {
|
||||
|
||||
/**
|
||||
* No headers.
|
||||
*/
|
||||
@@ -78,6 +83,7 @@ public class KafkaConsumerProperties {
|
||||
* Indicating both ID and timestamp headers.
|
||||
*/
|
||||
both
|
||||
|
||||
}
|
||||
|
||||
private boolean ackEachRecord;
|
||||
@@ -112,7 +118,12 @@ public class KafkaConsumerProperties {
|
||||
|
||||
private Map<String, String> configuration = new HashMap<>();
|
||||
|
||||
private KafkaAdminProperties admin = new KafkaAdminProperties();
|
||||
private KafkaTopicProperties topic = new KafkaTopicProperties();
|
||||
|
||||
/**
|
||||
* Timeout used for polling in pollable consumers.
|
||||
*/
|
||||
private long pollTimeout = org.springframework.kafka.listener.ConsumerProperties.DEFAULT_POLL_TIMEOUT;
|
||||
|
||||
public boolean isAckEachRecord() {
|
||||
return this.ackEachRecord;
|
||||
@@ -221,6 +232,7 @@ public class KafkaConsumerProperties {
|
||||
public void setDlqProducerProperties(KafkaProducerProperties dlqProducerProperties) {
|
||||
this.dlqProducerProperties = dlqProducerProperties;
|
||||
}
|
||||
|
||||
public StandardHeaders getStandardHeaders() {
|
||||
return this.standardHeaders;
|
||||
}
|
||||
@@ -253,12 +265,42 @@ public class KafkaConsumerProperties {
|
||||
this.destinationIsPattern = destinationIsPattern;
|
||||
}
|
||||
|
||||
/**
|
||||
* No longer used; get properties such as this via {@link #getTopic()}.
|
||||
* @return Kafka admin properties
|
||||
* @deprecated No longer used
|
||||
*/
|
||||
@Deprecated
|
||||
@DeprecatedConfigurationProperty(reason = "Not used since 2.1.1, set properties such as this via 'topic'")
|
||||
@SuppressWarnings("deprecation")
|
||||
public KafkaAdminProperties getAdmin() {
|
||||
return this.admin;
|
||||
// Temporary workaround to copy the topic properties to the admin one.
|
||||
final KafkaAdminProperties kafkaAdminProperties = new KafkaAdminProperties();
|
||||
kafkaAdminProperties.setReplicationFactor(this.topic.getReplicationFactor());
|
||||
kafkaAdminProperties.setReplicasAssignments(this.topic.getReplicasAssignments());
|
||||
kafkaAdminProperties.setConfiguration(this.topic.getProperties());
|
||||
return kafkaAdminProperties;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
@SuppressWarnings("deprecation")
|
||||
public void setAdmin(KafkaAdminProperties admin) {
|
||||
this.admin = admin;
|
||||
this.topic = admin;
|
||||
}
|
||||
|
||||
public KafkaTopicProperties getTopic() {
|
||||
return this.topic;
|
||||
}
|
||||
|
||||
public void setTopic(KafkaTopicProperties topic) {
|
||||
this.topic = topic;
|
||||
}
|
||||
|
||||
public long getPollTimeout() {
|
||||
return this.pollTimeout;
|
||||
}
|
||||
|
||||
public void setPollTimeout(long pollTimeout) {
|
||||
this.pollTimeout = pollTimeout;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,12 +16,15 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.properties;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.AbstractExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
|
||||
/**
|
||||
* Kafka specific extended binding properties class that extends from {@link AbstractExtendedBindingProperties}.
|
||||
* Kafka specific extended binding properties class that extends from
|
||||
* {@link AbstractExtendedBindingProperties}.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
* @author Gary Russell
|
||||
@@ -29,8 +32,8 @@ import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
* @author Oleg Zhurakousky
|
||||
*/
|
||||
@ConfigurationProperties("spring.cloud.stream.kafka")
|
||||
public class KafkaExtendedBindingProperties
|
||||
extends AbstractExtendedBindingProperties<KafkaConsumerProperties, KafkaProducerProperties, KafkaBindingProperties> {
|
||||
public class KafkaExtendedBindingProperties extends
|
||||
AbstractExtendedBindingProperties<KafkaConsumerProperties, KafkaProducerProperties, KafkaBindingProperties> {
|
||||
|
||||
private static final String DEFAULTS_PREFIX = "spring.cloud.stream.kafka.default";
|
||||
|
||||
@@ -39,8 +42,14 @@ public class KafkaExtendedBindingProperties
|
||||
return DEFAULTS_PREFIX;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, KafkaBindingProperties> getBindings() {
|
||||
return this.doGetBindings();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return KafkaBindingProperties.class;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -21,6 +21,7 @@ import java.util.Map;

import javax.validation.constraints.NotNull;

import org.springframework.boot.context.properties.DeprecatedConfigurationProperty;
import org.springframework.expression.Expression;

/**
@@ -29,6 +30,7 @@ import org.springframework.expression.Expression;
* @author Marius Bogoevici
* @author Henryk Konsek
* @author Gary Russell
* @author Aldo Sinanaj
*/
public class KafkaProducerProperties {

@@ -38,6 +40,8 @@ public class KafkaProducerProperties {

private boolean sync;

private Expression sendTimeoutExpression;

private int batchTimeout;

private Expression messageKeyExpression;
@@ -46,7 +50,11 @@ public class KafkaProducerProperties {

private Map<String, String> configuration = new HashMap<>();

private KafkaAdminProperties admin = new KafkaAdminProperties();
private KafkaTopicProperties topic = new KafkaTopicProperties();

private boolean useTopicHeader;

private String recordMetadataChannel;

public int getBufferSize() {
return this.bufferSize;
@@ -73,6 +81,14 @@ public class KafkaProducerProperties {
this.sync = sync;
}

public Expression getSendTimeoutExpression() {
return this.sendTimeoutExpression;
}

public void setSendTimeoutExpression(Expression sendTimeoutExpression) {
this.sendTimeoutExpression = sendTimeoutExpression;
}

public int getBatchTimeout() {
return this.batchTimeout;
}
@@ -105,29 +121,84 @@ public class KafkaProducerProperties {
this.configuration = configuration;
}

/**
* No longer used; get properties such as this via {@link #getTopic()}.
* @return Kafka admin properties
* @deprecated No longer used
*/
@Deprecated
@DeprecatedConfigurationProperty(reason = "Not used since 2.1.1, set properties such as this via 'topic'")
@SuppressWarnings("deprecation")
public KafkaAdminProperties getAdmin() {
return this.admin;
// Temporary workaround to copy the topic properties to the admin one.
final KafkaAdminProperties kafkaAdminProperties = new KafkaAdminProperties();
kafkaAdminProperties.setReplicationFactor(this.topic.getReplicationFactor());
kafkaAdminProperties.setReplicasAssignments(this.topic.getReplicasAssignments());
kafkaAdminProperties.setConfiguration(this.topic.getProperties());
return kafkaAdminProperties;
}

@Deprecated
@SuppressWarnings("deprecation")
public void setAdmin(KafkaAdminProperties admin) {
this.admin = admin;
this.topic = admin;
}

public KafkaTopicProperties getTopic() {
return this.topic;
}

public void setTopic(KafkaTopicProperties topic) {
this.topic = topic;
}

public boolean isUseTopicHeader() {
return this.useTopicHeader;
}

public void setUseTopicHeader(boolean useTopicHeader) {
this.useTopicHeader = useTopicHeader;
}

public String getRecordMetadataChannel() {
return this.recordMetadataChannel;
}

public void setRecordMetadataChannel(String recordMetadataChannel) {
this.recordMetadataChannel = recordMetadataChannel;
}

/**
* Enumeration for compression types.
*/
public enum CompressionType {

/**
* No compression.
*/
none,

/**
* gzip based compression.
*/
gzip,

/**
* snappy based compression.
*/
snappy
snappy,

/**
* lz4 compression.
*/
lz4,

// /** // TODO: uncomment and fix docs when kafka-clients 2.1.0 or newer is the
// default
// * zstd compression
// */
// zstd

}

}

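The hunk above deprecates the producer-side 'admin' property in favour of the new 'topic' property: the deprecated getter now materializes a KafkaAdminProperties view copied from 'topic', and the deprecated setter stores the supplied value as the topic properties. A hedged sketch of what that means for callers; the class name and the sample values are illustrative only:

import java.util.Collections;

import org.springframework.cloud.stream.binder.kafka.properties.KafkaAdminProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaTopicProperties;

public class TopicPropertiesMigrationSketch {

	@SuppressWarnings("deprecation")
	public static void main(String[] args) {
		KafkaProducerProperties producer = new KafkaProducerProperties();

		// New style: describe the topic via 'topic' (replication factor, per-topic configs).
		KafkaTopicProperties topic = new KafkaTopicProperties();
		topic.setReplicationFactor((short) 3);
		topic.setProperties(Collections.singletonMap("retention.ms", "86400000"));
		producer.setTopic(topic);

		// Legacy style: the deprecated accessor now returns an admin view copied from 'topic'.
		KafkaAdminProperties legacyView = producer.getAdmin();
		System.out.println(legacyView.getReplicationFactor()); // 3
		System.out.println(legacyView.getConfiguration());     // {retention.ms=86400000}
	}

}
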
@@ -0,0 +1,62 @@
/*
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.cloud.stream.binder.kafka.properties;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
* Properties for configuring topics.
*
* @author Aldo Sinanaj
* @since 2.2
*
*/
public class KafkaTopicProperties {

private Short replicationFactor;

private Map<Integer, List<Integer>> replicasAssignments = new HashMap<>();

private Map<String, String> properties = new HashMap<>();

public Short getReplicationFactor() {
return replicationFactor;
}

public void setReplicationFactor(Short replicationFactor) {
this.replicationFactor = replicationFactor;
}

public Map<Integer, List<Integer>> getReplicasAssignments() {
return replicasAssignments;
}

public void setReplicasAssignments(Map<Integer, List<Integer>> replicasAssignments) {
this.replicasAssignments = replicasAssignments;
}

public Map<String, String> getProperties() {
return properties;
}

public void setProperties(Map<String, String> properties) {
this.properties = properties;
}

}

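KafkaTopicProperties is consumed by the provisioner shown further down: explicit replica assignments win over a plain replication factor, and entries in 'properties' become per-topic configs. A condensed, hedged restatement of that decision under those assumptions (the real code appears in the KafkaTopicProvisioner hunks below; the helper class and method names here are illustrative):

import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.admin.NewTopic;

import org.springframework.cloud.stream.binder.kafka.properties.KafkaTopicProperties;

final class NewTopicFromTopicPropertiesSketch {

	// Mirrors the branch in createTopicAndPartitions(): assignments first, otherwise
	// partition count + replication factor (falling back to the binder-wide default).
	static NewTopic newTopic(String topicName, int partitionCount,
			short binderDefaultReplicationFactor, KafkaTopicProperties topicProperties) {
		Map<Integer, List<Integer>> assignments = topicProperties.getReplicasAssignments();
		NewTopic newTopic;
		if (assignments != null && !assignments.isEmpty()) {
			newTopic = new NewTopic(topicName, assignments);
		}
		else {
			newTopic = new NewTopic(topicName, partitionCount,
					topicProperties.getReplicationFactor() != null
							? topicProperties.getReplicationFactor()
							: binderDefaultReplicationFactor);
		}
		if (!topicProperties.getProperties().isEmpty()) {
			newTopic.configs(topicProperties.getProperties());
		}
		return newTopic;
	}

}
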
@@ -5,7 +5,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -40,16 +40,17 @@ import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.errors.TopicExistsException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;

import org.springframework.beans.factory.InitializingBean;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.cloud.stream.binder.BinderException;
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaAdminProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaTopicProperties;
import org.springframework.cloud.stream.binder.kafka.utils.KafkaTopicUtils;
import org.springframework.cloud.stream.provisioning.ConsumerDestination;
import org.springframework.cloud.stream.provisioning.ProducerDestination;
@@ -60,6 +61,7 @@ import org.springframework.retry.backoff.ExponentialBackOffPolicy;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

@@ -71,9 +73,13 @@ import org.springframework.util.StringUtils;
* @author Ilayaperumal Gopinathan
* @author Simon Flandergan
* @author Oleg Zhurakousky
* @author Aldo Sinanaj
*/
public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsumerProperties<KafkaConsumerProperties>,
ExtendedProducerProperties<KafkaProducerProperties>>, InitializingBean {
public class KafkaTopicProvisioner implements
// @checkstyle:off
ProvisioningProvider<ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>>,
// @checkstyle:on
InitializingBean {

private static final int DEFAULT_OPERATION_TIMEOUT = 30;

@@ -87,17 +93,18 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu

private RetryOperations metadataRetryOperations;

public KafkaTopicProvisioner(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
KafkaProperties kafkaProperties) {
public KafkaTopicProvisioner(
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
KafkaProperties kafkaProperties) {
Assert.isTrue(kafkaProperties != null, "KafkaProperties cannot be null");
this.adminClientProperties = kafkaProperties.buildAdminProperties();
this.configurationProperties = kafkaBinderConfigurationProperties;
normalalizeBootPropsWithBinder(this.adminClientProperties, kafkaProperties, kafkaBinderConfigurationProperties);
normalalizeBootPropsWithBinder(this.adminClientProperties, kafkaProperties,
kafkaBinderConfigurationProperties);
}

/**
* Mutator for metadata retry operations.
*
* @param metadataRetryOperations the retry configuration
*/
public void setMetadataRetryOperations(RetryOperations metadataRetryOperations) {
@@ -131,18 +138,22 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
}
KafkaTopicUtils.validateTopicName(name);
try (AdminClient adminClient = AdminClient.create(this.adminClientProperties)) {
createTopic(adminClient, name, properties.getPartitionCount(), false, properties.getExtension().getAdmin());
createTopic(adminClient, name, properties.getPartitionCount(), false,
properties.getExtension().getTopic());
int partitions = 0;
if (this.configurationProperties.isAutoCreateTopics()) {
DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singletonList(name));
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult.all();
DescribeTopicsResult describeTopicsResult = adminClient
.describeTopics(Collections.singletonList(name));
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult
.all();

Map<String, TopicDescription> topicDescriptions = null;
try {
topicDescriptions = all.get(this.operationTimeout, TimeUnit.SECONDS);
}
catch (Exception ex) {
throw new ProvisioningException("Problems encountered with partitions finding", ex);
throw new ProvisioningException(
"Problems encountered with partitions finding", ex);
}
TopicDescription topicDescription = topicDescriptions.get(name);
partitions = topicDescription.partitions().size();
@@ -152,7 +163,8 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
}

@Override
public ConsumerDestination provisionConsumerDestination(final String name, final String group,
public ConsumerDestination provisionConsumerDestination(final String name,
final String group,
ExtendedConsumerProperties<KafkaConsumerProperties> properties) {
if (!properties.isMultiplex()) {
return doProvisionConsumerDestination(name, group, properties);
@@ -166,7 +178,8 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
}
}

private ConsumerDestination doProvisionConsumerDestination(final String name, final String group,
private ConsumerDestination doProvisionConsumerDestination(final String name,
final String group,
ExtendedConsumerProperties<KafkaConsumerProperties> properties) {

if (properties.getExtension().isDestinationIsPattern()) {
@@ -188,18 +201,24 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
int partitionCount = properties.getInstanceCount() * properties.getConcurrency();
ConsumerDestination consumerDestination = new KafkaConsumerDestination(name);
try (AdminClient adminClient = createAdminClient()) {
createTopic(adminClient, name, partitionCount, properties.getExtension().isAutoRebalanceEnabled(),
properties.getExtension().getAdmin());
createTopic(adminClient, name, partitionCount,
properties.getExtension().isAutoRebalanceEnabled(),
properties.getExtension().getTopic());
if (this.configurationProperties.isAutoCreateTopics()) {
DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singletonList(name));
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult.all();
DescribeTopicsResult describeTopicsResult = adminClient
.describeTopics(Collections.singletonList(name));
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult
.all();
try {
Map<String, TopicDescription> topicDescriptions = all.get(this.operationTimeout, TimeUnit.SECONDS);
Map<String, TopicDescription> topicDescriptions = all
.get(this.operationTimeout, TimeUnit.SECONDS);
TopicDescription topicDescription = topicDescriptions.get(name);
int partitions = topicDescription.partitions().size();
consumerDestination = createDlqIfNeedBe(adminClient, name, group, properties, anonymous, partitions);
consumerDestination = createDlqIfNeedBe(adminClient, name, group,
properties, anonymous, partitions);
if (consumerDestination == null) {
consumerDestination = new KafkaConsumerDestination(name, partitions);
consumerDestination = new KafkaConsumerDestination(name,
partitions);
}
}
catch (Exception ex) {
@@ -215,22 +234,24 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
}

/**
* In general, binder properties supersede boot kafka properties.
* The one exception is the bootstrap servers. In that case, we should only override
* the boot properties if (there is a binder property AND it is a non-default value)
* OR (if there is no boot property); this is needed because the binder property
* never returns a null value.
* In general, binder properties supersede boot kafka properties. The one exception is
* the bootstrap servers. In that case, we should only override the boot properties if
* (there is a binder property AND it is a non-default value) OR (if there is no boot
* property); this is needed because the binder property never returns a null value.
* @param adminProps the admin properties to normalize.
* @param bootProps the boot kafka properties.
* @param binderProps the binder kafka properties.
*/
private void normalalizeBootPropsWithBinder(Map<String, Object> adminProps, KafkaProperties bootProps,
KafkaBinderConfigurationProperties binderProps) {
private void normalalizeBootPropsWithBinder(Map<String, Object> adminProps,
KafkaProperties bootProps, KafkaBinderConfigurationProperties binderProps) {
// First deal with the outlier
String kafkaConnectionString = binderProps.getKafkaConnectionString();
if (ObjectUtils.isEmpty(adminProps.get(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG))
|| !kafkaConnectionString.equals(binderProps.getDefaultKafkaConnectionString())) {
adminProps.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConnectionString);
if (ObjectUtils
.isEmpty(adminProps.get(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG))
|| !kafkaConnectionString
.equals(binderProps.getDefaultKafkaConnectionString())) {
adminProps.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
kafkaConnectionString);
}
// Now override any boot values with binder values
Map<String, String> binderProperties = binderProps.getConfiguration();
@@ -243,21 +264,24 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
if (adminConfigNames.contains(key)) {
Object replaced = adminProps.put(key, value);
if (replaced != null && this.logger.isDebugEnabled()) {
this.logger.debug("Overrode boot property: [" + key + "], from: [" + replaced + "] to: [" + value + "]");
this.logger.debug("Overrode boot property: [" + key + "], from: ["
+ replaced + "] to: [" + value + "]");
}
}
});
}

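The javadoc above describes the only place where a boot property can win: bootstrap.servers. A compact, hedged restatement of that rule (the class and method names here are illustrative, not part of the binder API, and the null check stands in for ObjectUtils.isEmpty):

import java.util.Map;

import org.apache.kafka.clients.admin.AdminClientConfig;

final class BootstrapServersPrecedenceSketch {

	// Override the boot-supplied servers only when the boot value is missing or the
	// binder value was explicitly changed away from its default (the binder property
	// is never null, so "unset" has to be detected by comparing against the default).
	static void applyBinderServers(Map<String, Object> adminProps, String binderServers,
			String binderDefaultServers) {
		Object bootServers = adminProps.get(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG);
		if (bootServers == null || !binderServers.equals(binderDefaultServers)) {
			adminProps.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, binderServers);
		}
	}

}
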
private ConsumerDestination createDlqIfNeedBe(AdminClient adminClient, String name, String group,
ExtendedConsumerProperties<KafkaConsumerProperties> properties,
boolean anonymous, int partitions) {
private ConsumerDestination createDlqIfNeedBe(AdminClient adminClient, String name,
String group, ExtendedConsumerProperties<KafkaConsumerProperties> properties,
boolean anonymous, int partitions) {
if (properties.getExtension().isEnableDlq() && !anonymous) {
String dlqTopic = StringUtils.hasText(properties.getExtension().getDlqName()) ?
properties.getExtension().getDlqName() : "error." + name + "." + group;
String dlqTopic = StringUtils.hasText(properties.getExtension().getDlqName())
? properties.getExtension().getDlqName()
: "error." + name + "." + group;
try {
createTopicAndPartitions(adminClient, dlqTopic, partitions,
properties.getExtension().isAutoRebalanceEnabled(), properties.getExtension().getAdmin());
properties.getExtension().isAutoRebalanceEnabled(),
properties.getExtension().getTopic());
}
catch (Throwable throwable) {
if (throwable instanceof Error) {
@@ -272,27 +296,34 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
return null;
}

private void createTopic(AdminClient adminClient, String name, int partitionCount, boolean tolerateLowerPartitionsOnBroker,
KafkaAdminProperties properties) {
private void createTopic(AdminClient adminClient, String name, int partitionCount,
boolean tolerateLowerPartitionsOnBroker, KafkaTopicProperties properties) {
try {
createTopicIfNecessary(adminClient, name, partitionCount, tolerateLowerPartitionsOnBroker, properties);
createTopicIfNecessary(adminClient, name, partitionCount,
tolerateLowerPartitionsOnBroker, properties);
}
// TODO: Remove catching Throwable. See this thread:
// TODO:
// https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/pull/514#discussion_r241075940
catch (Throwable throwable) {
if (throwable instanceof Error) {
throw (Error) throwable;
}
else {
throw new ProvisioningException("provisioning exception", throwable);
// TODO:
// https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/pull/514#discussion_r241075940
throw new ProvisioningException("Provisioning exception", throwable);
}
}
}

private void createTopicIfNecessary(AdminClient adminClient, final String topicName, final int partitionCount,
boolean tolerateLowerPartitionsOnBroker, KafkaAdminProperties properties) throws Throwable {
private void createTopicIfNecessary(AdminClient adminClient, final String topicName,
final int partitionCount, boolean tolerateLowerPartitionsOnBroker,
KafkaTopicProperties properties) throws Throwable {

if (this.configurationProperties.isAutoCreateTopics()) {
createTopicAndPartitions(adminClient, topicName, partitionCount, tolerateLowerPartitionsOnBroker,
properties);
createTopicAndPartitions(adminClient, topicName, partitionCount,
tolerateLowerPartitionsOnBroker, properties);
}
else {
this.logger.info("Auto creation of topics is disabled.");
@@ -305,12 +336,14 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
* @param adminClient kafka admin client
* @param topicName topic name
* @param partitionCount partition count
* @param tolerateLowerPartitionsOnBroker whether lower partitions count on broker is tolerated ot not
* @param adminProperties kafka admin properties
* @param tolerateLowerPartitionsOnBroker whether lower partitions count on broker is
* tolerated ot not
* @param topicProperties kafka topic properties
* @throws Throwable from topic creation
*/
private void createTopicAndPartitions(AdminClient adminClient, final String topicName, final int partitionCount,
boolean tolerateLowerPartitionsOnBroker, KafkaAdminProperties adminProperties) throws Throwable {
private void createTopicAndPartitions(AdminClient adminClient, final String topicName,
final int partitionCount, boolean tolerateLowerPartitionsOnBroker,
KafkaTopicProperties topicProperties) throws Throwable {

ListTopicsResult listTopicsResult = adminClient.listTopics();
KafkaFuture<Set<String>> namesFutures = listTopicsResult.names();
@@ -318,54 +351,71 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
Set<String> names = namesFutures.get(this.operationTimeout, TimeUnit.SECONDS);
if (names.contains(topicName)) {
// only consider minPartitionCount for resizing if autoAddPartitions is true
int effectivePartitionCount = this.configurationProperties.isAutoAddPartitions()
? Math.max(this.configurationProperties.getMinPartitionCount(), partitionCount)
: partitionCount;
DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singletonList(topicName));
KafkaFuture<Map<String, TopicDescription>> topicDescriptionsFuture = describeTopicsResult.all();
Map<String, TopicDescription> topicDescriptions = topicDescriptionsFuture.get(this.operationTimeout, TimeUnit.SECONDS);
int effectivePartitionCount = this.configurationProperties
.isAutoAddPartitions()
? Math.max(
this.configurationProperties.getMinPartitionCount(),
partitionCount)
: partitionCount;
DescribeTopicsResult describeTopicsResult = adminClient
.describeTopics(Collections.singletonList(topicName));
KafkaFuture<Map<String, TopicDescription>> topicDescriptionsFuture = describeTopicsResult
.all();
Map<String, TopicDescription> topicDescriptions = topicDescriptionsFuture
.get(this.operationTimeout, TimeUnit.SECONDS);
TopicDescription topicDescription = topicDescriptions.get(topicName);
int partitionSize = topicDescription.partitions().size();
if (partitionSize < effectivePartitionCount) {
if (this.configurationProperties.isAutoAddPartitions()) {
CreatePartitionsResult partitions = adminClient.createPartitions(
Collections.singletonMap(topicName, NewPartitions.increaseTo(effectivePartitionCount)));
CreatePartitionsResult partitions = adminClient
.createPartitions(Collections.singletonMap(topicName,
NewPartitions.increaseTo(effectivePartitionCount)));
partitions.all().get(this.operationTimeout, TimeUnit.SECONDS);
}
else if (tolerateLowerPartitionsOnBroker) {
this.logger.warn("The number of expected partitions was: " + partitionCount + ", but "
+ partitionSize + (partitionSize > 1 ? " have " : " has ") + "been found instead."
+ "There will be " + (effectivePartitionCount - partitionSize) + " idle consumers");
this.logger.warn("The number of expected partitions was: "
+ partitionCount + ", but " + partitionSize
+ (partitionSize > 1 ? " have " : " has ")
+ "been found instead." + "There will be "
+ (effectivePartitionCount - partitionSize)
+ " idle consumers");
}
else {
throw new ProvisioningException("The number of expected partitions was: " + partitionCount + ", but "
+ partitionSize + (partitionSize > 1 ? " have " : " has ") + "been found instead."
+ "Consider either increasing the partition count of the topic or enabling " +
"`autoAddPartitions`");
throw new ProvisioningException(
"The number of expected partitions was: " + partitionCount
+ ", but " + partitionSize
+ (partitionSize > 1 ? " have " : " has ")
+ "been found instead."
+ "Consider either increasing the partition count of the topic or enabling "
+ "`autoAddPartitions`");
}
}
}
else {
// always consider minPartitionCount for topic creation
final int effectivePartitionCount = Math.max(this.configurationProperties.getMinPartitionCount(),
partitionCount);
final int effectivePartitionCount = Math.max(
this.configurationProperties.getMinPartitionCount(), partitionCount);
this.metadataRetryOperations.execute((context) -> {

NewTopic newTopic;
Map<Integer, List<Integer>> replicasAssignments = adminProperties.getReplicasAssignments();
if (replicasAssignments != null && replicasAssignments.size() > 0) {
newTopic = new NewTopic(topicName, adminProperties.getReplicasAssignments());
Map<Integer, List<Integer>> replicasAssignments = topicProperties
.getReplicasAssignments();
if (replicasAssignments != null && replicasAssignments.size() > 0) {
newTopic = new NewTopic(topicName,
topicProperties.getReplicasAssignments());
}
else {
newTopic = new NewTopic(topicName, effectivePartitionCount,
adminProperties.getReplicationFactor() != null
? adminProperties.getReplicationFactor()
: this.configurationProperties.getReplicationFactor());
topicProperties.getReplicationFactor() != null
? topicProperties.getReplicationFactor()
: this.configurationProperties
.getReplicationFactor());
}
if (adminProperties.getConfiguration().size() > 0) {
newTopic.configs(adminProperties.getConfiguration());
if (topicProperties.getProperties().size() > 0) {
newTopic.configs(topicProperties.getProperties());
}
CreateTopicsResult createTopicsResult = adminClient.createTopics(Collections.singletonList(newTopic));
CreateTopicsResult createTopicsResult = adminClient
.createTopics(Collections.singletonList(newTopic));
try {
createTopicsResult.all().get(this.operationTimeout, TimeUnit.SECONDS);
}
@@ -373,7 +423,8 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
if (ex instanceof ExecutionException) {
if (ex.getCause() instanceof TopicExistsException) {
if (this.logger.isWarnEnabled()) {
this.logger.warn("Attempt to create topic: " + topicName + ". Topic already exists.");
this.logger.warn("Attempt to create topic: " + topicName
+ ". Topic already exists.");
}
}
else {
@@ -392,28 +443,67 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
}

public Collection<PartitionInfo> getPartitionsForTopic(final int partitionCount,
final boolean tolerateLowerPartitionsOnBroker,
final Callable<Collection<PartitionInfo>> callable) {
final boolean tolerateLowerPartitionsOnBroker,
final Callable<Collection<PartitionInfo>> callable, final String topicName) {
try {
return this.metadataRetryOperations
.execute((context) -> {
Collection<PartitionInfo> partitions = callable.call();
// do a sanity check on the partition set
int partitionSize = partitions.size();
if (partitionSize < partitionCount) {
if (tolerateLowerPartitionsOnBroker) {
this.logger.warn("The number of expected partitions was: " + partitionCount + ", but "
+ partitionSize + (partitionSize > 1 ? " have " : " has ") + "been found instead."
+ "There will be " + (partitionCount - partitionSize) + " idle consumers");
}
else {
throw new IllegalStateException("The number of expected partitions was: "
+ partitionCount + ", but " + partitionSize
+ (partitionSize > 1 ? " have " : " has ") + "been found instead");
}
return this.metadataRetryOperations.execute((context) -> {
Collection<PartitionInfo> partitions = Collections.emptyList();

try {
// This call may return null or throw an exception.
partitions = callable.call();
}
catch (Exception ex) {
// The above call can potentially throw exceptions such as timeout. If
// we can determine
// that the exception was due to an unknown topic on the broker, just
// simply rethrow that.
if (ex instanceof UnknownTopicOrPartitionException) {
throw ex;
}
this.logger.error("Failed to obtain partition information", ex);
}
// In some cases, the above partition query may not throw an UnknownTopic..Exception for various reasons.
// For that, we are forcing another query to ensure that the topic is present on the server.
if (CollectionUtils.isEmpty(partitions)) {
try (AdminClient adminClient = AdminClient
.create(this.adminClientProperties)) {
final DescribeTopicsResult describeTopicsResult = adminClient
.describeTopics(Collections.singletonList(topicName));

describeTopicsResult.all().get();
}
catch (ExecutionException ex) {
if (ex.getCause() instanceof UnknownTopicOrPartitionException) {
throw (UnknownTopicOrPartitionException) ex.getCause();
}
return partitions;
});
else {
logger.warn("No partitions have been retrieved for the topic "
+ "(" + topicName
+ "). This will affect the health check.");
}
}
}
// do a sanity check on the partition set
int partitionSize = CollectionUtils.isEmpty(partitions) ? 0 : partitions.size();
if (partitionSize < partitionCount) {
if (tolerateLowerPartitionsOnBroker) {
this.logger.warn("The number of expected partitions was: "
+ partitionCount + ", but " + partitionSize
+ (partitionSize > 1 ? " have " : " has ")
+ "been found instead." + "There will be "
+ (partitionCount - partitionSize) + " idle consumers");
}
else {
throw new IllegalStateException(
"The number of expected partitions was: " + partitionCount
+ ", but " + partitionSize
+ (partitionSize > 1 ? " have " : " has ")
+ "been found instead");
}
}
return partitions;
});
}
catch (Exception ex) {
this.logger.error("Cannot initialize Binder", ex);
@@ -444,11 +534,10 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu

@Override
public String toString() {
return "KafkaProducerDestination{" +
"producerDestinationName='" + producerDestinationName + '\'' +
", partitions=" + partitions +
'}';
return "KafkaProducerDestination{" + "producerDestinationName='"
+ producerDestinationName + '\'' + ", partitions=" + partitions + '}';
}

}

private static final class KafkaConsumerDestination implements ConsumerDestination {
@@ -467,7 +556,8 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu
this(consumerDestinationName, partitions, null);
}

KafkaConsumerDestination(String consumerDestinationName, Integer partitions, String dlqName) {
KafkaConsumerDestination(String consumerDestinationName, Integer partitions,
String dlqName) {
this.consumerDestinationName = consumerDestinationName;
this.partitions = partitions;
this.dlqName = dlqName;
@@ -480,11 +570,11 @@ public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsu

@Override
public String toString() {
return "KafkaConsumerDestination{" +
"consumerDestinationName='" + consumerDestinationName + '\'' +
", partitions=" + partitions +
", dlqName='" + dlqName + '\'' +
'}';
return "KafkaConsumerDestination{" + "consumerDestinationName='"
+ consumerDestinationName + '\'' + ", partitions=" + partitions
+ ", dlqName='" + dlqName + '\'' + '}';
}

}

}

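The reworked getPartitionsForTopic(..) now also takes the topic name, so it can fall back to an AdminClient describeTopics(..) probe when the supplied callable yields nothing, and it rethrows UnknownTopicOrPartitionException so the health check can distinguish a missing topic from a transient lookup failure. A hedged fragment showing how a caller might invoke it; the provisioner variable, the consumer, the topic name "orders", and the expected count are placeholders, not values taken from this changeset:

// Illustrative only: the partition lookup is supplied as a Callable and the topic name
// is passed so the provisioner can double-check that the topic exists on the broker.
Collection<PartitionInfo> partitions = provisioner.getPartitionsForTopic(
		expectedPartitionCount,
		false, // do not tolerate fewer partitions than expected
		() -> consumer.partitionsFor("orders"), // hypothetical KafkaConsumer and topic
		"orders");
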
@@ -1,11 +1,11 @@
/*
* Copyright 2016 the original author or authors.
* Copyright 2016-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -30,20 +30,19 @@ public final class KafkaTopicUtils {
}

/**
* Validate topic name.
* Allowed chars are ASCII alphanumerics, '.', '_' and '-'.
*
* Validate topic name. Allowed chars are ASCII alphanumerics, '.', '_' and '-'.
* @param topicName name of the topic
*/
public static void validateTopicName(String topicName) {
try {
byte[] utf8 = topicName.getBytes("UTF-8");
for (byte b : utf8) {
if (!((b >= 'a') && (b <= 'z') || (b >= 'A') && (b <= 'Z') || (b >= '0') && (b <= '9') || (b == '.')
|| (b == '-') || (b == '_'))) {
if (!((b >= 'a') && (b <= 'z') || (b >= 'A') && (b <= 'Z')
|| (b >= '0') && (b <= '9') || (b == '.') || (b == '-')
|| (b == '_'))) {
throw new IllegalArgumentException(
"Topic name can only have ASCII alphanumerics, '.', '_' and '-', but was: '" + topicName
+ "'");
"Topic name can only have ASCII alphanumerics, '.', '_' and '-', but was: '"
+ topicName + "'");
}
}
}
@@ -51,4 +50,5 @@ public final class KafkaTopicUtils {
throw new AssertionError(ex); // Can't happen
}
}

}

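validateTopicName(..) rejects anything outside ASCII alphanumerics, '.', '_' and '-'. A quick hedged illustration; the topic names are made up for the example:

import org.springframework.cloud.stream.binder.kafka.utils.KafkaTopicUtils;

public class TopicNameValidationSketch {

	public static void main(String[] args) {
		// Passes silently: only allowed characters.
		KafkaTopicUtils.validateTopicName("orders.audit-log_v2");

		// Throws IllegalArgumentException: '#' is not an allowed character.
		KafkaTopicUtils.validateTopicName("orders#2019");
	}

}
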
@@ -0,0 +1,108 @@
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.cloud.stream.binder.kafka.properties;

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.junit.Test;

import org.springframework.boot.autoconfigure.kafka.KafkaProperties;

import static org.assertj.core.api.Assertions.assertThat;

public class KafkaBinderConfigurationPropertiesTest {

@Test
public void mergedConsumerConfigurationFiltersGroupIdFromKafkaProperties() {
KafkaProperties kafkaProperties = new KafkaProperties();
kafkaProperties.getConsumer().setGroupId("group1");
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties =
new KafkaBinderConfigurationProperties(kafkaProperties);

Map<String, Object> mergedConsumerConfiguration =
kafkaBinderConfigurationProperties.mergedConsumerConfiguration();

assertThat(mergedConsumerConfiguration).doesNotContainKeys(ConsumerConfig.GROUP_ID_CONFIG);
}

@Test
public void mergedConsumerConfigurationFiltersEnableAutoCommitFromKafkaProperties() {
KafkaProperties kafkaProperties = new KafkaProperties();
kafkaProperties.getConsumer().setEnableAutoCommit(true);
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties =
new KafkaBinderConfigurationProperties(kafkaProperties);

Map<String, Object> mergedConsumerConfiguration =
kafkaBinderConfigurationProperties.mergedConsumerConfiguration();

assertThat(mergedConsumerConfiguration).doesNotContainKeys(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG);
}

@Test
public void mergedConsumerConfigurationFiltersGroupIdFromKafkaBinderConfigurationPropertiesConfiguration() {
KafkaProperties kafkaProperties = new KafkaProperties();
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties =
new KafkaBinderConfigurationProperties(kafkaProperties);
kafkaBinderConfigurationProperties
.setConfiguration(Collections.singletonMap(ConsumerConfig.GROUP_ID_CONFIG, "group1"));

Map<String, Object> mergedConsumerConfiguration = kafkaBinderConfigurationProperties.mergedConsumerConfiguration();

assertThat(mergedConsumerConfiguration).doesNotContainKeys(ConsumerConfig.GROUP_ID_CONFIG);
}

@Test
public void mergedConsumerConfigurationFiltersEnableAutoCommitFromKafkaBinderConfigurationPropertiesConfiguration() {
KafkaProperties kafkaProperties = new KafkaProperties();
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties =
new KafkaBinderConfigurationProperties(kafkaProperties);
kafkaBinderConfigurationProperties
.setConfiguration(Collections.singletonMap(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"));

Map<String, Object> mergedConsumerConfiguration = kafkaBinderConfigurationProperties.mergedConsumerConfiguration();

assertThat(mergedConsumerConfiguration).doesNotContainKeys(ConsumerConfig.GROUP_ID_CONFIG);
}

@Test
public void mergedConsumerConfigurationFiltersGroupIdFromKafkaBinderConfigurationPropertiesConsumerProperties() {
KafkaProperties kafkaProperties = new KafkaProperties();
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties =
new KafkaBinderConfigurationProperties(kafkaProperties);
kafkaBinderConfigurationProperties
.setConsumerProperties(Collections.singletonMap(ConsumerConfig.GROUP_ID_CONFIG, "group1"));

Map<String, Object> mergedConsumerConfiguration = kafkaBinderConfigurationProperties.mergedConsumerConfiguration();

assertThat(mergedConsumerConfiguration).doesNotContainKeys(ConsumerConfig.GROUP_ID_CONFIG);
}

@Test
public void mergedConsumerConfigurationFiltersEnableAutoCommitFromKafkaBinderConfigurationPropertiesConsumerProps() {
KafkaProperties kafkaProperties = new KafkaProperties();
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties =
new KafkaBinderConfigurationProperties(kafkaProperties);
kafkaBinderConfigurationProperties
.setConsumerProperties(Collections.singletonMap(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"));

Map<String, Object> mergedConsumerConfiguration = kafkaBinderConfigurationProperties.mergedConsumerConfiguration();

assertThat(mergedConsumerConfiguration).doesNotContainKeys(ConsumerConfig.GROUP_ID_CONFIG);
}
}

@@ -1,11 +1,11 @@
/*
* Copyright 2018 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -35,7 +35,6 @@ import org.springframework.kafka.test.utils.KafkaTestUtils;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;

/**
* @author Gary Russell
* @since 2.0
@@ -47,18 +46,27 @@ public class KafkaTopicProvisionerTests {
@Test
public void bootPropertiesOverriddenExceptServers() throws Exception {
KafkaProperties bootConfig = new KafkaProperties();
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
"PLAINTEXT");
bootConfig.setBootstrapServers(Collections.singletonList("localhost:1234"));
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(
bootConfig);
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
"SSL");
ClassPathResource ts = new ClassPathResource("test.truststore.ks");
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ts.getFile().getAbsolutePath());
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
ts.getFile().getAbsolutePath());
binderConfig.setBrokers("localhost:9092");
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig, bootConfig);
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig,
bootConfig);
AdminClient adminClient = provisioner.createAdminClient();
assertThat(KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
Map configs = KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder.configs", Map.class);
assertThat(((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0)).isEqualTo("localhost:1234");
assertThat(KafkaTestUtils.getPropertyValue(adminClient,
"client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
Map configs = KafkaTestUtils.getPropertyValue(adminClient,
"client.selector.channelBuilder.configs", Map.class);
assertThat(
((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0))
.isEqualTo("localhost:1234");
adminClient.close();
}

@@ -66,33 +74,44 @@ public class KafkaTopicProvisionerTests {
@Test
public void bootPropertiesOverriddenIncludingServers() throws Exception {
KafkaProperties bootConfig = new KafkaProperties();
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
"PLAINTEXT");
bootConfig.setBootstrapServers(Collections.singletonList("localhost:9092"));
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(
bootConfig);
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
"SSL");
ClassPathResource ts = new ClassPathResource("test.truststore.ks");
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ts.getFile().getAbsolutePath());
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
ts.getFile().getAbsolutePath());
binderConfig.setBrokers("localhost:1234");
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig, bootConfig);
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig,
bootConfig);
AdminClient adminClient = provisioner.createAdminClient();
assertThat(KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
Map configs = KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder.configs", Map.class);
assertThat(((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0)).isEqualTo("localhost:1234");
assertThat(KafkaTestUtils.getPropertyValue(adminClient,
"client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
Map configs = KafkaTestUtils.getPropertyValue(adminClient,
"client.selector.channelBuilder.configs", Map.class);
assertThat(
((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0))
.isEqualTo("localhost:1234");
adminClient.close();
}

@Test
public void brokersInvalid() throws Exception {
KafkaProperties bootConfig = new KafkaProperties();
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
binderConfig.getConfiguration().put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:1234");
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(
bootConfig);
binderConfig.getConfiguration().put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG,
"localhost:1234");
try {
new KafkaTopicProvisioner(binderConfig, bootConfig);
fail("Expected illegal state");
}
catch (IllegalStateException e) {
assertThat(e.getMessage())
.isEqualTo("Set binder bootstrap servers via the 'brokers' property, not 'configuration'");
assertThat(e.getMessage()).isEqualTo(
"Set binder bootstrap servers via the 'brokers' property, not 'configuration'");
}
}

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
@@ -10,7 +10,7 @@
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
<version>2.1.0.RC2</version>
<version>3.0.0.M4</version>
</parent>

<properties>
@@ -22,6 +22,11 @@
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
@@ -69,51 +74,51 @@
<!-- Following dependencies are needed to support Kafka 1.1.0 client-->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<artifactId>kafka_2.12</artifactId>
<version>${kafka.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<artifactId>kafka_2.12</artifactId>
<version>${kafka.version}</version>
<classifier>test</classifier>
<scope>test</scope>
</dependency>
<!-- Following dependencies are only provided for testing and won't be packaged with the binder apps-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-schema</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>${avro.version}</version>
<scope>provided</scope>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.springframework.cloud</groupId>-->
<!-- <artifactId>spring-cloud-schema-registry-client</artifactId>-->
<!-- <scope>test</scope>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.apache.avro</groupId>-->
<!-- <artifactId>avro</artifactId>-->
<!-- <version>${avro.version}</version>-->
<!-- <scope>provided</scope>-->
<!-- </dependency>-->
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<version>${avro.version}</version>
<executions>
<execution>
<phase>generate-test-sources</phase>
<goals>
<goal>schema</goal>
</goals>
<configuration>
<outputDirectory>${project.basedir}/target/generated-test-sources</outputDirectory>
<testOutputDirectory>${project.basedir}/target/generated-test-sources</testOutputDirectory>
<testSourceDirectory>${project.basedir}/src/test/resources/avro</testSourceDirectory>
</configuration>
</execution>
</executions>
</plugin>
<!-- <plugin>-->
<!-- <groupId>org.apache.avro</groupId>-->
<!-- <artifactId>avro-maven-plugin</artifactId>-->
<!-- <version>${avro.version}</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <phase>generate-test-sources</phase>-->
<!-- <goals>-->
<!-- <goal>schema</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- <outputDirectory>${project.basedir}/target/generated-test-sources</outputDirectory>-->
<!-- <testOutputDirectory>${project.basedir}/target/generated-test-sources</testOutputDirectory>-->
<!-- <testSourceDirectory>${project.basedir}/src/test/resources/avro</testSourceDirectory>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
</plugins>
</build>
</project>

@@ -0,0 +1,346 @@
/*
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.cloud.stream.binder.kafka.streams;

import java.util.Arrays;
import java.util.Map;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
import org.springframework.cloud.stream.config.BindingProperties;
import org.springframework.cloud.stream.config.BindingServiceProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.ResolvableType;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
import org.springframework.kafka.core.CleanupConfig;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;

/**
* @author Soby Chacko
* @since 3.0.0
*/
public abstract class AbstractKafkaStreamsBinderProcessor implements ApplicationContextAware {

private static final Log LOG = LogFactory.getLog(AbstractKafkaStreamsBinderProcessor.class);

private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
private final BindingServiceProperties bindingServiceProperties;
private final KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties;
private final CleanupConfig cleanupConfig;
private final KeyValueSerdeResolver keyValueSerdeResolver;

protected ConfigurableApplicationContext applicationContext;

public AbstractKafkaStreamsBinderProcessor(BindingServiceProperties bindingServiceProperties,
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
KeyValueSerdeResolver keyValueSerdeResolver, CleanupConfig cleanupConfig) {
this.bindingServiceProperties = bindingServiceProperties;
this.kafkaStreamsBindingInformationCatalogue = kafkaStreamsBindingInformationCatalogue;
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
this.keyValueSerdeResolver = keyValueSerdeResolver;
this.cleanupConfig = cleanupConfig;
}

@Override
public final void setApplicationContext(ApplicationContext applicationContext)
throws BeansException {
this.applicationContext = (ConfigurableApplicationContext) applicationContext;
}

protected Topology.AutoOffsetReset getAutoOffsetReset(String inboundName, KafkaStreamsConsumerProperties extendedConsumerProperties) {
final KafkaConsumerProperties.StartOffset startOffset = extendedConsumerProperties
.getStartOffset();
Topology.AutoOffsetReset autoOffsetReset = null;
if (startOffset != null) {
switch (startOffset) {
case earliest:
autoOffsetReset = Topology.AutoOffsetReset.EARLIEST;
break;
case latest:
autoOffsetReset = Topology.AutoOffsetReset.LATEST;
break;
default:
break;
}
}
if (extendedConsumerProperties.isResetOffsets()) {
AbstractKafkaStreamsBinderProcessor.LOG.warn("Detected resetOffsets configured on binding "
+ inboundName + ". "
+ "Setting resetOffsets in Kafka Streams binder does not have any effect.");
}
return autoOffsetReset;
}

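The resolved Topology.AutoOffsetReset is typically handed to the Consumed used when the binder materializes an input; a hedged fragment of that hand-off, where the serdes, the topic name "input-topic", and the streamsBuilder/autoOffsetReset variables are placeholders rather than values defined in this changeset:

// Placeholder serdes and topic; shows only how an AutoOffsetReset value is applied.
Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
if (autoOffsetReset != null) {
	consumed = consumed.withOffsetResetPolicy(autoOffsetReset);
}
KTable<String, String> table = streamsBuilder.table("input-topic", consumed);
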
@SuppressWarnings("unchecked")
protected void handleKTableGlobalKTableInputs(Object[] arguments, int index, String input, Class<?> parameterType, Object targetBean,
StreamsBuilderFactoryBean streamsBuilderFactoryBean, StreamsBuilder streamsBuilder,
KafkaStreamsConsumerProperties extendedConsumerProperties,
Serde<?> keySerde, Serde<?> valueSerde, Topology.AutoOffsetReset autoOffsetReset) {
if (parameterType.isAssignableFrom(KTable.class)) {
String materializedAs = extendedConsumerProperties.getMaterializedAs();
String bindingDestination = this.bindingServiceProperties.getBindingDestination(input);
KTable<?, ?> table = getKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
bindingDestination, autoOffsetReset);
KTableBoundElementFactory.KTableWrapper kTableWrapper =
(KTableBoundElementFactory.KTableWrapper) targetBean;
//wrap the proxy created during the initial target type binding with real object (KTable)
kTableWrapper.wrap((KTable<Object, Object>) table);
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
arguments[index] = table;
}
else if (parameterType.isAssignableFrom(GlobalKTable.class)) {
String materializedAs = extendedConsumerProperties.getMaterializedAs();
String bindingDestination = this.bindingServiceProperties.getBindingDestination(input);
GlobalKTable<?, ?> table = getGlobalKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
bindingDestination, autoOffsetReset);
GlobalKTableBoundElementFactory.GlobalKTableWrapper globalKTableWrapper =
(GlobalKTableBoundElementFactory.GlobalKTableWrapper) targetBean;
//wrap the proxy created during the initial target type binding with real object (KTable)
globalKTableWrapper.wrap((GlobalKTable<Object, Object>) table);
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
arguments[index] = table;
}
}

@SuppressWarnings({"unchecked"})
protected StreamsBuilderFactoryBean buildStreamsBuilderAndRetrieveConfig(String beanNamePostPrefix,
ApplicationContext applicationContext, String inboundName,
KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties) {
ConfigurableListableBeanFactory beanFactory = this.applicationContext
.getBeanFactory();

Map<String, Object> streamConfigGlobalProperties = applicationContext
.getBean("streamConfigGlobalProperties", Map.class);

KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties
.getExtendedConsumerProperties(inboundName);
streamConfigGlobalProperties
.putAll(extendedConsumerProperties.getConfiguration());

String bindingLevelApplicationId = extendedConsumerProperties.getApplicationId();
// override application.id if set at the individual binding level.
// We provide this for backward compatibility with StreamListener based processors.
// For function based processors see the next else if conditional block
if (StringUtils.hasText(bindingLevelApplicationId)) {
streamConfigGlobalProperties.put(StreamsConfig.APPLICATION_ID_CONFIG,
bindingLevelApplicationId);
}
else if (kafkaStreamsBinderConfigurationProperties != null && !CollectionUtils.isEmpty(kafkaStreamsBinderConfigurationProperties.getFunctions())) {
String applicationId = kafkaStreamsBinderConfigurationProperties.getFunctions().get(beanNamePostPrefix + ".applicationId");
if (!StringUtils.isEmpty(applicationId)) {
|
||||
streamConfigGlobalProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
|
||||
}
|
||||
}
|
||||
|
||||
//If the application id is not set by any mechanism, then generate it.
|
||||
streamConfigGlobalProperties.computeIfAbsent(StreamsConfig.APPLICATION_ID_CONFIG,
|
||||
k -> {
|
||||
String generatedApplicationID = beanNamePostPrefix + "-applicationId";
|
||||
LOG.info("Binder Generated Kafka Streams Application ID: " + generatedApplicationID);
|
||||
LOG.info("Use the binder generated application ID only for development and testing. ");
|
||||
LOG.info("For production deployments, please consider explicitly setting an application ID using a configuration property.");
|
||||
LOG.info("The generated applicationID is static and will be preserved over application restarts.");
|
||||
return generatedApplicationID;
|
||||
});
|
||||
|
||||
int concurrency = this.bindingServiceProperties.getConsumerProperties(inboundName)
|
||||
.getConcurrency();
|
||||
// override concurrency if set at the individual binding level.
|
||||
if (concurrency > 1) {
|
||||
streamConfigGlobalProperties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG,
|
||||
concurrency);
|
||||
}
|
||||
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers = applicationContext
|
||||
.getBean("kafkaStreamsDlqDispatchers", Map.class);
|
||||
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration = new KafkaStreamsConfiguration(streamConfigGlobalProperties) {
|
||||
@Override
|
||||
public Properties asProperties() {
|
||||
Properties properties = super.asProperties();
|
||||
properties.put(SendToDlqAndContinue.KAFKA_STREAMS_DLQ_DISPATCHERS,
|
||||
kafkaStreamsDlqDispatchers);
|
||||
return properties;
|
||||
}
|
||||
};
|
||||
|
||||
StreamsBuilderFactoryBean streamsBuilder = this.cleanupConfig == null
|
||||
? new StreamsBuilderFactoryBean(kafkaStreamsConfiguration)
|
||||
: new StreamsBuilderFactoryBean(kafkaStreamsConfiguration,
|
||||
this.cleanupConfig);
|
||||
streamsBuilder.setAutoStartup(false);
|
||||
BeanDefinition streamsBuilderBeanDefinition = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(
|
||||
(Class<StreamsBuilderFactoryBean>) streamsBuilder.getClass(),
|
||||
() -> streamsBuilder)
|
||||
.getRawBeanDefinition();
|
||||
((BeanDefinitionRegistry) beanFactory).registerBeanDefinition(
|
||||
"stream-builder-" + beanNamePostPrefix, streamsBuilderBeanDefinition);
|
||||
|
||||
//Removing the application ID from global properties so that the next function won't re-use it and cause race conditions.
|
||||
streamConfigGlobalProperties.remove(StreamsConfig.APPLICATION_ID_CONFIG);
|
||||
|
||||
return applicationContext.getBean(
|
||||
"&stream-builder-" + beanNamePostPrefix, StreamsBuilderFactoryBean.class);
|
||||
}
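For orientation, a minimal sketch (not part of this changeset) of how the application ID resolution above plays out for a function-based processor. The bean name "process" and the property key shown in the comment are illustrative assumptions; the code itself simply reads getFunctions().get(beanNamePostPrefix + ".applicationId"):

@Bean
public java.util.function.Function<KStream<String, String>, KStream<String, String>> process() {
    // With no explicit configuration, the binder would fall back to the generated
    // ID beanNamePostPrefix + "-applicationId" (for example "process-applicationId").
    return input -> input;
}

// Assumed per-function override that would populate the functions map read above:
// spring.cloud.stream.kafka.streams.binder.functions.process.applicationId=my-processor-id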

    protected Serde<?> getValueSerde(String inboundName, KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties, ResolvableType resolvableType) {
        if (bindingServiceProperties.getConsumerProperties(inboundName).isUseNativeDecoding()) {
            BindingProperties bindingProperties = this.bindingServiceProperties
                    .getBindingProperties(inboundName);
            return this.keyValueSerdeResolver.getInboundValueSerde(
                    bindingProperties.getConsumer(), kafkaStreamsConsumerProperties, resolvableType);
        }
        else {
            return Serdes.ByteArray();
        }
    }

    protected KStream<?, ?> getKStream(String inboundName, BindingProperties bindingProperties, StreamsBuilder streamsBuilder,
            Serde<?> keySerde, Serde<?> valueSerde, Topology.AutoOffsetReset autoOffsetReset) {
        addStateStoreBeans(streamsBuilder);

        String[] bindingTargets = StringUtils.commaDelimitedListToStringArray(
                this.bindingServiceProperties.getBindingDestination(inboundName));

        KStream<?, ?> stream = streamsBuilder.stream(Arrays.asList(bindingTargets),
                Consumed.with(keySerde, valueSerde)
                        .withOffsetResetPolicy(autoOffsetReset));
        final boolean nativeDecoding = this.bindingServiceProperties
                .getConsumerProperties(inboundName).isUseNativeDecoding();
        if (nativeDecoding) {
            LOG.info("Native decoding is enabled for " + inboundName
                    + ". Inbound deserialization done at the broker.");
        }
        else {
            LOG.info("Native decoding is disabled for " + inboundName
                    + ". Inbound message conversion done by Spring Cloud Stream.");
        }

        return getkStream(bindingProperties, stream, nativeDecoding);
    }

    private KStream<?, ?> getkStream(BindingProperties bindingProperties, KStream<?, ?> stream, boolean nativeDecoding) {
        if (!nativeDecoding) {
            stream = stream.mapValues((value) -> {
                Object returnValue;
                String contentType = bindingProperties.getContentType();
                if (value != null && !StringUtils.isEmpty(contentType)) {
                    returnValue = MessageBuilder.withPayload(value)
                            .setHeader(MessageHeaders.CONTENT_TYPE, contentType).build();
                }
                else {
                    returnValue = value;
                }
                return returnValue;
            });
        }
        return stream;
    }

    @SuppressWarnings("rawtypes")
    private void addStateStoreBeans(StreamsBuilder streamsBuilder) {
        try {
            final Map<String, StoreBuilder> storeBuilders = applicationContext.getBeansOfType(StoreBuilder.class);
            if (!CollectionUtils.isEmpty(storeBuilders)) {
                storeBuilders.values().forEach(storeBuilder -> {
                    streamsBuilder.addStateStore(storeBuilder);
                    if (LOG.isInfoEnabled()) {
                        LOG.info("state store " + storeBuilder.name() + " added to topology");
                    }
                });
            }
        }
        catch (Exception e) {
            // Pass through.
        }
    }
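Because addStateStoreBeans() discovers every StoreBuilder bean through getBeansOfType(StoreBuilder.class), an application can contribute a state store to the topology simply by declaring one. A minimal sketch, assuming a hypothetical store named "my-store" (Stores is the standard org.apache.kafka.streams.state.Stores helper):

@Bean
public StoreBuilder<KeyValueStore<String, Long>> myStoreBuilder() {
    // Picked up by addStateStoreBeans(...) and added to the StreamsBuilder above.
    return Stores.keyValueStoreBuilder(
            Stores.persistentKeyValueStore("my-store"),
            Serdes.String(), Serdes.Long());
}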

    private <K, V> KTable<K, V> materializedAs(StreamsBuilder streamsBuilder, String destination, String storeName,
            Serde<K> k, Serde<V> v, Topology.AutoOffsetReset autoOffsetReset) {
        return streamsBuilder.table(this.bindingServiceProperties.getBindingDestination(destination),
                Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset), getMaterialized(storeName, k, v));
    }

    private <K, V> Materialized<K, V, KeyValueStore<Bytes, byte[]>> getMaterialized(
            String storeName, Serde<K> k, Serde<V> v) {
        return Materialized.<K, V, KeyValueStore<Bytes, byte[]>>as(storeName)
                .withKeySerde(k).withValueSerde(v);
    }

    private <K, V> GlobalKTable<K, V> materializedAsGlobalKTable(
            StreamsBuilder streamsBuilder, String destination, String storeName,
            Serde<K> k, Serde<V> v, Topology.AutoOffsetReset autoOffsetReset) {
        return streamsBuilder.globalTable(
                this.bindingServiceProperties.getBindingDestination(destination),
                Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset),
                getMaterialized(storeName, k, v));
    }

    private GlobalKTable<?, ?> getGlobalKTable(StreamsBuilder streamsBuilder,
            Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
            String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
        return materializedAs != null
                ? materializedAsGlobalKTable(streamsBuilder, bindingDestination,
                        materializedAs, keySerde, valueSerde, autoOffsetReset)
                : streamsBuilder.globalTable(bindingDestination,
                        Consumed.with(keySerde, valueSerde)
                                .withOffsetResetPolicy(autoOffsetReset));
    }

    private KTable<?, ?> getKTable(StreamsBuilder streamsBuilder, Serde<?> keySerde,
            Serde<?> valueSerde, String materializedAs, String bindingDestination,
            Topology.AutoOffsetReset autoOffsetReset) {
        return materializedAs != null
                ? materializedAs(streamsBuilder, bindingDestination, materializedAs,
                        keySerde, valueSerde, autoOffsetReset)
                : streamsBuilder.table(bindingDestination,
                        Consumed.with(keySerde, valueSerde)
                                .withOffsetResetPolicy(autoOffsetReset));
    }

}

@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -36,27 +36,35 @@ import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* An {@link AbstractBinder} implementation for {@link GlobalKTable}.
|
||||
*
|
||||
* Provides only consumer binding for the bound {@link GlobalKTable}.
|
||||
* Output bindings are not allowed on this binder.
|
||||
* <p>
|
||||
* Provides only consumer binding for the bound {@link GlobalKTable}. Output bindings are
|
||||
* not allowed on this binder.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1.0
|
||||
*/
|
||||
public class GlobalKTableBinder extends
|
||||
// @checkstyle:off
|
||||
AbstractBinder<GlobalKTable<Object, Object>, ExtendedConsumerProperties<KafkaStreamsConsumerProperties>, ExtendedProducerProperties<KafkaStreamsProducerProperties>>
|
||||
implements ExtendedPropertiesBinder<GlobalKTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
implements
|
||||
ExtendedPropertiesBinder<GlobalKTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
private final KafkaTopicProvisioner kafkaTopicProvisioner;
|
||||
|
||||
private final Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers;
|
||||
|
||||
// @checkstyle:off
|
||||
private KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties = new KafkaStreamsExtendedBindingProperties();
|
||||
|
||||
public GlobalKTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties, KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
// @checkstyle:on
|
||||
|
||||
public GlobalKTableBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
this.kafkaTopicProvisioner = kafkaTopicProvisioner;
|
||||
this.kafkaStreamsDlqDispatchers = kafkaStreamsDlqDispatchers;
|
||||
@@ -64,31 +72,39 @@ public class GlobalKTableBinder extends
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindConsumer(String name, String group, GlobalKTable<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindConsumer(String name,
|
||||
String group, GlobalKTable<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
if (!StringUtils.hasText(group)) {
|
||||
group = this.binderConfigurationProperties.getApplicationId();
|
||||
}
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group, getApplicationContext(),
|
||||
this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties, this.kafkaStreamsDlqDispatchers);
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group,
|
||||
getApplicationContext(), this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties,
|
||||
this.kafkaStreamsDlqDispatchers);
|
||||
return new DefaultBinding<>(name, group, inputTarget, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindProducer(String name, GlobalKTable<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
throw new UnsupportedOperationException("No producer level binding is allowed for GlobalKTable");
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindProducer(String name,
|
||||
GlobalKTable<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
throw new UnsupportedOperationException(
|
||||
"No producer level binding is allowed for GlobalKTable");
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(channelName);
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(String channelName) {
|
||||
throw new UnsupportedOperationException("No producer binding is allowed and therefore no properties");
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(
|
||||
String channelName) {
|
||||
throw new UnsupportedOperationException(
|
||||
"No producer binding is allowed and therefore no properties");
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -98,7 +114,13 @@ public class GlobalKTableBinder extends
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedPropertiesEntryClass();
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedPropertiesEntryClass();
|
||||
}
|
||||
|
||||
public void setKafkaStreamsExtendedBindingProperties(
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties) {
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,10 +19,15 @@ package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.BindingProvider;
|
||||
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
@@ -34,19 +39,49 @@ import org.springframework.context.annotation.Import;
|
||||
* @since 2.1.0
|
||||
*/
|
||||
@Configuration
|
||||
@Import(KafkaStreamsBinderUtils.KafkaStreamsMissingBeansRegistrar.class)
|
||||
@BindingProvider
|
||||
@Import({ KafkaAutoConfiguration.class,
|
||||
KafkaStreamsBinderHealthIndicatorConfiguration.class })
|
||||
public class GlobalKTableBinderConfiguration {
|
||||
|
||||
@Bean
|
||||
public KafkaTopicProvisioner provisioningProvider(KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
public KafkaTopicProvisioner provisioningProvider(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(binderConfigurationProperties, kafkaProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public GlobalKTableBinder GlobalKTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
return new GlobalKTableBinder(binderConfigurationProperties, kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
public GlobalKTableBinder GlobalKTableBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
GlobalKTableBinder globalKTableBinder = new GlobalKTableBinder(binderConfigurationProperties,
|
||||
kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
globalKTableBinder.setKafkaStreamsExtendedBindingProperties(
|
||||
kafkaStreamsExtendedBindingProperties);
|
||||
return globalKTableBinder;
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnBean(name = "outerContext")
|
||||
public static BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
return beanFactory -> {
|
||||
|
||||
// It is safe to call getBean("outerContext") here, because this bean is
// registered first and independently of the parent context.
ApplicationContext outerContext = (ApplicationContext) beanFactory
|
||||
.getBean("outerContext");
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBinderConfigurationProperties.class.getSimpleName(),
|
||||
outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsExtendedBindingProperties.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsExtendedBindingProperties.class));
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -23,18 +23,21 @@ import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binding.AbstractBindingTargetFactory;
|
||||
import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for {@link GlobalKTable}
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for
|
||||
* {@link GlobalKTable}
|
||||
*
|
||||
* Input bindings are only created as output bindings on GlobalKTable are not allowed.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1.0
|
||||
*/
|
||||
public class GlobalKTableBoundElementFactory extends AbstractBindingTargetFactory<GlobalKTable> {
|
||||
public class GlobalKTableBoundElementFactory
|
||||
extends AbstractBindingTargetFactory<GlobalKTable> {
|
||||
|
||||
private final BindingServiceProperties bindingServiceProperties;
|
||||
|
||||
@@ -45,12 +48,21 @@ public class GlobalKTableBoundElementFactory extends AbstractBindingTargetFactor
|
||||
|
||||
@Override
|
||||
public GlobalKTable createInput(String name) {
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
//Always set multiplex to true in the kafka streams binder
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
|
||||
ConsumerProperties consumerProperties = bindingProperties.getConsumer();
|
||||
if (consumerProperties == null) {
|
||||
consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
consumerProperties.setUseNativeDecoding(true);
|
||||
}
|
||||
// Always set multiplex to true in the kafka streams binder
|
||||
consumerProperties.setMultiplex(true);
|
||||
|
||||
// @checkstyle:off
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapperHandler wrapper = new GlobalKTableBoundElementFactory.GlobalKTableWrapperHandler();
|
||||
ProxyFactory proxyFactory = new ProxyFactory(GlobalKTableBoundElementFactory.GlobalKTableWrapper.class, GlobalKTable.class);
|
||||
// @checkstyle:on
|
||||
ProxyFactory proxyFactory = new ProxyFactory(
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapper.class,
|
||||
GlobalKTable.class);
|
||||
proxyFactory.addAdvice(wrapper);
|
||||
|
||||
return (GlobalKTable) proxyFactory.getProxy();
|
||||
@@ -58,17 +70,21 @@ public class GlobalKTableBoundElementFactory extends AbstractBindingTargetFactor
|
||||
|
||||
@Override
|
||||
public GlobalKTable createOutput(String name) {
|
||||
throw new UnsupportedOperationException("Outbound operations are not allowed on target type GlobalKTable");
|
||||
throw new UnsupportedOperationException(
|
||||
"Outbound operations are not allowed on target type GlobalKTable");
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper for GlobalKTable proxy.
|
||||
*/
|
||||
public interface GlobalKTableWrapper {
|
||||
|
||||
void wrap(GlobalKTable<Object, Object> delegate);
|
||||
|
||||
}
|
||||
|
||||
private static class GlobalKTableWrapperHandler implements GlobalKTableBoundElementFactory.GlobalKTableWrapper, MethodInterceptor {
|
||||
private static class GlobalKTableWrapperHandler implements
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapper, MethodInterceptor {
|
||||
|
||||
private GlobalKTable<Object, Object> delegate;
|
||||
|
||||
@@ -80,17 +96,25 @@ public class GlobalKTableBoundElementFactory extends AbstractBindingTargetFactor
|
||||
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
if (methodInvocation.getMethod().getDeclaringClass().equals(GlobalKTable.class)) {
|
||||
Assert.notNull(this.delegate, "Trying to prepareConsumerBinding " + methodInvocation
|
||||
.getMethod() + " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate, methodInvocation.getArguments());
|
||||
if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(GlobalKTable.class)) {
|
||||
Assert.notNull(this.delegate,
|
||||
"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
|
||||
+ " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate,
|
||||
methodInvocation.getArguments());
|
||||
}
|
||||
else if (methodInvocation.getMethod().getDeclaringClass().equals(GlobalKTableBoundElementFactory.GlobalKTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this, methodInvocation.getArguments());
|
||||
else if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(GlobalKTableBoundElementFactory.GlobalKTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this,
|
||||
methodInvocation.getArguments());
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException("Only GlobalKTable method invocations are permitted");
|
||||
throw new IllegalStateException(
|
||||
"Only GlobalKTable method invocations are permitted");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
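To show where the proxies created by this factory surface in user code, a hedged sketch of a StreamListener-style binding interface; the binding name "inventory" and the key/value types are illustrative assumptions, not part of this changeset:

interface InventoryBindings {

    // createInput("inventory") returns a proxy for this target; the binder later
    // wraps it with the real GlobalKTable via GlobalKTableWrapper.wrap(...).
    @Input("inventory")
    GlobalKTable<String, Long> inventory();

}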
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,6 +19,8 @@ package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.common.serialization.Serializer;
|
||||
import org.apache.kafka.streams.KafkaStreams;
|
||||
import org.apache.kafka.streams.errors.InvalidStateStoreException;
|
||||
@@ -27,13 +29,17 @@ import org.apache.kafka.streams.state.QueryableStoreType;
|
||||
import org.apache.kafka.streams.state.StreamsMetadata;
|
||||
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.retry.RetryPolicy;
|
||||
import org.springframework.retry.backoff.FixedBackOffPolicy;
|
||||
import org.springframework.retry.policy.SimpleRetryPolicy;
|
||||
import org.springframework.retry.support.RetryTemplate;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Services pertinent to the interactive query capabilities of Kafka Streams. This class provides
|
||||
* services such as querying for a particular store, which instance is hosting a particular store etc.
|
||||
* This is part of the public API of the kafka streams binder and the users can inject this service in their
|
||||
* applications to make use of it.
|
||||
* Services pertinent to the interactive query capabilities of Kafka Streams. This class
|
||||
* provides services such as querying for a particular store, which instance is hosting a
|
||||
* particular store etc. This is part of the public API of the kafka streams binder and
|
||||
* the users can inject this service in their applications to make use of it.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Renwei Han
|
||||
@@ -41,54 +47,71 @@ import org.springframework.util.StringUtils;
|
||||
*/
|
||||
public class InteractiveQueryService {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(InteractiveQueryService.class);
|
||||
|
||||
private final KafkaStreamsRegistry kafkaStreamsRegistry;
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
/**
|
||||
* Constructor for InteractiveQueryService.
|
||||
*
|
||||
* @param kafkaStreamsRegistry holding {@link KafkaStreamsRegistry}
|
||||
* @param binderConfigurationProperties kafka Streams binder configuration properties
|
||||
*/
|
||||
public InteractiveQueryService(KafkaStreamsRegistry kafkaStreamsRegistry,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
this.kafkaStreamsRegistry = kafkaStreamsRegistry;
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve and return a queryable store by name created in the application.
|
||||
*
|
||||
* @param storeName name of the queryable store
|
||||
* @param storeType type of the queryable store
|
||||
* @param <T> generic queryable store
|
||||
* @return queryable store.
|
||||
*/
|
||||
public <T> T getQueryableStore(String storeName, QueryableStoreType<T> storeType) {
|
||||
for (KafkaStreams kafkaStream : this.kafkaStreamsRegistry.getKafkaStreams()) {
|
||||
try {
|
||||
T store = kafkaStream.store(storeName, storeType);
|
||||
if (store != null) {
|
||||
return store;
|
||||
|
||||
RetryTemplate retryTemplate = new RetryTemplate();
|
||||
|
||||
KafkaStreamsBinderConfigurationProperties.StateStoreRetry stateStoreRetry = this.binderConfigurationProperties.getStateStoreRetry();
|
||||
RetryPolicy retryPolicy = new SimpleRetryPolicy(stateStoreRetry.getMaxAttempts());
|
||||
FixedBackOffPolicy backOffPolicy = new FixedBackOffPolicy();
|
||||
backOffPolicy.setBackOffPeriod(stateStoreRetry.getBackoffPeriod());
|
||||
|
||||
retryTemplate.setBackOffPolicy(backOffPolicy);
|
||||
retryTemplate.setRetryPolicy(retryPolicy);
|
||||
|
||||
return retryTemplate.execute(context -> {
|
||||
T store;
|
||||
for (KafkaStreams kafkaStream : InteractiveQueryService.this.kafkaStreamsRegistry.getKafkaStreams()) {
|
||||
try {
|
||||
store = kafkaStream.store(storeName, storeType);
|
||||
if (store != null) {
|
||||
return store;
|
||||
}
|
||||
}
|
||||
catch (InvalidStateStoreException e) {
|
||||
LOG.warn("Error when retrieving state store: " + storeName, e);
|
||||
}
|
||||
}
|
||||
catch (InvalidStateStoreException ignored) {
|
||||
//pass through
|
||||
}
|
||||
}
|
||||
return null;
|
||||
throw new IllegalStateException("Error when retrieving state store: " + storeName);
|
||||
});
|
||||
}
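A hedged usage sketch of the retrying lookup above; the store name, types, and the injected interactiveQueryService reference are illustrative, while QueryableStoreTypes is the standard Kafka Streams helper:

// Retries according to the configured StateStoreRetry settings until the store
// becomes queryable or the retry policy is exhausted.
ReadOnlyKeyValueStore<String, Long> store = interactiveQueryService.getQueryableStore(
        "my-store", QueryableStoreTypes.<String, Long>keyValueStore());
Long value = store.get("some-key");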
|
||||
|
||||
/**
|
||||
* Gets the current {@link HostInfo} that the calling kafka streams application is running on.
*
* Note that the end user applications must provide `application.server` as a configuration property
* when calling this method. If this is not available, then null is returned.
* Gets the current {@link HostInfo} that the calling kafka streams application is
* running on.
*
* Note that the end user applications must provide `application.server` as a
* configuration property when calling this method. If this is not available, then
* null is returned.
* @return the current {@link HostInfo}
*/
|
||||
public HostInfo getCurrentHostInfo() {
|
||||
Map<String, String> configuration = this.binderConfigurationProperties.getConfiguration();
|
||||
Map<String, String> configuration = this.binderConfigurationProperties
|
||||
.getConfiguration();
|
||||
if (configuration.containsKey("application.server")) {
|
||||
|
||||
String applicationServer = configuration.get("application.server");
|
||||
@@ -100,27 +123,27 @@ public class InteractiveQueryService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the {@link HostInfo} where the provided store and key are hosted on. This may not be the
* current host that is running the application. Kafka Streams will look through all the consumer instances
* under the same application id and retrieves the proper host.
*
* Note that the end user applications must provide `application.server` as a configuration property
* for all the application instances when calling this method. If this is not available, then null may be returned.
* Gets the {@link HostInfo} where the provided store and key are hosted on. This may
* not be the current host that is running the application. Kafka Streams will look
* through all the consumer instances under the same application id and retrieves the
* proper host.
*
* Note that the end user applications must provide `application.server` as a
* configuration property for all the application instances when calling this method.
* If this is not available, then null may be returned.
* @param <K> generic type for key
|
||||
* @param store store name
|
||||
* @param key key to look for
|
||||
* @param serializer {@link Serializer} for the key
|
||||
* @return the {@link HostInfo} where the key for the provided store is hosted currently
|
||||
* @return the {@link HostInfo} where the key for the provided store is hosted
|
||||
* currently
|
||||
*/
|
||||
public <K> HostInfo getHostInfo(String store, K key, Serializer<K> serializer) {
|
||||
StreamsMetadata streamsMetadata = this.kafkaStreamsRegistry.getKafkaStreams()
|
||||
.stream()
|
||||
.map((k) -> Optional.ofNullable(k.metadataForKey(store, key, serializer)))
|
||||
.filter(Optional::isPresent)
|
||||
.map(Optional::get)
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
.filter(Optional::isPresent).map(Optional::get).findFirst().orElse(null);
|
||||
return streamsMetadata != null ? streamsMetadata.hostInfo() : null;
|
||||
}
|
||||
|
||||
}
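A hedged sketch of how getCurrentHostInfo() and getHostInfo(...) are typically combined to route an interactive query to the instance that owns a key. The store name, the key, and the assumption that `application.server` is set on every instance through the binder configuration are illustrative, not part of this changeset:

HostInfo activeHost = interactiveQueryService.getHostInfo("my-store", "some-key",
        new StringSerializer());
if (activeHost != null && !activeHost.equals(interactiveQueryService.getCurrentHostInfo())) {
    // The key is hosted on another instance; forward the query there, e.g. over HTTP.
}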
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2018 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -21,9 +21,11 @@ import java.util.Map;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Produced;
|
||||
|
||||
import org.springframework.aop.framework.Advised;
|
||||
import org.springframework.cloud.stream.binder.AbstractBinder;
|
||||
import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
import org.springframework.cloud.stream.binder.Binding;
|
||||
@@ -40,8 +42,8 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binder.Binder} implementation for {@link KStream}.
* This implementation extends from the {@link AbstractBinder} directly.
* {@link org.springframework.cloud.stream.binder.Binder} implementation for
* {@link KStream}. This implementation extends from the {@link AbstractBinder} directly.
* <p>
|
||||
* Provides both producer and consumer bindings for the bound KStream.
|
||||
*
|
||||
@@ -49,15 +51,22 @@ import org.springframework.util.StringUtils;
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
class KStreamBinder extends
|
||||
// @checkstyle:off
|
||||
AbstractBinder<KStream<Object, Object>, ExtendedConsumerProperties<KafkaStreamsConsumerProperties>, ExtendedProducerProperties<KafkaStreamsProducerProperties>>
|
||||
implements ExtendedPropertiesBinder<KStream<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
implements
|
||||
ExtendedPropertiesBinder<KStream<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KStreamBinder.class);
|
||||
|
||||
private final KafkaTopicProvisioner kafkaTopicProvisioner;
|
||||
|
||||
// @checkstyle:off
|
||||
private KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties = new KafkaStreamsExtendedBindingProperties();
|
||||
|
||||
// @checkstyle:on
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
private final KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate;
|
||||
@@ -69,11 +78,11 @@ class KStreamBinder extends
|
||||
private final Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers;
|
||||
|
||||
KStreamBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
this.kafkaTopicProvisioner = kafkaTopicProvisioner;
|
||||
this.kafkaStreamsMessageConversionDelegate = kafkaStreamsMessageConversionDelegate;
|
||||
@@ -84,57 +93,87 @@ class KStreamBinder extends
|
||||
|
||||
@Override
|
||||
protected Binding<KStream<Object, Object>> doBindConsumer(String name, String group,
|
||||
KStream<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerConsumerProperties(inputTarget, properties.getExtension());
|
||||
KStream<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
|
||||
KStream<Object, Object> delegate = ((KStreamBoundElementFactory.KStreamWrapperHandler)
|
||||
((Advised) inputTarget).getAdvisors()[0].getAdvice()).getDelegate();
|
||||
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerConsumerProperties(delegate, properties.getExtension());
|
||||
|
||||
if (!StringUtils.hasText(group)) {
|
||||
group = this.binderConfigurationProperties.getApplicationId();
|
||||
}
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group, getApplicationContext(),
|
||||
this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties, this.kafkaStreamsDlqDispatchers);
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group,
|
||||
getApplicationContext(), this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties,
|
||||
this.kafkaStreamsDlqDispatchers);
|
||||
|
||||
return new DefaultBinding<>(name, group, inputTarget, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Binding<KStream<Object, Object>> doBindProducer(String name, KStream<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
protected Binding<KStream<Object, Object>> doBindProducer(String name,
|
||||
KStream<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
ExtendedProducerProperties<KafkaProducerProperties> extendedProducerProperties = new ExtendedProducerProperties<>(
|
||||
new KafkaProducerProperties());
|
||||
this.kafkaTopicProvisioner.provisionProducerDestination(name, extendedProducerProperties);
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver.getOuboundKeySerde(properties.getExtension());
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getOutboundValueSerde(properties, properties.getExtension());
|
||||
to(properties.isUseNativeEncoding(), name, outboundBindTarget, (Serde<Object>) keySerde, (Serde<Object>) valueSerde);
|
||||
properties.getExtension());
|
||||
this.kafkaTopicProvisioner.provisionProducerDestination(name,
|
||||
extendedProducerProperties);
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver
|
||||
.getOuboundKeySerde(properties.getExtension(), kafkaStreamsBindingInformationCatalogue.getOutboundKStreamResolvable());
|
||||
LOG.info("Key Serde used for (outbound) " + name + ": " + keySerde.getClass().getName());
|
||||
|
||||
Serde<?> valueSerde;
|
||||
if (properties.isUseNativeEncoding()) {
|
||||
valueSerde = this.keyValueSerdeResolver.getOutboundValueSerde(properties,
|
||||
properties.getExtension(), kafkaStreamsBindingInformationCatalogue.getOutboundKStreamResolvable());
|
||||
}
|
||||
else {
|
||||
valueSerde = Serdes.ByteArray();
|
||||
}
|
||||
LOG.info("Value Serde used for (outbound) " + name + ": " + valueSerde.getClass().getName());
|
||||
|
||||
to(properties.isUseNativeEncoding(), name, outboundBindTarget,
|
||||
(Serde<Object>) keySerde, (Serde<Object>) valueSerde);
|
||||
return new DefaultBinding<>(name, null, outboundBindTarget, null);
|
||||
}
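For context, a hedged sketch (not part of this changeset) of a functional-style processor served by this outbound path; with native encoding enabled, the value Serde resolved above comes from the function's return type rather than falling back to Serdes.ByteArray(). The bean name and types are illustrative:

@Bean
public Function<KStream<String, String>, KStream<String, Long>> wordLength() {
    // Outbound values are Long, so a matching Serde would be resolved through
    // getOutboundValueSerde(...) when useNativeEncoding is true.
    return input -> input.mapValues(value -> (long) value.length());
}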
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void to(boolean isNativeEncoding, String name, KStream<Object, Object> outboundBindTarget,
|
||||
Serde<Object> keySerde, Serde<Object> valueSerde) {
|
||||
private void to(boolean isNativeEncoding, String name,
|
||||
KStream<Object, Object> outboundBindTarget, Serde<Object> keySerde,
|
||||
Serde<Object> valueSerde) {
|
||||
if (!isNativeEncoding) {
|
||||
LOG.info("Native encoding is disabled for " + name + ". Outbound message conversion done by Spring Cloud Stream.");
|
||||
this.kafkaStreamsMessageConversionDelegate.serializeOnOutbound(outboundBindTarget)
|
||||
LOG.info("Native encoding is disabled for " + name
|
||||
+ ". Outbound message conversion done by Spring Cloud Stream.");
|
||||
this.kafkaStreamsMessageConversionDelegate
|
||||
.serializeOnOutbound(outboundBindTarget)
|
||||
.to(name, Produced.with(keySerde, valueSerde));
|
||||
}
|
||||
else {
|
||||
LOG.info("Native encoding is enabled for " + name + ". Outbound serialization done at the broker.");
|
||||
LOG.info("Native encoding is enabled for " + name
|
||||
+ ". Outbound serialization done at the broker.");
|
||||
outboundBindTarget.to(name, Produced.with(keySerde, valueSerde));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(channelName);
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedProducerProperties(channelName);
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedProducerProperties(channelName);
|
||||
}
|
||||
|
||||
public void setKafkaStreamsExtendedBindingProperties(KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties) {
|
||||
public void setKafkaStreamsExtendedBindingProperties(
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties) {
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
}
|
||||
|
||||
@@ -145,6 +184,8 @@ class KStreamBinder extends
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedPropertiesEntryClass();
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedPropertiesEntryClass();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,20 +19,18 @@ package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.config.MethodInvokingFactoryBean;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.BindingProvider;
|
||||
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
|
||||
/**
|
||||
* Configuration for KStream binder.
|
||||
@@ -42,69 +40,63 @@ import org.springframework.core.type.AnnotationMetadata;
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@Configuration
|
||||
@Import({KafkaAutoConfiguration.class, KStreamBinderConfiguration.KStreamMissingBeansRegistrar.class})
|
||||
@Import({ KafkaAutoConfiguration.class,
|
||||
KafkaStreamsBinderHealthIndicatorConfiguration.class })
|
||||
@BindingProvider
|
||||
public class KStreamBinderConfiguration {
|
||||
|
||||
@Bean
|
||||
public KafkaTopicProvisioner provisioningProvider(KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(binderConfigurationProperties, kafkaProperties);
|
||||
public KafkaTopicProvisioner provisioningProvider(
|
||||
KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(kafkaStreamsBinderConfigurationProperties,
|
||||
kafkaProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KStreamBinder kStreamBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KStreamBinder kStreamBinder = new KStreamBinder(binderConfigurationProperties, kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate, KafkaStreamsBindingInformationCatalogue,
|
||||
keyValueSerdeResolver, kafkaStreamsDlqDispatchers);
|
||||
kStreamBinder.setKafkaStreamsExtendedBindingProperties(kafkaStreamsExtendedBindingProperties);
|
||||
public KStreamBinder kStreamBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KStreamBinder kStreamBinder = new KStreamBinder(binderConfigurationProperties,
|
||||
kafkaTopicProvisioner, KafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue, keyValueSerdeResolver,
|
||||
kafkaStreamsDlqDispatchers);
|
||||
kStreamBinder.setKafkaStreamsExtendedBindingProperties(
|
||||
kafkaStreamsExtendedBindingProperties);
|
||||
return kStreamBinder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Registrar for missing beans when there are multiple binders in the application.
|
||||
*/
|
||||
static class KStreamMissingBeansRegistrar extends KafkaStreamsBinderUtils.KafkaStreamsMissingBeansRegistrar {
|
||||
@Bean
|
||||
@ConditionalOnBean(name = "outerContext")
|
||||
public static BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
return beanFactory -> {
|
||||
|
||||
private static final String BEAN_NAME = "outerContext";
|
||||
|
||||
@Override
|
||||
public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
|
||||
BeanDefinitionRegistry registry) {
|
||||
super.registerBeanDefinitions(importingClassMetadata, registry);
|
||||
|
||||
if (registry.containsBeanDefinition(BEAN_NAME)) {
|
||||
|
||||
AbstractBeanDefinition converstionDelegateBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KafkaStreamsMessageConversionDelegate.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KafkaStreamsMessageConversionDelegate.class.getSimpleName(), converstionDelegateBean);
|
||||
|
||||
AbstractBeanDefinition keyValueSerdeResolverBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KeyValueSerdeResolver.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KeyValueSerdeResolver.class.getSimpleName(), keyValueSerdeResolverBean);
|
||||
|
||||
AbstractBeanDefinition kafkaStreamsExtendedBindingPropertiesBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KafkaStreamsExtendedBindingProperties.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KafkaStreamsExtendedBindingProperties.class.getSimpleName(), kafkaStreamsExtendedBindingPropertiesBean);
|
||||
}
|
||||
}
|
||||
// It is safe to call getBean("outerContext") here, because this bean is
// registered first and independently of the parent context.
ApplicationContext outerContext = (ApplicationContext) beanFactory
|
||||
.getBean("outerContext");
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBinderConfigurationProperties.class.getSimpleName(),
|
||||
outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsMessageConversionDelegate.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsMessageConversionDelegate.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBindingInformationCatalogue.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsBindingInformationCatalogue.class));
|
||||
beanFactory.registerSingleton(KeyValueSerdeResolver.class.getSimpleName(),
|
||||
outerContext.getBean(KeyValueSerdeResolver.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsExtendedBindingProperties.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsExtendedBindingProperties.class));
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -22,16 +22,18 @@ import org.apache.kafka.streams.kstream.KStream;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.ProducerProperties;
|
||||
import org.springframework.cloud.stream.binding.AbstractBindingTargetFactory;
|
||||
import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for{@link KStream}.
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory}
|
||||
* for{@link KStream}.
|
||||
*
|
||||
* The implementation creates proxies for both input and output binding.
|
||||
* The actual target will be created downstream through further binding process.
|
||||
* The implementation creates proxies for both input and output binding. The actual target
|
||||
* will be created downstream through further binding process.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
* @author Soby Chacko
|
||||
@@ -43,7 +45,7 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
|
||||
|
||||
KStreamBoundElementFactory(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
super(KStream.class);
|
||||
this.bindingServiceProperties = bindingServiceProperties;
|
||||
this.kafkaStreamsBindingInformationCatalogue = KafkaStreamsBindingInformationCatalogue;
|
||||
@@ -51,8 +53,13 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
|
||||
@Override
|
||||
public KStream createInput(String name) {
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
//Always set multiplex to true in the kafka streams binder
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
|
||||
ConsumerProperties consumerProperties = bindingProperties.getConsumer();
|
||||
if (consumerProperties == null) {
|
||||
consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
consumerProperties.setUseNativeDecoding(true);
|
||||
}
|
||||
// Always set multiplex to true in the kafka streams binder
|
||||
consumerProperties.setMultiplex(true);
|
||||
return createProxyForKStream(name);
|
||||
}
|
||||
@@ -60,6 +67,13 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public KStream createOutput(final String name) {
|
||||
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
|
||||
ProducerProperties producerProperties = bindingProperties.getProducer();
|
||||
if (producerProperties == null) {
|
||||
producerProperties = this.bindingServiceProperties.getProducerProperties(name);
|
||||
producerProperties.setUseNativeEncoding(true);
|
||||
}
|
||||
return createProxyForKStream(name);
|
||||
}
|
||||
|
||||
@@ -70,9 +84,12 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
|
||||
KStream proxy = (KStream) proxyFactory.getProxy();
|
||||
|
||||
//Add the binding properties to the catalogue for later retrieval during further binding steps downstream.
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerBindingProperties(proxy, bindingProperties);
|
||||
// Add the binding properties to the catalogue for later retrieval during further
|
||||
// binding steps downstream.
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties
|
||||
.getBindingProperties(name);
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerBindingProperties(proxy,
|
||||
bindingProperties);
|
||||
return proxy;
|
||||
}
|
||||
|
||||
@@ -85,7 +102,8 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
|
||||
}
|
||||
|
||||
private static class KStreamWrapperHandler implements KStreamWrapper, MethodInterceptor {
|
||||
static class KStreamWrapperHandler
|
||||
implements KStreamWrapper, MethodInterceptor {
|
||||
|
||||
private KStream<Object, Object> delegate;
|
||||
|
||||
@@ -98,16 +116,26 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
if (methodInvocation.getMethod().getDeclaringClass().equals(KStream.class)) {
|
||||
Assert.notNull(this.delegate, "Trying to prepareConsumerBinding " + methodInvocation
|
||||
.getMethod() + " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate, methodInvocation.getArguments());
|
||||
Assert.notNull(this.delegate,
|
||||
"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
|
||||
+ " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate,
|
||||
methodInvocation.getArguments());
|
||||
}
|
||||
else if (methodInvocation.getMethod().getDeclaringClass().equals(KStreamWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this, methodInvocation.getArguments());
|
||||
else if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(KStreamWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this,
|
||||
methodInvocation.getArguments());
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException("Only KStream method invocations are permitted");
|
||||
throw new IllegalStateException(
|
||||
"Only KStream method invocations are permitted");
|
||||
}
|
||||
}
|
||||
|
||||
KStream<Object, Object> getDelegate() {
|
||||
return delegate;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -28,21 +28,23 @@ import org.springframework.core.ResolvableType;
* @author Marius Bogoevici
* @author Soby Chacko
*/
class KStreamStreamListenerParameterAdapter implements StreamListenerParameterAdapter<KStream<?, ?>, KStream<?, ?>> {
class KStreamStreamListenerParameterAdapter
implements StreamListenerParameterAdapter<KStream<?, ?>, KStream<?, ?>> {

private final KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate;

private final KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue;

KStreamStreamListenerParameterAdapter(KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
KStreamStreamListenerParameterAdapter(
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
this.kafkaStreamsMessageConversionDelegate = kafkaStreamsMessageConversionDelegate;
this.KafkaStreamsBindingInformationCatalogue = KafkaStreamsBindingInformationCatalogue;
}

@Override
public boolean supports(Class bindingTargetType, MethodParameter methodParameter) {
return KStream.class.isAssignableFrom(bindingTargetType)
&& KStream.class.isAssignableFrom(methodParameter.getParameterType());
return KafkaStreamsBinderUtils.supportsKStream(methodParameter, bindingTargetType);
}

@Override
@@ -51,11 +53,14 @@ class KStreamStreamListenerParameterAdapter implements StreamListenerParameterAd
ResolvableType resolvableType = ResolvableType.forMethodParameter(parameter);
final Class<?> valueClass = (resolvableType.getGeneric(1).getRawClass() != null)
? (resolvableType.getGeneric(1).getRawClass()) : Object.class;
if (this.KafkaStreamsBindingInformationCatalogue.isUseNativeDecoding(bindingTarget)) {
if (this.KafkaStreamsBindingInformationCatalogue
.isUseNativeDecoding(bindingTarget)) {
return bindingTarget;
}
else {
return this.kafkaStreamsMessageConversionDelegate.deserializeOnInbound(valueClass, bindingTarget);
return this.kafkaStreamsMessageConversionDelegate
.deserializeOnInbound(valueClass, bindingTarget);
}
}

}

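When native decoding is enabled, the configured Serde has already produced the target value type, so the adapter hands the bound KStream through unchanged; otherwise it routes the stream through KafkaStreamsMessageConversionDelegate. The delegate's inbound conversion is conceptually a mapValues through a Spring MessageConverter; the helper below is an assumption about that shape, not the delegate's actual implementation:

import org.apache.kafka.streams.kstream.KStream;

import org.springframework.messaging.converter.MessageConverter;
import org.springframework.messaging.support.MessageBuilder;

public final class InboundConversionSketch {

	private InboundConversionSketch() {
	}

	// Hypothetical helper: convert each raw payload (for example byte[] or String)
	// into the value type declared by the StreamListener method parameter.
	static <V> KStream<Object, V> deserializeOnInbound(KStream<Object, Object> bindingTarget,
			Class<V> valueClass, MessageConverter converter) {
		return bindingTarget.mapValues(raw -> valueClass.cast(
				converter.fromMessage(MessageBuilder.withPayload(raw).build(), valueClass)));
	}

}
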
@@ -1,11 +1,11 @@
/*
* Copyright 2017 the original author or authors.
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -29,16 +29,19 @@ import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
* @author Marius Bogoevici
* @author Soby Chacko
*/
class KStreamStreamListenerResultAdapter implements StreamListenerResultAdapter<KStream, KStreamBoundElementFactory.KStreamWrapper> {
class KStreamStreamListenerResultAdapter implements
StreamListenerResultAdapter<KStream, KStreamBoundElementFactory.KStreamWrapper> {

@Override
public boolean supports(Class<?> resultType, Class<?> boundElement) {
return KStream.class.isAssignableFrom(resultType) && KStream.class.isAssignableFrom(boundElement);
return KStream.class.isAssignableFrom(resultType)
&& KStream.class.isAssignableFrom(boundElement);
}

@Override
@SuppressWarnings("unchecked")
public Closeable adapt(KStream streamListenerResult, KStreamBoundElementFactory.KStreamWrapper boundElement) {
public Closeable adapt(KStream streamListenerResult,
KStreamBoundElementFactory.KStreamWrapper boundElement) {
boundElement.wrap(streamListenerResult);
return new NoOpCloseable();
}
@@ -51,4 +54,5 @@ class KStreamStreamListenerResultAdapter implements StreamListenerResultAdapter<
}

}

}

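Taken together, the parameter adapter and this result adapter are what allow a @StreamListener method to accept a KStream argument and return a KStream: the returned stream is wrapped into the proxy created by KStreamBoundElementFactory. A typical processor exercising both adapters could look like the following (binding names and the bindable interface are illustrative; the binder also ships ready-made interfaces for this purpose):

import org.apache.kafka.streams.kstream.KStream;

import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

@EnableBinding(WordLengthProcessor.KStreamBindings.class)
public class WordLengthProcessor {

	@StreamListener("input")
	@SendTo("output")
	public KStream<String, Long> process(KStream<String, String> words) {
		// The inbound KStream arrives through the parameter adapter; the returned
		// KStream is handed to the result adapter, which wraps it into the bound proxy.
		return words.mapValues(value -> (long) value.length());
	}

	// Illustrative bindable interface.
	public interface KStreamBindings {

		@Input("input")
		KStream<?, ?> input();

		@Output("output")
		KStream<?, ?> output();

	}

}
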
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -35,16 +35,21 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binder.Binder} implementation for {@link KTable}.
|
||||
* This implemenation extends from the {@link AbstractBinder} directly.
|
||||
* {@link org.springframework.cloud.stream.binder.Binder} implementation for
|
||||
* {@link KTable}. This implemenation extends from the {@link AbstractBinder} directly.
|
||||
*
|
||||
* Provides only consumer binding for the bound KTable as output bindings are not allowed on it.
|
||||
* Provides only consumer binding for the bound KTable as output bindings are not allowed
|
||||
* on it.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
class KTableBinder extends
|
||||
// @checkstyle:off
|
||||
AbstractBinder<KTable<Object, Object>, ExtendedConsumerProperties<KafkaStreamsConsumerProperties>, ExtendedProducerProperties<KafkaStreamsProducerProperties>>
|
||||
implements ExtendedPropertiesBinder<KTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
implements
|
||||
ExtendedPropertiesBinder<KTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
@@ -52,10 +57,14 @@ class KTableBinder extends
|
||||
|
||||
private Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers;
|
||||
|
||||
// @checkstyle:off
|
||||
private KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties = new KafkaStreamsExtendedBindingProperties();
|
||||
|
||||
KTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties, KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
// @checkstyle:on
|
||||
|
||||
KTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
this.kafkaTopicProvisioner = kafkaTopicProvisioner;
|
||||
this.kafkaStreamsDlqDispatchers = kafkaStreamsDlqDispatchers;
|
||||
@@ -63,31 +72,43 @@ class KTableBinder extends
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Binding<KTable<Object, Object>> doBindConsumer(String name, String group, KTable<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
protected Binding<KTable<Object, Object>> doBindConsumer(String name, String group,
|
||||
KTable<Object, Object> inputTarget,
|
||||
// @checkstyle:off
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
// @checkstyle:on
|
||||
if (!StringUtils.hasText(group)) {
|
||||
group = this.binderConfigurationProperties.getApplicationId();
|
||||
}
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group, getApplicationContext(),
|
||||
this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties, this.kafkaStreamsDlqDispatchers);
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group,
|
||||
getApplicationContext(), this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties,
|
||||
this.kafkaStreamsDlqDispatchers);
|
||||
return new DefaultBinding<>(name, group, inputTarget, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Binding<KTable<Object, Object>> doBindProducer(String name, KTable<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
throw new UnsupportedOperationException("No producer level binding is allowed for KTable");
|
||||
protected Binding<KTable<Object, Object>> doBindProducer(String name,
|
||||
KTable<Object, Object> outboundBindTarget,
|
||||
// @checkstyle:off
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
// @checkstyle:on
|
||||
throw new UnsupportedOperationException(
|
||||
"No producer level binding is allowed for KTable");
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(channelName);
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedProducerProperties(channelName);
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedProducerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -97,6 +118,12 @@ class KTableBinder extends
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedPropertiesEntryClass();
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedPropertiesEntryClass();
|
||||
}
|
||||
|
||||
public void setKafkaStreamsExtendedBindingProperties(
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties) {
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -21,10 +21,12 @@ import java.util.Map;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.BindingProvider;
|
||||
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
@@ -37,32 +39,49 @@ import org.springframework.context.annotation.Import;
|
||||
*/
|
||||
@SuppressWarnings("ALL")
|
||||
@Configuration
|
||||
@Import(KafkaStreamsBinderUtils.KafkaStreamsMissingBeansRegistrar.class)
|
||||
@BindingProvider
|
||||
@Import({ KafkaAutoConfiguration.class,
|
||||
KafkaStreamsBinderHealthIndicatorConfiguration.class })
|
||||
public class KTableBinderConfiguration {
|
||||
|
||||
@Bean
|
||||
@ConditionalOnBean(name = "outerContext")
|
||||
public BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
return (beanFactory) -> {
|
||||
ApplicationContext outerContext = (ApplicationContext) beanFactory.getBean("outerContext");
|
||||
beanFactory.registerSingleton(KafkaStreamsBinderConfigurationProperties.class.getSimpleName(), outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(KafkaStreamsBindingInformationCatalogue.class.getSimpleName(), outerContext
|
||||
.getBean(KafkaStreamsBindingInformationCatalogue.class));
|
||||
};
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaTopicProvisioner provisioningProvider(KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
public KafkaTopicProvisioner provisioningProvider(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(binderConfigurationProperties, kafkaProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KTableBinder kTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KTableBinder kStreamBinder = new KTableBinder(binderConfigurationProperties, kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
return kStreamBinder;
|
||||
public KTableBinder kTableBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KTableBinder kTableBinder = new KTableBinder(binderConfigurationProperties,
|
||||
kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
kTableBinder.setKafkaStreamsExtendedBindingProperties(kafkaStreamsExtendedBindingProperties);
|
||||
return kTableBinder;
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnBean(name = "outerContext")
|
||||
public static BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
return beanFactory -> {
|
||||
|
||||
// It is safe to call getBean("outerContext") here, because this bean is
|
||||
// registered as first
|
||||
// and as independent from the parent context.
|
||||
ApplicationContext outerContext = (ApplicationContext) beanFactory
|
||||
.getBean("outerContext");
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBinderConfigurationProperties.class.getSimpleName(),
|
||||
outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsExtendedBindingProperties.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsExtendedBindingProperties.class));
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -23,11 +23,13 @@ import org.apache.kafka.streams.kstream.KTable;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binding.AbstractBindingTargetFactory;
|
||||
import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for {@link KTable}
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for
|
||||
* {@link KTable}
|
||||
*
|
||||
* Input bindings are only created as output bindings on KTable are not allowed.
|
||||
*
|
||||
@@ -44,12 +46,18 @@ class KTableBoundElementFactory extends AbstractBindingTargetFactory<KTable> {
|
||||
|
||||
@Override
|
||||
public KTable createInput(String name) {
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
//Always set multiplex to true in the kafka streams binder
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
|
||||
ConsumerProperties consumerProperties = bindingProperties.getConsumer();
|
||||
if (consumerProperties == null) {
|
||||
consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
consumerProperties.setUseNativeDecoding(true);
|
||||
}
|
||||
// Always set multiplex to true in the kafka streams binder
|
||||
consumerProperties.setMultiplex(true);
|
||||
|
||||
KTableBoundElementFactory.KTableWrapperHandler wrapper = new KTableBoundElementFactory.KTableWrapperHandler();
|
||||
ProxyFactory proxyFactory = new ProxyFactory(KTableBoundElementFactory.KTableWrapper.class, KTable.class);
|
||||
ProxyFactory proxyFactory = new ProxyFactory(
|
||||
KTableBoundElementFactory.KTableWrapper.class, KTable.class);
|
||||
proxyFactory.addAdvice(wrapper);
|
||||
|
||||
return (KTable) proxyFactory.getProxy();
|
||||
@@ -58,17 +66,21 @@ class KTableBoundElementFactory extends AbstractBindingTargetFactory<KTable> {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public KTable createOutput(final String name) {
|
||||
throw new UnsupportedOperationException("Outbound operations are not allowed on target type KTable");
|
||||
throw new UnsupportedOperationException(
|
||||
"Outbound operations are not allowed on target type KTable");
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper for KTable proxy.
|
||||
*/
|
||||
public interface KTableWrapper {
|
||||
|
||||
void wrap(KTable<Object, Object> delegate);
|
||||
|
||||
}
|
||||
|
||||
private static class KTableWrapperHandler implements KTableBoundElementFactory.KTableWrapper, MethodInterceptor {
|
||||
private static class KTableWrapperHandler
|
||||
implements KTableBoundElementFactory.KTableWrapper, MethodInterceptor {
|
||||
|
||||
private KTable<Object, Object> delegate;
|
||||
|
||||
@@ -81,16 +93,23 @@ class KTableBoundElementFactory extends AbstractBindingTargetFactory<KTable> {
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
if (methodInvocation.getMethod().getDeclaringClass().equals(KTable.class)) {
|
||||
Assert.notNull(this.delegate, "Trying to prepareConsumerBinding " + methodInvocation
|
||||
.getMethod() + " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate, methodInvocation.getArguments());
|
||||
Assert.notNull(this.delegate,
|
||||
"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
|
||||
+ " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate,
|
||||
methodInvocation.getArguments());
|
||||
}
|
||||
else if (methodInvocation.getMethod().getDeclaringClass().equals(KTableBoundElementFactory.KTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this, methodInvocation.getArguments());
|
||||
else if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(KTableBoundElementFactory.KTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this,
|
||||
methodInvocation.getArguments());
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException("Only KTable method invocations are permitted");
|
||||
throw new IllegalStateException(
|
||||
"Only KTable method invocations are permitted");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
/*
* Copyright 2017 the original author or authors.
* Copyright 2017-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -27,17 +27,22 @@ import org.springframework.context.annotation.Configuration;
/**
* Application support configuration for Kafka Streams binder.
*
* @deprecated Features provided on this class can be directly configured in the application itself using Kafka Streams.
* @author Soby Chacko
*/
@Configuration
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
@Deprecated
public class KafkaStreamsApplicationSupportAutoConfiguration {

@Bean
@ConditionalOnProperty("spring.cloud.stream.kafka.streams.timeWindow.length")
public TimeWindows configuredTimeWindow(KafkaStreamsApplicationSupportProperties processorProperties) {
public TimeWindows configuredTimeWindow(
KafkaStreamsApplicationSupportProperties processorProperties) {
return processorProperties.getTimeWindow().getAdvanceBy() > 0
? TimeWindows.of(processorProperties.getTimeWindow().getLength()).advanceBy(processorProperties.getTimeWindow().getAdvanceBy())
? TimeWindows.of(processorProperties.getTimeWindow().getLength())
.advanceBy(processorProperties.getTimeWindow().getAdvanceBy())
: TimeWindows.of(processorProperties.getTimeWindow().getLength());
}

}

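Because this support class is deprecated, the same windowing is better declared directly in the application, as the @deprecated note suggests. A minimal sketch using the millisecond-based TimeWindows methods of the Kafka Streams 2.x API (the 30s/10s values are illustrative):

import org.apache.kafka.streams.kstream.TimeWindows;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class WindowConfig {

	@Bean
	public TimeWindows timeWindows() {
		// Hopping windows: 30 second windows advancing every 10 seconds.
		return TimeWindows.of(30_000L).advanceBy(10_000L);
	}

}
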
@@ -0,0 +1,81 @@
/*
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.cloud.stream.binder.kafka.streams;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.processor.TaskMetadata;
import org.apache.kafka.streams.processor.ThreadMetadata;

import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.Status;

/**
* Health indicator for Kafka Streams.
*
* @author Arnaud Jardiné
*/
public class KafkaStreamsBinderHealthIndicator extends AbstractHealthIndicator {

private final KafkaStreamsRegistry kafkaStreamsRegistry;

KafkaStreamsBinderHealthIndicator(KafkaStreamsRegistry kafkaStreamsRegistry) {
super("Kafka-streams health check failed");
this.kafkaStreamsRegistry = kafkaStreamsRegistry;
}

@Override
protected void doHealthCheck(Health.Builder builder) throws Exception {
boolean up = true;
for (KafkaStreams kStream : kafkaStreamsRegistry.getKafkaStreams()) {
up &= kStream.state().isRunning();
builder.withDetails(buildDetails(kStream));
}
builder.status(up ? Status.UP : Status.DOWN);
}

private static Map<String, Object> buildDetails(KafkaStreams kStreams) {
final Map<String, Object> details = new HashMap<>();
if (kStreams.state().isRunning()) {
for (ThreadMetadata metadata : kStreams.localThreadsMetadata()) {
details.put("threadName", metadata.threadName());
details.put("threadState", metadata.threadState());
details.put("activeTasks", taskDetails(metadata.activeTasks()));
details.put("standbyTasks", taskDetails(metadata.standbyTasks()));
}
}
return details;
}

private static Map<String, Object> taskDetails(Set<TaskMetadata> taskMetadata) {
final Map<String, Object> details = new HashMap<>();
for (TaskMetadata metadata : taskMetadata) {
details.put("taskId", metadata.taskId());
details.put("partitions",
metadata.topicPartitions().stream().map(
p -> "partition=" + p.partition() + ", topic=" + p.topic())
.collect(Collectors.toList()));
}
return details;
}

}
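The indicator folds the state of every KafkaStreams instance known to the registry into a single UP/DOWN status and attaches per-thread task details. For reference, the same AbstractHealthIndicator pattern reduced to a single KafkaStreams instance looks roughly like this (a sketch, not part of the binder):

import org.apache.kafka.streams.KafkaStreams;

import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.Status;

public class SingleKafkaStreamsHealthIndicator extends AbstractHealthIndicator {

	private final KafkaStreams streams;

	public SingleKafkaStreamsHealthIndicator(KafkaStreams streams) {
		super("Kafka Streams health check failed");
		this.streams = streams;
	}

	@Override
	protected void doHealthCheck(Health.Builder builder) {
		// isRunning() is true for both RUNNING and REBALANCING.
		builder.status(streams.state().isRunning() ? Status.UP : Status.DOWN)
				.withDetail("state", streams.state().name());
	}

}
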
@@ -0,0 +1,42 @@
/*
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.cloud.stream.binder.kafka.streams;

import org.springframework.boot.actuate.autoconfigure.health.ConditionalOnEnabledHealthIndicator;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
* Configuration class for Kafka-streams binder health indicator beans.
*
* @author Arnaud Jardiné
*/
@Configuration
@ConditionalOnClass(name = "org.springframework.boot.actuate.health.HealthIndicator")
@ConditionalOnEnabledHealthIndicator("binders")
class KafkaStreamsBinderHealthIndicatorConfiguration {

@Bean
@ConditionalOnBean(KafkaStreamsRegistry.class)
KafkaStreamsBinderHealthIndicator kafkaStreamsBinderHealthIndicator(
KafkaStreamsRegistry kafkaStreamsRegistry) {
return new KafkaStreamsBinderHealthIndicator(kafkaStreamsRegistry);
}

}
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -18,6 +18,7 @@ package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.stream.Collectors;
|
||||
@@ -31,21 +32,30 @@ import org.springframework.beans.factory.ObjectProvider;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.BinderConfiguration;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.function.FunctionDetectorCondition;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.serde.CompositeNonNativeSerde;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.serde.MessageConverterDelegateSerde;
|
||||
import org.springframework.cloud.stream.binding.BindingService;
|
||||
import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
|
||||
import org.springframework.cloud.stream.config.BinderProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceConfiguration;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.cloud.stream.converter.CompositeMessageConverterFactory;
|
||||
import org.springframework.cloud.stream.function.StreamFunctionProperties;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Conditional;
|
||||
import org.springframework.core.env.ConfigurableEnvironment;
|
||||
import org.springframework.core.env.Environment;
|
||||
import org.springframework.core.env.MapPropertySource;
|
||||
import org.springframework.kafka.config.KafkaStreamsConfiguration;
|
||||
import org.springframework.kafka.core.CleanupConfig;
|
||||
import org.springframework.messaging.converter.CompositeMessageConverter;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -61,71 +71,165 @@ import org.springframework.util.StringUtils;
|
||||
@AutoConfigureAfter(BindingServiceConfiguration.class)
|
||||
public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
|
||||
private static final String KSTREAM_BINDER_TYPE = "kstream";
|
||||
|
||||
private static final String KTABLE_BINDER_TYPE = "ktable";
|
||||
|
||||
private static final String GLOBALKTABLE_BINDER_TYPE = "globalktable";
|
||||
|
||||
@Bean
|
||||
@ConfigurationProperties(prefix = "spring.cloud.stream.kafka.streams.binder")
|
||||
public KafkaStreamsBinderConfigurationProperties binderConfigurationProperties(KafkaProperties kafkaProperties) {
|
||||
public KafkaStreamsBinderConfigurationProperties binderConfigurationProperties(
|
||||
KafkaProperties kafkaProperties, ConfigurableEnvironment environment,
|
||||
BindingServiceProperties properties) {
|
||||
final Map<String, BinderConfiguration> binderConfigurations = getBinderConfigurations(
|
||||
properties);
|
||||
for (Map.Entry<String, BinderConfiguration> entry : binderConfigurations
|
||||
.entrySet()) {
|
||||
final BinderConfiguration binderConfiguration = entry.getValue();
|
||||
final String binderType = binderConfiguration.getBinderType();
|
||||
if (binderType != null && (binderType.equals(KSTREAM_BINDER_TYPE)
|
||||
|| binderType.equals(KTABLE_BINDER_TYPE)
|
||||
|| binderType.equals(GLOBALKTABLE_BINDER_TYPE))) {
|
||||
Map<String, Object> binderProperties = new HashMap<>();
|
||||
this.flatten(null, binderConfiguration.getProperties(), binderProperties);
|
||||
environment.getPropertySources().addFirst(
|
||||
new MapPropertySource("kafkaStreamsBinderEnv", binderProperties));
|
||||
}
|
||||
}
|
||||
return new KafkaStreamsBinderConfigurationProperties(kafkaProperties);
|
||||
}
|
||||
|
||||
// TODO: Lifted from core - good candidate for exposing as a utility method in core.
|
||||
private static Map<String, BinderConfiguration> getBinderConfigurations(
|
||||
BindingServiceProperties properties) {
|
||||
|
||||
Map<String, BinderConfiguration> binderConfigurations = new HashMap<>();
|
||||
Map<String, BinderProperties> declaredBinders = properties.getBinders();
|
||||
|
||||
for (Map.Entry<String, BinderProperties> binderEntry : declaredBinders
|
||||
.entrySet()) {
|
||||
BinderProperties binderProperties = binderEntry.getValue();
|
||||
binderConfigurations.put(binderEntry.getKey(),
|
||||
new BinderConfiguration(binderProperties.getType(),
|
||||
binderProperties.getEnvironment(),
|
||||
binderProperties.isInheritEnvironment(),
|
||||
binderProperties.isDefaultCandidate()));
|
||||
}
|
||||
return binderConfigurations;
|
||||
}
|
||||
|
||||
// TODO: Lifted from core - good candidate for exposing as a utility method in core.
|
||||
@SuppressWarnings("unchecked")
|
||||
private void flatten(String propertyName, Object value,
|
||||
Map<String, Object> flattenedProperties) {
|
||||
if (value instanceof Map) {
|
||||
((Map<Object, Object>) value).forEach((k, v) -> flatten(
|
||||
(propertyName != null ? propertyName + "." : "") + k, v,
|
||||
flattenedProperties));
|
||||
}
|
||||
else {
|
||||
flattenedProperties.put(propertyName, value.toString());
|
||||
}
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaStreamsConfiguration kafkaStreamsConfiguration(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
Environment environment) {
|
||||
KafkaProperties kafkaProperties = binderConfigurationProperties.getKafkaProperties();
|
||||
public KafkaStreamsConfiguration kafkaStreamsConfiguration(
|
||||
KafkaStreamsBinderConfigurationProperties properties,
|
||||
Environment environment) {
|
||||
KafkaProperties kafkaProperties = properties.getKafkaProperties();
|
||||
Map<String, Object> streamsProperties = kafkaProperties.buildStreamsProperties();
|
||||
if (kafkaProperties.getStreams().getApplicationId() == null) {
|
||||
String applicationName = environment.getProperty("spring.application.name");
|
||||
if (applicationName != null) {
|
||||
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationName);
|
||||
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG,
|
||||
applicationName);
|
||||
}
|
||||
}
|
||||
return new KafkaStreamsConfiguration(streamsProperties);
|
||||
}
|
||||
|
||||
@Bean("streamConfigGlobalProperties")
|
||||
public Map<String, Object> streamConfigGlobalProperties(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration) {
|
||||
public Map<String, Object> streamConfigGlobalProperties(
|
||||
KafkaStreamsBinderConfigurationProperties configProperties,
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration, ConfigurableEnvironment environment) {
|
||||
|
||||
Properties properties = kafkaStreamsConfiguration.asProperties();
|
||||
// Override Spring Boot bootstrap server setting if left to default with the value
|
||||
// configured in the binder
|
||||
|
||||
String kafkaConnectionString = configProperties.getKafkaConnectionString();
|
||||
|
||||
if (kafkaConnectionString != null && kafkaConnectionString.equals("localhost:9092")) {
|
||||
//Making sure that the application indeed set a property.
|
||||
String kafkaStreamsBinderBroker = environment.getProperty("spring.cloud.stream.kafka.streams.binder.brokers");
|
||||
|
||||
if (StringUtils.isEmpty(kafkaStreamsBinderBroker)) {
|
||||
//Kafka Streams binder specific property for brokers is not set by the application.
|
||||
//See if there is one configured at the kafka binder level.
|
||||
String kafkaBinderBroker = environment.getProperty("spring.cloud.stream.kafka.binder.brokers");
|
||||
if (!StringUtils.isEmpty(kafkaBinderBroker)) {
|
||||
kafkaConnectionString = kafkaBinderBroker;
|
||||
configProperties.setBrokers(kafkaConnectionString);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ObjectUtils.isEmpty(properties.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG))) {
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, binderConfigurationProperties.getKafkaConnectionString());
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
kafkaConnectionString);
|
||||
}
|
||||
else {
|
||||
Object bootstrapServerConfig = properties.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
Object bootstrapServerConfig = properties
|
||||
.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
if (bootstrapServerConfig instanceof String) {
|
||||
@SuppressWarnings("unchecked")
|
||||
String bootStrapServers = (String) properties
|
||||
.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
if (bootStrapServers.equals("localhost:9092")) {
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, binderConfigurationProperties.getKafkaConnectionString());
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
kafkaConnectionString);
|
||||
}
|
||||
}
|
||||
else if (bootstrapServerConfig instanceof List) {
|
||||
List bootStrapCollection = (List) bootstrapServerConfig;
|
||||
if (bootStrapCollection.size() == 1 && bootStrapCollection.get(0).equals("localhost:9092")) {
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
kafkaConnectionString);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String binderProvidedApplicationId = binderConfigurationProperties.getApplicationId();
|
||||
String binderProvidedApplicationId = configProperties.getApplicationId();
|
||||
if (StringUtils.hasText(binderProvidedApplicationId)) {
|
||||
properties.put(StreamsConfig.APPLICATION_ID_CONFIG, binderProvidedApplicationId);
|
||||
properties.put(StreamsConfig.APPLICATION_ID_CONFIG,
|
||||
binderProvidedApplicationId);
|
||||
}
|
||||
|
||||
properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
|
||||
properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
|
||||
properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG,
|
||||
Serdes.ByteArraySerde.class.getName());
|
||||
properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG,
|
||||
Serdes.ByteArraySerde.class.getName());
|
||||
|
||||
if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
if (configProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
properties.put(
|
||||
StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
LogAndContinueExceptionHandler.class.getName());
|
||||
}
|
||||
else if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
else if (configProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
properties.put(
|
||||
StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
LogAndFailExceptionHandler.class.getName());
|
||||
}
|
||||
else if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
|
||||
properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
else if (configProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
|
||||
properties.put(
|
||||
StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
SendToDlqAndContinue.class.getName());
|
||||
}
|
||||
|
||||
if (!ObjectUtils.isEmpty(binderConfigurationProperties.getConfiguration())) {
|
||||
properties.putAll(binderConfigurationProperties.getConfiguration());
|
||||
if (!ObjectUtils.isEmpty(configProperties.getConfiguration())) {
|
||||
properties.putAll(configProperties.getConfiguration());
|
||||
}
|
||||
return properties.entrySet().stream().collect(
|
||||
Collectors.toMap((e) -> String.valueOf(e.getKey()), Map.Entry::getValue));
|
||||
@@ -138,8 +242,11 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
|
||||
@Bean
|
||||
public KStreamStreamListenerParameterAdapter kstreamStreamListenerParameterAdapter(
|
||||
KafkaStreamsMessageConversionDelegate kstreamBoundMessageConversionDelegate, KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
return new KStreamStreamListenerParameterAdapter(kstreamBoundMessageConversionDelegate, KafkaStreamsBindingInformationCatalogue);
|
||||
KafkaStreamsMessageConversionDelegate kstreamBoundMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
return new KStreamStreamListenerParameterAdapter(
|
||||
kstreamBoundMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -151,41 +258,53 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
KStreamStreamListenerParameterAdapter kafkaStreamListenerParameterAdapter,
|
||||
Collection<StreamListenerResultAdapter> streamListenerResultAdapters,
|
||||
ObjectProvider<CleanupConfig> cleanupConfig) {
|
||||
return new KafkaStreamsStreamListenerSetupMethodOrchestrator(bindingServiceProperties,
|
||||
kafkaStreamsExtendedBindingProperties, keyValueSerdeResolver, kafkaStreamsBindingInformationCatalogue,
|
||||
return new KafkaStreamsStreamListenerSetupMethodOrchestrator(
|
||||
bindingServiceProperties, kafkaStreamsExtendedBindingProperties,
|
||||
keyValueSerdeResolver, kafkaStreamsBindingInformationCatalogue,
|
||||
kafkaStreamListenerParameterAdapter, streamListenerResultAdapters,
|
||||
cleanupConfig.getIfUnique());
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaStreamsMessageConversionDelegate messageConversionDelegate(CompositeMessageConverterFactory compositeMessageConverterFactory,
|
||||
public KafkaStreamsMessageConversionDelegate messageConversionDelegate(
|
||||
CompositeMessageConverter compositeMessageConverter,
|
||||
SendToDlqAndContinue sendToDlqAndContinue,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
return new KafkaStreamsMessageConversionDelegate(compositeMessageConverterFactory, sendToDlqAndContinue,
|
||||
return new KafkaStreamsMessageConversionDelegate(compositeMessageConverter, sendToDlqAndContinue,
|
||||
KafkaStreamsBindingInformationCatalogue, binderConfigurationProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public CompositeNonNativeSerde compositeNonNativeSerde(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
public MessageConverterDelegateSerde messageConverterDelegateSerde(
|
||||
CompositeMessageConverter compositeMessageConverterFactory) {
|
||||
return new MessageConverterDelegateSerde(compositeMessageConverterFactory);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public CompositeNonNativeSerde compositeNonNativeSerde(
|
||||
CompositeMessageConverter compositeMessageConverterFactory) {
|
||||
return new CompositeNonNativeSerde(compositeMessageConverterFactory);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KStreamBoundElementFactory kStreamBoundElementFactory(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
public KStreamBoundElementFactory kStreamBoundElementFactory(
|
||||
BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
return new KStreamBoundElementFactory(bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KTableBoundElementFactory kTableBoundElementFactory(BindingServiceProperties bindingServiceProperties) {
|
||||
public KTableBoundElementFactory kTableBoundElementFactory(
|
||||
BindingServiceProperties bindingServiceProperties) {
|
||||
return new KTableBoundElementFactory(bindingServiceProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public GlobalKTableBoundElementFactory globalKTableBoundElementFactory(BindingServiceProperties bindingServiceProperties) {
|
||||
return new GlobalKTableBoundElementFactory(bindingServiceProperties);
|
||||
public GlobalKTableBoundElementFactory globalKTableBoundElementFactory(
|
||||
BindingServiceProperties properties) {
|
||||
return new GlobalKTableBoundElementFactory(properties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -200,20 +319,19 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
|
||||
@Bean
|
||||
@SuppressWarnings("unchecked")
|
||||
public KeyValueSerdeResolver keyValueSerdeResolver(@Qualifier("streamConfigGlobalProperties") Object streamConfigGlobalProperties,
|
||||
KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties) {
|
||||
return new KeyValueSerdeResolver((Map<String, Object>) streamConfigGlobalProperties, kafkaStreamsBinderConfigurationProperties);
|
||||
@ConditionalOnMissingBean
|
||||
public KeyValueSerdeResolver keyValueSerdeResolver(
|
||||
@Qualifier("streamConfigGlobalProperties") Object streamConfigGlobalProperties,
|
||||
KafkaStreamsBinderConfigurationProperties properties) {
|
||||
return new KeyValueSerdeResolver(
|
||||
(Map<String, Object>) streamConfigGlobalProperties, properties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public QueryableStoreRegistry queryableStoreTypeRegistry(KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
return new QueryableStoreRegistry(kafkaStreamsRegistry);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public InteractiveQueryService interactiveQueryServices(KafkaStreamsRegistry kafkaStreamsRegistry,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
return new InteractiveQueryService(kafkaStreamsRegistry, binderConfigurationProperties);
|
||||
public InteractiveQueryService interactiveQueryServices(
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry,
|
||||
KafkaStreamsBinderConfigurationProperties properties) {
|
||||
return new InteractiveQueryService(kafkaStreamsRegistry, properties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -222,9 +340,10 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
}
|
||||
|
||||
@Bean
|
||||
public StreamsBuilderFactoryManager streamsBuilderFactoryManager(KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
return new StreamsBuilderFactoryManager(kafkaStreamsBindingInformationCatalogue, kafkaStreamsRegistry);
|
||||
public StreamsBuilderFactoryManager streamsBuilderFactoryManager(
|
||||
KafkaStreamsBindingInformationCatalogue catalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
return new StreamsBuilderFactoryManager(catalogue, kafkaStreamsRegistry);
|
||||
}
|
||||
|
||||
@Bean("kafkaStreamsDlqDispatchers")
|
||||
@@ -232,4 +351,19 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
return new HashMap<>();
|
||||
}
|
||||
|
||||
@Bean
@Conditional(FunctionDetectorCondition.class)
public KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor(BindingServiceProperties bindingServiceProperties,
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
KeyValueSerdeResolver keyValueSerdeResolver,
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
ObjectProvider<CleanupConfig> cleanupConfig,
StreamFunctionProperties streamFunctionProperties,
KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties) {
return new KafkaStreamsFunctionProcessor(bindingServiceProperties, kafkaStreamsExtendedBindingProperties,
keyValueSerdeResolver, kafkaStreamsBindingInformationCatalogue, kafkaStreamsMessageConversionDelegate,
cleanupConfig.getIfUnique(), streamFunctionProperties, kafkaStreamsBinderConfigurationProperties);
}

}
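The @Conditional(FunctionDetectorCondition.class) bean above activates KafkaStreamsFunctionProcessor only when function beans are present, i.e. the new functional programming model. A sketch of the kind of application bean it binds, assuming a single KStream input and output; the topology is illustrative:

@Bean
public Function<KStream<String, String>, KStream<String, Long>> wordCount() {
    return input -> input
            // split each value into words, group by word and count occurrences
            .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .groupBy((key, word) -> word)
            .count()
            .toStream();
}

The input and output topics for such a function come from the regular spring.cloud.stream binding configuration; no StreamListener annotations are needed.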
@@ -1,11 +1,11 @@
/*
* Copyright 2018 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -18,18 +18,15 @@ package org.springframework.cloud.stream.binder.kafka.streams;

import java.util.Map;

import org.springframework.beans.factory.config.MethodInvokingFactoryBean;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.apache.kafka.streams.kstream.KStream;

import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.core.MethodParameter;
import org.springframework.util.StringUtils;

/**
@@ -43,70 +40,54 @@ final class KafkaStreamsBinderUtils {

}
static void prepareConsumerBinding(String name, String group, ApplicationContext context,
KafkaTopicProvisioner kafkaTopicProvisioner,
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties,
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
static void prepareConsumerBinding(String name, String group,
ApplicationContext context, KafkaTopicProvisioner kafkaTopicProvisioner,
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties,
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties = new ExtendedConsumerProperties<>(
properties.getExtension());
if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
if (binderConfigurationProperties
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
extendedConsumerProperties.getExtension().setEnableDlq(true);
}

String[] inputTopics = StringUtils.commaDelimitedListToStringArray(name);
for (String inputTopic : inputTopics) {
kafkaTopicProvisioner.provisionConsumerDestination(inputTopic, group, extendedConsumerProperties);
kafkaTopicProvisioner.provisionConsumerDestination(inputTopic, group,
extendedConsumerProperties);
}

if (extendedConsumerProperties.getExtension().isEnableDlq()) {
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = !StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName()) ?
new KafkaStreamsDlqDispatch(extendedConsumerProperties.getExtension().getDlqName(), binderConfigurationProperties,
extendedConsumerProperties.getExtension()) : null;
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = !StringUtils
.isEmpty(extendedConsumerProperties.getExtension().getDlqName())
? new KafkaStreamsDlqDispatch(
extendedConsumerProperties.getExtension()
.getDlqName(),
binderConfigurationProperties,
extendedConsumerProperties.getExtension())
: null;
for (String inputTopic : inputTopics) {
if (StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName())) {
if (StringUtils.isEmpty(
extendedConsumerProperties.getExtension().getDlqName())) {
String dlqName = "error." + inputTopic + "." + group;
kafkaStreamsDlqDispatch = new KafkaStreamsDlqDispatch(dlqName, binderConfigurationProperties,
kafkaStreamsDlqDispatch = new KafkaStreamsDlqDispatch(dlqName,
binderConfigurationProperties,
extendedConsumerProperties.getExtension());
}

SendToDlqAndContinue sendToDlqAndContinue = context.getBean(SendToDlqAndContinue.class);
sendToDlqAndContinue.addKStreamDlqDispatch(inputTopic, kafkaStreamsDlqDispatch);
SendToDlqAndContinue sendToDlqAndContinue = context
.getBean(SendToDlqAndContinue.class);
sendToDlqAndContinue.addKStreamDlqDispatch(inputTopic,
kafkaStreamsDlqDispatch);

kafkaStreamsDlqDispatchers.put(inputTopic, kafkaStreamsDlqDispatch);
}
}
}
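The reformatting keeps the DLQ behavior intact: when serdeError is sendToDlq (or enableDlq is set on the binding), each comma-separated input topic gets its own dispatcher, and the DLQ topic defaults to error.&lt;topic&gt;.&lt;group&gt; unless dlqName is configured. A configuration sketch; the binding name "input" and topic names are illustrative and the exact keys should be verified against the binder documentation:

spring.cloud.stream.bindings.input.destination=orders,returns
spring.cloud.stream.bindings.input.group=audit
spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq
# optional explicit DLQ topic; otherwise error.orders.audit / error.returns.audit are used
spring.cloud.stream.kafka.streams.bindings.input.consumer.dlqName=audit-dlq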
/**
* Helper class for missing bean registration.
*/
static class KafkaStreamsMissingBeansRegistrar implements ImportBeanDefinitionRegistrar {

private static final String BEAN_NAME = "outerContext";

@Override
public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
BeanDefinitionRegistry registry) {
if (registry.containsBeanDefinition(BEAN_NAME)) {

AbstractBeanDefinition configBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
.addPropertyReference("targetObject", BEAN_NAME)
.addPropertyValue("targetMethod", "getBean")
.addPropertyValue("arguments", KafkaStreamsBinderConfigurationProperties.class)
.getBeanDefinition();

registry.registerBeanDefinition(KafkaStreamsBinderConfigurationProperties.class.getSimpleName(), configBean);

AbstractBeanDefinition catalogueBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
.addPropertyReference("targetObject", BEAN_NAME)
.addPropertyValue("targetMethod", "getBean")
.addPropertyValue("arguments", KafkaStreamsBindingInformationCatalogue.class)
.getBeanDefinition();

registry.registerBeanDefinition(KafkaStreamsBindingInformationCatalogue.class.getSimpleName(), catalogueBean);
}
}
static boolean supportsKStream(MethodParameter methodParameter, Class<?> targetBeanClass) {
return KStream.class.isAssignableFrom(targetBeanClass)
&& KStream.class.isAssignableFrom(methodParameter.getParameterType());
}

}
@@ -1,11 +1,11 @@
/*
* Copyright 2018 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,24 +16,28 @@
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.streams.StreamsConfig;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
|
||||
import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
|
||||
/**
|
||||
* A catalogue that provides binding information for Kafka Streams target types such as KStream.
|
||||
* It also keeps a catalogue for the underlying {@link StreamsBuilderFactoryBean} and
|
||||
* {@link StreamsConfig} associated with various {@link org.springframework.cloud.stream.annotation.StreamListener}
|
||||
* methods in the {@link org.springframework.context.ApplicationContext}.
|
||||
* A catalogue that provides binding information for Kafka Streams target types such as
|
||||
* KStream. It also keeps a catalogue for the underlying {@link StreamsBuilderFactoryBean}
|
||||
* and {@link StreamsConfig} associated with various
|
||||
* {@link org.springframework.cloud.stream.annotation.StreamListener} methods in the
|
||||
* {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@@ -45,10 +49,13 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
private final Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = new HashSet<>();
|
||||
|
||||
private ResolvableType outboundKStreamResolvable;
|
||||
|
||||
private final Map<KStream<?, ?>, Serde<?>> keySerdeInfo = new HashMap<>();
|
||||
|
||||
/**
* For a given bounded {@link KStream}, retrieve its corresponding destination
* on the broker.
*
* For a given bounded {@link KStream}, retrieve its corresponding destination on the
* broker.
* @param bindingTarget binding target for KStream
* @return destination topic on Kafka
*/
@@ -59,7 +66,6 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
* Is native decoding enabled on this {@link KStream}.
*
* @param bindingTarget binding target for KStream
* @return true if native decoding is enabled, false otherwise.
*/
@@ -73,7 +79,6 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
|
||||
* Is DLQ enabled for this {@link KStream}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @return true if DLQ is enabled, false otherwise.
|
||||
*/
|
||||
@@ -83,7 +88,6 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
|
||||
* Retrieve the content type associated with a given {@link KStream}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @return content Type associated.
|
||||
*/
|
||||
@@ -94,28 +98,28 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
|
||||
* Register a cache for bounded KStream -> {@link BindingProperties}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @param bindingProperties {@link BindingProperties} for this KStream
|
||||
*/
|
||||
void registerBindingProperties(KStream<?, ?> bindingTarget, BindingProperties bindingProperties) {
|
||||
void registerBindingProperties(KStream<?, ?> bindingTarget,
|
||||
BindingProperties bindingProperties) {
|
||||
this.bindingProperties.put(bindingTarget, bindingProperties);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a cache for bounded KStream -> {@link KafkaStreamsConsumerProperties}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @param kafkaStreamsConsumerProperties consumer properties for this KStream
|
||||
*/
|
||||
void registerConsumerProperties(KStream<?, ?> bindingTarget, KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties) {
|
||||
void registerConsumerProperties(KStream<?, ?> bindingTarget,
|
||||
KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties) {
|
||||
this.consumerProperties.put(bindingTarget, kafkaStreamsConsumerProperties);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a mapping for KStream -> {@link StreamsBuilderFactoryBean}.
|
||||
*
|
||||
* @param streamsBuilderFactoryBean provides the {@link StreamsBuilderFactoryBean} mapped to the KStream
|
||||
* @param streamsBuilderFactoryBean provides the {@link StreamsBuilderFactoryBean}
|
||||
* mapped to the KStream
|
||||
*/
|
||||
void addStreamBuilderFactory(StreamsBuilderFactoryBean streamsBuilderFactoryBean) {
|
||||
this.streamsBuilderFactoryBeans.add(streamsBuilderFactoryBean);
|
||||
@@ -124,4 +128,37 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
Set<StreamsBuilderFactoryBean> getStreamsBuilderFactoryBeans() {
|
||||
return this.streamsBuilderFactoryBeans;
|
||||
}
|
||||
|
||||
void setOutboundKStreamResolvable(ResolvableType outboundResolvable) {
|
||||
this.outboundKStreamResolvable = outboundResolvable;
|
||||
}
|
||||
|
||||
ResolvableType getOutboundKStreamResolvable() {
|
||||
return outboundKStreamResolvable;
|
||||
}
|
||||
|
||||
/**
* Adding a mapping for KStream target to its corresponding KeySerde.
* This is used for sending to DLQ when deserialization fails. See {@link KafkaStreamsMessageConversionDelegate}
* for details.
*
* @param kStreamTarget target KStream
* @param keySerde Serde used for the key
*/
void addKeySerde(KStream<?, ?> kStreamTarget, Serde<?> keySerde) {
this.keySerdeInfo.put(kStreamTarget, keySerde);
}

Serde<?> getKeySerde(KStream<?, ?> kStreamTarget) {
return this.keySerdeInfo.get(kStreamTarget);
}

Map<KStream<?, ?>, BindingProperties> getBindingProperties() {
return bindingProperties;
}

Map<KStream<?, ?>, KafkaStreamsConsumerProperties> getConsumerProperties() {
return consumerProperties;
}
}
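Several of the lookups this catalogue serves (native decoding, content type, key Serde) drive the binder's choice between Serde-based handling and framework message conversion. A property sketch of the two modes, with the binding name "input" assumed for illustration:

# rely on the configured Kafka Streams Serdes, no framework conversion on inbound records
spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true
# or let the binder convert payloads based on the content type
spring.cloud.stream.bindings.input.contentType=application/json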
@@ -1,11 +1,11 @@
/*
* Copyright 2018 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -53,10 +53,11 @@ class KafkaStreamsDlqDispatch {
private final String dlqName;
|
||||
|
||||
KafkaStreamsDlqDispatch(String dlqName,
|
||||
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
|
||||
KafkaConsumerProperties kafkaConsumerProperties) {
|
||||
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
|
||||
KafkaConsumerProperties kafkaConsumerProperties) {
|
||||
ProducerFactory<byte[], byte[]> producerFactory = getProducerFactory(
|
||||
new ExtendedProducerProperties<>(kafkaConsumerProperties.getDlqProducerProperties()),
|
||||
new ExtendedProducerProperties<>(
|
||||
kafkaConsumerProperties.getDlqProducerProperties()),
|
||||
kafkaBinderConfigurationProperties);
|
||||
|
||||
this.kafkaTemplate = new KafkaTemplate<>(producerFactory);
|
||||
@@ -65,55 +66,58 @@ class KafkaStreamsDlqDispatch {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void sendToDlq(byte[] key, byte[] value, int partittion) {
|
||||
ProducerRecord<byte[], byte[]> producerRecord = new ProducerRecord<>(this.dlqName, partittion,
|
||||
key, value, null);
|
||||
ProducerRecord<byte[], byte[]> producerRecord = new ProducerRecord<>(this.dlqName,
|
||||
partittion, key, value, null);
|
||||
|
||||
StringBuilder sb = new StringBuilder().append(" a message with key='")
|
||||
.append(toDisplayString(ObjectUtils.nullSafeToString(key))).append("'")
|
||||
.append(" and payload='")
|
||||
.append(toDisplayString(ObjectUtils.nullSafeToString(value)))
|
||||
.append("'").append(" received from ")
|
||||
.append(partittion);
|
||||
.append(toDisplayString(ObjectUtils.nullSafeToString(value))).append("'")
|
||||
.append(" received from ").append(partittion);
|
||||
ListenableFuture<SendResult<byte[], byte[]>> sentDlq = null;
|
||||
try {
|
||||
sentDlq = this.kafkaTemplate.send(producerRecord);
|
||||
sentDlq.addCallback(new ListenableFutureCallback<SendResult<byte[], byte[]>>() {
|
||||
sentDlq.addCallback(
|
||||
new ListenableFutureCallback<SendResult<byte[], byte[]>>() {
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable ex) {
|
||||
KafkaStreamsDlqDispatch.this.logger.error(
|
||||
"Error sending to DLQ " + sb.toString(), ex);
|
||||
}
|
||||
@Override
|
||||
public void onFailure(Throwable ex) {
|
||||
KafkaStreamsDlqDispatch.this.logger
|
||||
.error("Error sending to DLQ " + sb.toString(), ex);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSuccess(SendResult<byte[], byte[]> result) {
|
||||
if (KafkaStreamsDlqDispatch.this.logger.isDebugEnabled()) {
|
||||
KafkaStreamsDlqDispatch.this.logger.debug(
|
||||
"Sent to DLQ " + sb.toString());
|
||||
}
|
||||
}
|
||||
});
|
||||
@Override
|
||||
public void onSuccess(SendResult<byte[], byte[]> result) {
|
||||
if (KafkaStreamsDlqDispatch.this.logger.isDebugEnabled()) {
|
||||
KafkaStreamsDlqDispatch.this.logger
|
||||
.debug("Sent to DLQ " + sb.toString());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (Exception ex) {
|
||||
if (sentDlq == null) {
|
||||
KafkaStreamsDlqDispatch.this.logger.error(
|
||||
"Error sending to DLQ " + sb.toString(), ex);
|
||||
KafkaStreamsDlqDispatch.this.logger
|
||||
.error("Error sending to DLQ " + sb.toString(), ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private DefaultKafkaProducerFactory<byte[], byte[]> getProducerFactory(ExtendedProducerProperties<KafkaProducerProperties> producerProperties,
|
||||
KafkaBinderConfigurationProperties configurationProperties) {
|
||||
private DefaultKafkaProducerFactory<byte[], byte[]> getProducerFactory(
|
||||
ExtendedProducerProperties<KafkaProducerProperties> producerProperties,
|
||||
KafkaBinderConfigurationProperties configurationProperties) {
|
||||
Map<String, Object> props = new HashMap<>();
|
||||
props.put(ProducerConfig.RETRIES_CONFIG, 0);
|
||||
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
|
||||
props.put(ProducerConfig.ACKS_CONFIG, configurationProperties.getRequiredAcks());
|
||||
Map<String, Object> mergedConfig = configurationProperties.mergedProducerConfiguration();
|
||||
Map<String, Object> mergedConfig = configurationProperties
|
||||
.mergedProducerConfiguration();
|
||||
if (!ObjectUtils.isEmpty(mergedConfig)) {
|
||||
props.putAll(mergedConfig);
|
||||
}
|
||||
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
|
||||
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, configurationProperties.getKafkaConnectionString());
|
||||
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
configurationProperties.getKafkaConnectionString());
|
||||
}
|
||||
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BATCH_SIZE_CONFIG))) {
|
||||
props.put(ProducerConfig.BATCH_SIZE_CONFIG,
|
||||
@@ -130,9 +134,10 @@ class KafkaStreamsDlqDispatch {
|
||||
if (!ObjectUtils.isEmpty(producerProperties.getExtension().getConfiguration())) {
|
||||
props.putAll(producerProperties.getExtension().getConfiguration());
|
||||
}
|
||||
//Always send as byte[] on dlq (the same byte[] that the consumer received)
|
||||
// Always send as byte[] on dlq (the same byte[] that the consumer received)
|
||||
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
|
||||
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
|
||||
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
ByteArraySerializer.class);
|
||||
|
||||
return new DefaultKafkaProducerFactory<>(props);
|
||||
}
|
||||
@@ -143,4 +148,5 @@ class KafkaStreamsDlqDispatch {
}
return original.substring(0, 50) + "...";
}

}
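The dispatcher builds a dedicated byte[]/byte[] producer by merging the binder-wide producer configuration with the binding's dlqProducerProperties, as the getProducerFactory hunk above shows. A hedged configuration sketch (keys as I understand the Kafka binder exposes them; verify against the reference documentation before relying on them):

spring.cloud.stream.kafka.streams.binder.brokers=localhost:9092
spring.cloud.stream.kafka.streams.binder.requiredAcks=all
# per-binding overrides applied to the DLQ producer (binding name "input" is illustrative)
spring.cloud.stream.kafka.streams.bindings.input.consumer.dlqProducerProperties.configuration.batch.size=16384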
@@ -0,0 +1,366 @@
/*
* Copyright 2019-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.StreamsBuilder;
|
||||
import org.apache.kafka.streams.Topology;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanFactory;
|
||||
import org.springframework.beans.factory.BeanFactoryAware;
|
||||
import org.springframework.beans.factory.BeanInitializationException;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.function.KafkaStreamsBindableProxyFactory;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binding.StreamListenerErrorMessages;
|
||||
import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.cloud.stream.function.StreamFunctionProperties;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
import org.springframework.kafka.core.CleanupConfig;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
* @since 2.2.0
|
||||
*/
|
||||
public class KafkaStreamsFunctionProcessor extends AbstractKafkaStreamsBinderProcessor implements BeanFactoryAware {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KafkaStreamsFunctionProcessor.class);
|
||||
private static final String OUTBOUND = "outbound";
|
||||
|
||||
private final BindingServiceProperties bindingServiceProperties;
|
||||
private final Map<String, StreamsBuilderFactoryBean> methodStreamsBuilderFactoryBeanMap = new HashMap<>();
|
||||
private final KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties;
|
||||
private final KeyValueSerdeResolver keyValueSerdeResolver;
|
||||
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
|
||||
private final KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate;
|
||||
|
||||
private BeanFactory beanFactory;
|
||||
private StreamFunctionProperties streamFunctionProperties;
|
||||
private KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties;
|
||||
|
||||
public KafkaStreamsFunctionProcessor(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
CleanupConfig cleanupConfig,
|
||||
StreamFunctionProperties streamFunctionProperties,
|
||||
KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties) {
|
||||
super(bindingServiceProperties, kafkaStreamsBindingInformationCatalogue, kafkaStreamsExtendedBindingProperties,
|
||||
keyValueSerdeResolver, cleanupConfig);
|
||||
this.bindingServiceProperties = bindingServiceProperties;
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
this.keyValueSerdeResolver = keyValueSerdeResolver;
|
||||
this.kafkaStreamsBindingInformationCatalogue = kafkaStreamsBindingInformationCatalogue;
|
||||
this.kafkaStreamsMessageConversionDelegate = kafkaStreamsMessageConversionDelegate;
|
||||
this.streamFunctionProperties = streamFunctionProperties;
|
||||
this.kafkaStreamsBinderConfigurationProperties = kafkaStreamsBinderConfigurationProperties;
|
||||
}
|
||||
|
||||
private Map<String, ResolvableType> buildTypeMap(ResolvableType resolvableType,
|
||||
KafkaStreamsBindableProxyFactory kafkaStreamsBindableProxyFactory) {
|
||||
Map<String, ResolvableType> resolvableTypeMap = new LinkedHashMap<>();
|
||||
if (resolvableType != null && resolvableType.getRawClass() != null) {
|
||||
int inputCount = 1;
|
||||
|
||||
ResolvableType currentOutputGeneric;
|
||||
if (resolvableType.getRawClass().isAssignableFrom(BiFunction.class) ||
|
||||
resolvableType.getRawClass().isAssignableFrom(BiConsumer.class)) {
|
||||
inputCount = 2;
|
||||
currentOutputGeneric = resolvableType.getGeneric(2);
|
||||
}
|
||||
else {
|
||||
currentOutputGeneric = resolvableType.getGeneric(1);
|
||||
}
|
||||
while (currentOutputGeneric.getRawClass() != null && functionOrConsumerFound(currentOutputGeneric)) {
|
||||
inputCount++;
|
||||
currentOutputGeneric = currentOutputGeneric.getGeneric(1);
|
||||
}
|
||||
final Set<String> inputs = new LinkedHashSet<>(kafkaStreamsBindableProxyFactory.getInputs());
|
||||
|
||||
final Iterator<String> iterator = inputs.iterator();
|
||||
|
||||
popuateResolvableTypeMap(resolvableType, resolvableTypeMap, iterator);
|
||||
|
||||
ResolvableType iterableResType = resolvableType;
|
||||
int i = resolvableType.getRawClass().isAssignableFrom(BiFunction.class) ||
|
||||
resolvableType.getRawClass().isAssignableFrom(BiConsumer.class) ? 2 : 1;
|
||||
ResolvableType outboundResolvableType;
|
||||
if (i == inputCount) {
|
||||
outboundResolvableType = iterableResType.getGeneric(i);
|
||||
}
|
||||
else {
|
||||
while (i < inputCount && iterator.hasNext()) {
|
||||
iterableResType = iterableResType.getGeneric(1);
|
||||
if (iterableResType.getRawClass() != null &&
|
||||
functionOrConsumerFound(iterableResType)) {
|
||||
popuateResolvableTypeMap(iterableResType, resolvableTypeMap, iterator);
|
||||
}
|
||||
i++;
|
||||
}
|
||||
outboundResolvableType = iterableResType.getGeneric(1);
|
||||
}
|
||||
resolvableTypeMap.put(OUTBOUND, outboundResolvableType);
|
||||
}
|
||||
return resolvableTypeMap;
|
||||
}
|
||||
|
||||
private boolean functionOrConsumerFound(ResolvableType iterableResType) {
|
||||
return iterableResType.getRawClass().equals(Function.class) ||
|
||||
iterableResType.getRawClass().equals(Consumer.class);
|
||||
}
|
||||
|
||||
private void popuateResolvableTypeMap(ResolvableType resolvableType, Map<String, ResolvableType> resolvableTypeMap,
|
||||
Iterator<String> iterator) {
|
||||
final String next = iterator.next();
|
||||
resolvableTypeMap.put(next, resolvableType.getGeneric(0));
|
||||
if (resolvableType.getRawClass() != null &&
|
||||
(resolvableType.getRawClass().isAssignableFrom(BiFunction.class) ||
|
||||
resolvableType.getRawClass().isAssignableFrom(BiConsumer.class))
|
||||
&& iterator.hasNext()) {
|
||||
resolvableTypeMap.put(iterator.next(), resolvableType.getGeneric(1));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This method must be kept stateless. In the case of multiple function beans in an application,
|
||||
* isolated {@link KafkaStreamsBindableProxyFactory} instances are passed in separately for those functions. If the
|
||||
* state is shared between invocations, that will create potential race conditions. Hence, invocations of this method
|
||||
* should not be dependent on state modified by a previous invocation.
|
||||
*
|
||||
* @param resolvableType type of the binding
|
||||
* @param functionName bean name of the function
|
||||
* @param kafkaStreamsBindableProxyFactory bindable proxy factory for the Kafka Streams type
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public void setupFunctionInvokerForKafkaStreams(ResolvableType resolvableType, String functionName,
|
||||
KafkaStreamsBindableProxyFactory kafkaStreamsBindableProxyFactory) {
|
||||
final Map<String, ResolvableType> stringResolvableTypeMap = buildTypeMap(resolvableType, kafkaStreamsBindableProxyFactory);
|
||||
ResolvableType outboundResolvableType = stringResolvableTypeMap.remove(OUTBOUND);
|
||||
Object[] adaptedInboundArguments = adaptAndRetrieveInboundArguments(stringResolvableTypeMap, functionName);
|
||||
try {
|
||||
if (resolvableType.getRawClass() != null && resolvableType.getRawClass().equals(Consumer.class)) {
|
||||
Consumer<Object> consumer = (Consumer) this.beanFactory.getBean(functionName);
|
||||
consumer.accept(adaptedInboundArguments[0]);
|
||||
}
|
||||
else if (resolvableType.getRawClass() != null && resolvableType.getRawClass().equals(BiConsumer.class)) {
|
||||
BiConsumer<Object, Object> biConsumer = (BiConsumer) this.beanFactory.getBean(functionName);
|
||||
biConsumer.accept(adaptedInboundArguments[0], adaptedInboundArguments[1]);
|
||||
}
|
||||
else {
|
||||
Object result;
|
||||
if (resolvableType.getRawClass() != null && resolvableType.getRawClass().equals(BiFunction.class)) {
|
||||
BiFunction<Object, Object, Object> biFunction = (BiFunction) beanFactory.getBean(functionName);
|
||||
result = biFunction.apply(adaptedInboundArguments[0], adaptedInboundArguments[1]);
|
||||
}
|
||||
else {
|
||||
Function<Object, Object> function = (Function) beanFactory.getBean(functionName);
|
||||
result = function.apply(adaptedInboundArguments[0]);
|
||||
}
|
||||
int i = 1;
|
||||
while (result instanceof Function || result instanceof Consumer) {
|
||||
if (result instanceof Function) {
|
||||
result = ((Function) result).apply(adaptedInboundArguments[i]);
|
||||
}
|
||||
else {
|
||||
((Consumer) result).accept(adaptedInboundArguments[i]);
|
||||
result = null;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
if (result != null) {
|
||||
kafkaStreamsBindingInformationCatalogue.setOutboundKStreamResolvable(
|
||||
outboundResolvableType != null ? outboundResolvableType : resolvableType.getGeneric(1));
|
||||
final Set<String> outputs = new TreeSet<>(kafkaStreamsBindableProxyFactory.getOutputs());
|
||||
final Iterator<String> outboundDefinitionIterator = outputs.iterator();
|
||||
|
||||
if (result.getClass().isArray()) {
|
||||
// Binding target as the output bindings were deferred in the KafkaStreamsBindableProxyFactory
|
||||
// due to the fact that it didn't know the returned array size. At this point in the execution,
|
||||
// we know exactly the number of outbound components (from the array length), so do the binding.
|
||||
final int length = ((Object[]) result).length;
|
||||
|
||||
List<String> outputBindings = getOutputBindings(functionName, length);
|
||||
Iterator<String> iterator = outputBindings.iterator();
|
||||
BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory;
|
||||
Object[] outboundKStreams = (Object[]) result;
|
||||
|
||||
for (int ij = 0; ij < length; ij++) {
|
||||
|
||||
String next = iterator.next();
|
||||
kafkaStreamsBindableProxyFactory.addOutputBinding(next, KStream.class);
|
||||
RootBeanDefinition rootBeanDefinition1 = new RootBeanDefinition();
|
||||
rootBeanDefinition1.setInstanceSupplier(() -> kafkaStreamsBindableProxyFactory.getOutputHolders().get(next).getBoundTarget());
|
||||
registry.registerBeanDefinition(next, rootBeanDefinition1);
|
||||
|
||||
Object targetBean = this.applicationContext.getBean(next);
|
||||
|
||||
KStreamBoundElementFactory.KStreamWrapper
|
||||
boundElement = (KStreamBoundElementFactory.KStreamWrapper) targetBean;
|
||||
boundElement.wrap((KStream) outboundKStreams[ij]);
|
||||
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (outboundDefinitionIterator.hasNext()) {
|
||||
final String next = outboundDefinitionIterator.next();
|
||||
Object targetBean = this.applicationContext.getBean(next);
|
||||
KStreamBoundElementFactory.KStreamWrapper
|
||||
boundElement = (KStreamBoundElementFactory.KStreamWrapper) targetBean;
|
||||
boundElement.wrap((KStream) result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new BeanInitializationException("Cannot setup function invoker for this Kafka Streams function.", ex);
|
||||
}
|
||||
}
|
||||
|
||||
private List<String> getOutputBindings(String functionName, int outputs) {
|
||||
List<String> outputBindings = this.streamFunctionProperties.getOutputBindings().get(functionName);
|
||||
List<String> outputBindingNames = new ArrayList<>();
|
||||
if (!CollectionUtils.isEmpty(outputBindings)) {
|
||||
outputBindingNames.addAll(outputBindings);
|
||||
return outputBindingNames;
|
||||
}
|
||||
else {
|
||||
for (int i = 0; i < outputs; i++) {
|
||||
outputBindingNames.add(String.format("%s_%s_%d", functionName, KafkaStreamsBindableProxyFactory.DEFAULT_OUTPUT_SUFFIX, i));
|
||||
}
|
||||
}
|
||||
return outputBindingNames;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
private Object[] adaptAndRetrieveInboundArguments(Map<String, ResolvableType> stringResolvableTypeMap,
|
||||
String functionName) {
|
||||
Object[] arguments = new Object[stringResolvableTypeMap.size()];
|
||||
int i = 0;
|
||||
for (String input : stringResolvableTypeMap.keySet()) {
|
||||
Class<?> parameterType = stringResolvableTypeMap.get(input).getRawClass();
|
||||
|
||||
if (input != null) {
|
||||
Object targetBean = applicationContext.getBean(input);
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(input);
|
||||
//Retrieve the StreamsConfig created for this method if available.
|
||||
//Otherwise, create the StreamsBuilderFactory and get the underlying config.
|
||||
if (!this.methodStreamsBuilderFactoryBeanMap.containsKey(functionName)) {
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = buildStreamsBuilderAndRetrieveConfig(functionName, applicationContext, input, kafkaStreamsBinderConfigurationProperties);
|
||||
this.methodStreamsBuilderFactoryBeanMap.put(functionName, streamsBuilderFactoryBean);
|
||||
}
|
||||
try {
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean =
|
||||
this.methodStreamsBuilderFactoryBeanMap.get(functionName);
|
||||
StreamsBuilder streamsBuilder = streamsBuilderFactoryBean.getObject();
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties =
|
||||
this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(input);
|
||||
//get state store spec
|
||||
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver.getInboundKeySerde(extendedConsumerProperties, stringResolvableTypeMap.get(input));
|
||||
LOG.info("Key Serde used for " + input + ": " + keySerde.getClass().getName());
|
||||
Serde<?> valueSerde = bindingServiceProperties.getConsumerProperties(input).isUseNativeDecoding() ?
|
||||
getValueSerde(input, extendedConsumerProperties, stringResolvableTypeMap.get(input)) : Serdes.ByteArray();
|
||||
LOG.info("Value Serde used for " + input + ": " + valueSerde.getClass().getName());
|
||||
final Topology.AutoOffsetReset autoOffsetReset = getAutoOffsetReset(input, extendedConsumerProperties);
|
||||
|
||||
if (parameterType.isAssignableFrom(KStream.class)) {
|
||||
KStream<?, ?> stream = getKStream(input, bindingProperties, streamsBuilder, keySerde, valueSerde, autoOffsetReset);
|
||||
KStreamBoundElementFactory.KStreamWrapper kStreamWrapper =
|
||||
(KStreamBoundElementFactory.KStreamWrapper) targetBean;
|
||||
//wrap the proxy created during the initial target type binding with real object (KStream)
|
||||
kStreamWrapper.wrap((KStream<Object, Object>) stream);
|
||||
|
||||
this.kafkaStreamsBindingInformationCatalogue.addKeySerde((KStream) kStreamWrapper, keySerde);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
|
||||
if (KStream.class.isAssignableFrom(stringResolvableTypeMap.get(input).getRawClass())) {
|
||||
final Class<?> valueClass =
|
||||
(stringResolvableTypeMap.get(input).getGeneric(1).getRawClass() != null)
|
||||
? (stringResolvableTypeMap.get(input).getGeneric(1).getRawClass()) : Object.class;
|
||||
if (this.kafkaStreamsBindingInformationCatalogue.isUseNativeDecoding(
|
||||
(KStream) kStreamWrapper)) {
|
||||
arguments[i] = stream;
|
||||
}
|
||||
else {
|
||||
arguments[i] = this.kafkaStreamsMessageConversionDelegate.deserializeOnInbound(
|
||||
valueClass, stream);
|
||||
}
|
||||
}
|
||||
|
||||
if (arguments[i] == null) {
|
||||
arguments[i] = stream;
|
||||
}
|
||||
Assert.notNull(arguments[i], "Problems encountered while adapting the function argument.");
|
||||
}
|
||||
else {
|
||||
handleKTableGlobalKTableInputs(arguments, i, input, parameterType, targetBean, streamsBuilderFactoryBean,
|
||||
streamsBuilder, extendedConsumerProperties, keySerde, valueSerde, autoOffsetReset);
|
||||
}
|
||||
i++;
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException(StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
|
||||
}
|
||||
}
|
||||
return arguments;
|
||||
}
|
||||
|
||||
private KStream<?, ?> getkStream(String inboundName,
|
||||
BindingProperties bindingProperties,
|
||||
StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return getKStream(inboundName, bindingProperties, streamsBuilder, keySerde, valueSerde, autoOffsetReset);
|
||||
}
|
||||
|
||||
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
this.beanFactory = beanFactory;
}
}
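KafkaStreamsFunctionProcessor resolves the function bean's generic signature, adapts each input to a KStream/KTable/GlobalKTable argument, and, when the function returns a KStream[], binds the outputs lazily using StreamFunctionProperties.outputBindings or the default &lt;function&gt;_&lt;suffix&gt;_&lt;index&gt; names produced by getOutputBindings. A sketch of such a multi-output function; the Order type and the routing predicate are hypothetical:

@Bean
public Function<KStream<String, Order>, KStream<String, Order>[]> routeOrders() {
    // branch() yields one KStream per predicate; the processor wraps each array
    // element into its own output binding once the array length is known
    return input -> input.branch(
            (key, order) -> order.isPriority(),
            (key, order) -> true);
}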
@@ -1,11 +1,11 @@
/*
* Copyright 2018 the original author or authors.
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -19,40 +19,45 @@ package org.springframework.cloud.stream.binder.kafka.streams;
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.common.header.Header;
|
||||
import org.apache.kafka.common.header.Headers;
|
||||
import org.apache.kafka.common.header.internals.RecordHeader;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serializer;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.processor.Processor;
|
||||
import org.apache.kafka.streams.processor.ProcessorContext;
|
||||
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.converter.CompositeMessageConverterFactory;
|
||||
import org.springframework.messaging.Message;
|
||||
import org.springframework.messaging.MessageHeaders;
|
||||
import org.springframework.messaging.converter.CompositeMessageConverter;
|
||||
import org.springframework.messaging.converter.MessageConverter;
|
||||
import org.springframework.messaging.support.MessageBuilder;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Delegate for handling all framework level message conversions inbound and outbound on {@link KStream}.
|
||||
* If native encoding is not enabled, then serialization will be performed on outbound messages based
|
||||
* on a contentType. Similarly, if native decoding is not enabled, deserialization will be performed on
|
||||
* inbound messages based on a contentType. Based on the contentType, a {@link MessageConverter} will
|
||||
* be resolved.
|
||||
* Delegate for handling all framework level message conversions inbound and outbound on
|
||||
* {@link KStream}. If native encoding is not enabled, then serialization will be
|
||||
* performed on outbound messages based on a contentType. Similarly, if native decoding is
|
||||
* not enabled, deserialization will be performed on inbound messages based on a
|
||||
* contentType. Based on the contentType, a {@link MessageConverter} will be resolved.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KafkaStreamsMessageConversionDelegate.class);
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(KafkaStreamsMessageConversionDelegate.class);
|
||||
|
||||
private static final ThreadLocal<KeyValue<Object, Object>> keyValueThreadLocal = new ThreadLocal<>();
|
||||
|
||||
private final CompositeMessageConverterFactory compositeMessageConverterFactory;
|
||||
private final CompositeMessageConverter compositeMessageConverter;
|
||||
|
||||
private final SendToDlqAndContinue sendToDlqAndContinue;
|
||||
|
||||
@@ -60,11 +65,12 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties kstreamBinderConfigurationProperties;
|
||||
|
||||
KafkaStreamsMessageConversionDelegate(CompositeMessageConverterFactory compositeMessageConverterFactory,
|
||||
SendToDlqAndContinue sendToDlqAndContinue,
|
||||
KafkaStreamsBindingInformationCatalogue kstreamBindingInformationCatalogue,
|
||||
KafkaStreamsBinderConfigurationProperties kstreamBinderConfigurationProperties) {
|
||||
this.compositeMessageConverterFactory = compositeMessageConverterFactory;
|
||||
KafkaStreamsMessageConversionDelegate(
|
||||
CompositeMessageConverter compositeMessageConverter,
|
||||
SendToDlqAndContinue sendToDlqAndContinue,
|
||||
KafkaStreamsBindingInformationCatalogue kstreamBindingInformationCatalogue,
|
||||
KafkaStreamsBinderConfigurationProperties kstreamBinderConfigurationProperties) {
|
||||
this.compositeMessageConverter = compositeMessageConverter;
|
||||
this.sendToDlqAndContinue = sendToDlqAndContinue;
|
||||
this.kstreamBindingInformationCatalogue = kstreamBindingInformationCatalogue;
|
||||
this.kstreamBinderConfigurationProperties = kstreamBinderConfigurationProperties;
|
||||
@@ -72,86 +78,139 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
/**
|
||||
* Serialize {@link KStream} records on outbound based on contentType.
|
||||
*
|
||||
* @param outboundBindTarget outbound KStream target
|
||||
* @return serialized KStream
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
public KStream serializeOnOutbound(KStream<?, ?> outboundBindTarget) {
|
||||
String contentType = this.kstreamBindingInformationCatalogue.getContentType(outboundBindTarget);
|
||||
MessageConverter messageConverter = this.compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
String contentType = this.kstreamBindingInformationCatalogue
|
||||
.getContentType(outboundBindTarget);
|
||||
MessageConverter messageConverter = this.compositeMessageConverter;
|
||||
final PerRecordContentTypeHolder perRecordContentTypeHolder = new PerRecordContentTypeHolder();
|
||||
|
||||
return outboundBindTarget.mapValues((v) -> {
|
||||
Message<?> message = v instanceof Message<?> ? (Message<?>) v :
|
||||
MessageBuilder.withPayload(v).build();
|
||||
final KStream<?, ?> kStreamWithEnrichedHeaders = outboundBindTarget.mapValues((v) -> {
|
||||
Message<?> message = v instanceof Message<?> ? (Message<?>) v
|
||||
: MessageBuilder.withPayload(v).build();
|
||||
Map<String, Object> headers = new HashMap<>(message.getHeaders());
|
||||
if (!StringUtils.isEmpty(contentType)) {
|
||||
headers.put(MessageHeaders.CONTENT_TYPE, contentType);
|
||||
}
|
||||
MessageHeaders messageHeaders = new MessageHeaders(headers);
|
||||
return
|
||||
messageConverter.toMessage(message.getPayload(),
|
||||
messageHeaders).getPayload();
|
||||
final Message<?> convertedMessage = messageConverter.toMessage(message.getPayload(), messageHeaders);
|
||||
perRecordContentTypeHolder.setContentType((String) messageHeaders.get(MessageHeaders.CONTENT_TYPE));
|
||||
return convertedMessage.getPayload();
|
||||
});
|
||||
|
||||
kStreamWithEnrichedHeaders.process(() -> new Processor() {
|
||||
|
||||
ProcessorContext context;
|
||||
|
||||
@Override
|
||||
public void init(ProcessorContext context) {
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Object key, Object value) {
|
||||
if (perRecordContentTypeHolder.contentType != null) {
|
||||
this.context.headers().remove(MessageHeaders.CONTENT_TYPE);
|
||||
final Header header;
|
||||
try {
|
||||
header = new RecordHeader(MessageHeaders.CONTENT_TYPE,
|
||||
new ObjectMapper().writeValueAsBytes(perRecordContentTypeHolder.contentType));
|
||||
this.context.headers().add(header);
|
||||
}
|
||||
catch (Exception e) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("Could not add content type header");
|
||||
}
|
||||
}
|
||||
perRecordContentTypeHolder.unsetContentType();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
return kStreamWithEnrichedHeaders;
|
||||
}
|
||||
|
||||
/**
|
||||
* Deserialize incoming {@link KStream} based on content type.
|
||||
*
|
||||
* @param valueClass on KStream value
|
||||
* @param bindingTarget inbound KStream target
|
||||
* @return deserialized KStream
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public KStream deserializeOnInbound(Class<?> valueClass, KStream<?, ?> bindingTarget) {
|
||||
MessageConverter messageConverter = this.compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
public KStream deserializeOnInbound(Class<?> valueClass,
|
||||
KStream<?, ?> bindingTarget) {
|
||||
MessageConverter messageConverter = this.compositeMessageConverter;
|
||||
final PerRecordContentTypeHolder perRecordContentTypeHolder = new PerRecordContentTypeHolder();
|
||||
|
||||
resolvePerRecordContentType(bindingTarget, perRecordContentTypeHolder);
|
||||
|
||||
//Deserialize using a branching strategy
|
||||
// Deserialize using a branching strategy
|
||||
KStream<?, ?>[] branch = bindingTarget.branch(
|
||||
//First filter where the message is converted and return true if everything went well, return false otherwise.
|
||||
(o, o2) -> {
|
||||
boolean isValidRecord = false;
|
||||
// First filter where the message is converted and return true if
|
||||
// everything went well, return false otherwise.
|
||||
(o, o2) -> {
|
||||
boolean isValidRecord = false;
|
||||
|
||||
try {
|
||||
//if the record is a tombstone, ignore and exit from processing further.
|
||||
if (o2 != null) {
|
||||
if (o2 instanceof Message || o2 instanceof String || o2 instanceof byte[]) {
|
||||
Message<?> m1 = null;
|
||||
if (o2 instanceof Message) {
|
||||
m1 = perRecordContentTypeHolder.contentType != null
|
||||
? MessageBuilder.fromMessage((Message<?>) o2).setHeader(MessageHeaders.CONTENT_TYPE,
|
||||
perRecordContentTypeHolder.contentType).build() : (Message<?>) o2;
|
||||
try {
|
||||
// if the record is a tombstone, ignore and exit from processing
|
||||
// further.
|
||||
if (o2 != null) {
|
||||
if (o2 instanceof Message || o2 instanceof String
|
||||
|| o2 instanceof byte[]) {
|
||||
Message<?> m1 = null;
|
||||
if (o2 instanceof Message) {
|
||||
m1 = perRecordContentTypeHolder.contentType != null
|
||||
? MessageBuilder.fromMessage((Message<?>) o2)
|
||||
.setHeader(
|
||||
MessageHeaders.CONTENT_TYPE,
|
||||
perRecordContentTypeHolder.contentType)
|
||||
.build()
|
||||
: (Message<?>) o2;
|
||||
}
|
||||
else {
|
||||
m1 = perRecordContentTypeHolder.contentType != null
|
||||
? MessageBuilder.withPayload(o2).setHeader(
|
||||
MessageHeaders.CONTENT_TYPE,
|
||||
perRecordContentTypeHolder.contentType)
|
||||
.build()
|
||||
: MessageBuilder.withPayload(o2).build();
|
||||
}
|
||||
convertAndSetMessage(o, valueClass, messageConverter, m1);
|
||||
}
|
||||
else {
|
||||
m1 = perRecordContentTypeHolder.contentType != null ? MessageBuilder.withPayload(o2)
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, perRecordContentTypeHolder.contentType).build() : MessageBuilder.withPayload(o2).build();
|
||||
keyValueThreadLocal.set(new KeyValue<>(o, o2));
|
||||
}
|
||||
convertAndSetMessage(o, valueClass, messageConverter, m1);
|
||||
isValidRecord = true;
|
||||
}
|
||||
else {
|
||||
keyValueThreadLocal.set(new KeyValue<>(o, o2));
|
||||
LOG.info(
|
||||
"Received a tombstone record. This will be skipped from further processing.");
|
||||
}
|
||||
isValidRecord = true;
|
||||
}
|
||||
else {
|
||||
LOG.info("Received a tombstone record. This will be skipped from further processing.");
|
||||
catch (Exception e) {
|
||||
LOG.warn(
|
||||
"Deserialization has failed. This will be skipped from further processing.",
|
||||
e);
|
||||
// pass through
|
||||
}
|
||||
}
|
||||
catch (Exception ignored) {
|
||||
//pass through
|
||||
}
|
||||
return isValidRecord;
|
||||
},
|
||||
//second filter that catches any messages for which an exception thrown in the first filter above.
|
||||
(k, v) -> true
|
||||
);
|
||||
//process errors from the second filter in the branch above.
|
||||
return isValidRecord;
|
||||
},
|
||||
// second filter that catches any messages for which an exception thrown
|
||||
// in the first filter above.
|
||||
(k, v) -> true);
|
||||
// process errors from the second filter in the branch above.
|
||||
processErrorFromDeserialization(bindingTarget, branch[1]);
|
||||
|
||||
//first branch above is the branch where the messages are converted, let it go through further processing.
|
||||
// first branch above is the branch where the messages are converted, let it go
|
||||
// through further processing.
|
||||
return branch[0].mapValues((o2) -> {
|
||||
Object objectValue = keyValueThreadLocal.get().value;
|
||||
keyValueThreadLocal.remove();
|
||||
@@ -160,7 +219,8 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private void resolvePerRecordContentType(KStream<?, ?> outboundBindTarget, PerRecordContentTypeHolder perRecordContentTypeHolder) {
|
||||
private void resolvePerRecordContentType(KStream<?, ?> outboundBindTarget,
|
||||
PerRecordContentTypeHolder perRecordContentTypeHolder) {
|
||||
outboundBindTarget.process(() -> new Processor() {
|
||||
|
||||
ProcessorContext context;
|
||||
@@ -173,11 +233,14 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
@Override
|
||||
public void process(Object key, Object value) {
|
||||
final Headers headers = this.context.headers();
|
||||
final Iterable<Header> contentTypes = headers.headers(MessageHeaders.CONTENT_TYPE);
|
||||
final Iterable<Header> contentTypes = headers
|
||||
.headers(MessageHeaders.CONTENT_TYPE);
|
||||
if (contentTypes != null && contentTypes.iterator().hasNext()) {
|
||||
final String contentType = new String(contentTypes.iterator().next().value());
|
||||
//remove leading and trailing quotes
|
||||
final String cleanContentType = StringUtils.replace(contentType, "\"", "");
|
||||
final String contentType = new String(
|
||||
contentTypes.iterator().next().value());
|
||||
// remove leading and trailing quotes
|
||||
final String cleanContentType = StringUtils.replace(contentType, "\"",
|
||||
"");
|
||||
perRecordContentTypeHolder.setContentType(cleanContentType);
|
||||
}
|
||||
}
|
||||
@@ -189,7 +252,8 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
});
|
||||
}
|
||||
|
||||
private void convertAndSetMessage(Object o, Class<?> valueClass, MessageConverter messageConverter, Message<?> msg) {
|
||||
private void convertAndSetMessage(Object o, Class<?> valueClass,
|
||||
MessageConverter messageConverter, Message<?> msg) {
|
||||
Object result = valueClass.isAssignableFrom(msg.getPayload().getClass())
|
||||
? msg.getPayload() : messageConverter.fromMessage(msg, valueClass);
|
||||
|
||||
@@ -199,7 +263,8 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private void processErrorFromDeserialization(KStream<?, ?> bindingTarget, KStream<?, ?> branch) {
|
||||
private void processErrorFromDeserialization(KStream<?, ?> bindingTarget,
|
||||
KStream<?, ?> branch) {
|
||||
branch.process(() -> new Processor() {
|
||||
ProcessorContext context;
|
||||
|
||||
@@ -210,24 +275,41 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
@Override
|
||||
public void process(Object o, Object o2) {
|
||||
//Only continue if the record was not a tombstone.
|
||||
// Only continue if the record was not a tombstone.
|
||||
if (o2 != null) {
|
||||
if (KafkaStreamsMessageConversionDelegate.this.kstreamBindingInformationCatalogue.isDlqEnabled(bindingTarget)) {
|
||||
if (KafkaStreamsMessageConversionDelegate.this.kstreamBindingInformationCatalogue
|
||||
.isDlqEnabled(bindingTarget)) {
|
||||
String destination = this.context.topic();
|
||||
if (o2 instanceof Message) {
|
||||
Message message = (Message) o2;
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue.sendToDlq(destination, (byte[]) o, (byte[]) message.getPayload(), this.context.partition());
|
||||
|
||||
// We need to convert the key to a byte[] before sending to DLQ.
|
||||
Serde keySerde = kstreamBindingInformationCatalogue.getKeySerde(bindingTarget);
|
||||
Serializer keySerializer = keySerde.serializer();
|
||||
byte[] keyBytes = keySerializer.serialize(null, o);
|
||||
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue
|
||||
.sendToDlq(destination, keyBytes,
|
||||
(byte[]) message.getPayload(),
|
||||
this.context.partition());
|
||||
}
|
||||
else {
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue.sendToDlq(destination, (byte[]) o, (byte[]) o2, this.context.partition());
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue
|
||||
.sendToDlq(destination, (byte[]) o, (byte[]) o2,
|
||||
this.context.partition());
|
||||
}
|
||||
}
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
throw new IllegalStateException("Inbound deserialization failed. Stopping further processing of records.");
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
throw new IllegalStateException("Inbound deserialization failed. "
|
||||
+ "Stopping further processing of records.");
|
||||
}
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
//quietly passing through. No action needed, this is similar to log and continue.
|
||||
LOG.error("Inbound deserialization failed. Skipping this record and continuing.");
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
// quietly passing through. No action needed, this is similar to
|
||||
// log and continue.
|
||||
LOG.error(
|
||||
"Inbound deserialization failed. Skipping this record and continuing.");
|
||||
}
|
||||
}
|
||||
}
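The reworked DLQ branch above serializes the record key with the binding's key `Serde` before handing the key and payload bytes to the DLQ dispatcher; when DLQ is not enabled, the configured serde error mode decides between `logAndFail` and `logAndContinue`. A minimal sketch of the key-serialization step (the key value and serde choice are illustrative):

```java
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;

public class DlqKeySerializationSketch {

    public static void main(String[] args) {
        // The DLQ path needs a raw byte[] key; serialize it with the binding's key Serde first.
        Serde<String> keySerde = Serdes.String();
        Serializer<String> keySerializer = keySerde.serializer();

        String key = "order-42";
        byte[] keyBytes = keySerializer.serialize(null, key); // topic is not required by these simple serdes

        System.out.println("key bytes length: " + keyBytes.length);
    }
}
```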
|
||||
@@ -246,5 +328,11 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
void setContentType(String contentType) {
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
void unsetContentType() {
|
||||
this.contentType = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -37,10 +37,10 @@ class KafkaStreamsRegistry {
|
||||
|
||||
/**
|
||||
* Register the {@link KafkaStreams} object created in the application.
|
||||
*
|
||||
* @param kafkaStreams {@link KafkaStreams} object created in the application
|
||||
*/
|
||||
void registerKafkaStreams(KafkaStreams kafkaStreams) {
|
||||
this.kafkaStreams.add(kafkaStreams);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -17,38 +17,28 @@
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.utils.Bytes;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.StreamsBuilder;
|
||||
import org.apache.kafka.streams.StreamsConfig;
|
||||
import org.apache.kafka.streams.Topology;
|
||||
import org.apache.kafka.streams.kstream.Consumed;
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.state.KeyValueStore;
|
||||
import org.apache.kafka.streams.state.StoreBuilder;
|
||||
import org.apache.kafka.streams.state.Stores;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanInitializationException;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
@@ -60,16 +50,12 @@ import org.springframework.cloud.stream.binding.StreamListenerSetupMethodOrchest
|
||||
import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.core.MethodParameter;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.core.annotation.AnnotationUtils;
|
||||
import org.springframework.kafka.config.KafkaStreamsConfiguration;
|
||||
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
import org.springframework.kafka.core.CleanupConfig;
|
||||
import org.springframework.messaging.MessageHeaders;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
import org.springframework.messaging.support.MessageBuilder;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
@@ -78,20 +64,23 @@ import org.springframework.util.StringUtils;
|
||||
/**
|
||||
* Kafka Streams specific implementation for {@link StreamListenerSetupMethodOrchestrator}
|
||||
* that overrides the default mechanisms for invoking StreamListener adapters.
|
||||
*
|
||||
* <p>
|
||||
* The orchestration primarily focuses on the following areas:
|
||||
*
|
||||
* 1. Allow multiple KStream output bindings (KStream branching) by allowing more than one output values on {@link SendTo}
|
||||
* 2. Allow multiple inbound bindings for multiple KStream and or KTable/GlobalKTable types.
|
||||
* 3. Each StreamListener method that it orchestrates gets its own {@link StreamsBuilderFactoryBean} and {@link StreamsConfig}
|
||||
* <p>
|
||||
* 1. Allow multiple KStream output bindings (KStream branching) by allowing more than one
|
||||
* output values on {@link SendTo} 2. Allow multiple inbound bindings for multiple KStream
|
||||
* and or KTable/GlobalKTable types. 3. Each StreamListener method that it orchestrates
|
||||
* gets its own {@link StreamsBuilderFactoryBean} and {@link StreamsConfig}
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Lei Chen
|
||||
* @author Gary Russell
|
||||
*/
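To illustrate point 1 of the Javadoc above, a hypothetical `@StreamListener` that branches one inbound `KStream` to two `@SendTo` outputs. The binding names, predicates, and bindings interface are illustrative and not part of this changeset:

```java
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Predicate;

import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

// Hypothetical bindings: the names "input", "english" and "french" are illustrative.
interface BranchingBindings {

    @Input("input")
    KStream<String, String> input();

    @Output("english")
    KStream<String, String> english();

    @Output("french")
    KStream<String, String> french();
}

class BranchingProcessor {

    @StreamListener("input")
    @SendTo({ "english", "french" })
    public KStream<String, String>[] route(KStream<String, String> words) {
        // One element of the returned KStream[] per @SendTo output, in declaration order.
        Predicate<String, String> isEnglish = (key, value) -> value.startsWith("en:");
        Predicate<String, String> isFrench = (key, value) -> value.startsWith("fr:");
        return words.branch(isEnglish, isFrench);
    }
}
```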
|
||||
class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListenerSetupMethodOrchestrator, ApplicationContextAware {
|
||||
class KafkaStreamsStreamListenerSetupMethodOrchestrator extends AbstractKafkaStreamsBinderProcessor
|
||||
implements StreamListenerSetupMethodOrchestrator {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KafkaStreamsStreamListenerSetupMethodOrchestrator.class);
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(KafkaStreamsStreamListenerSetupMethodOrchestrator.class);
|
||||
|
||||
private final StreamListenerParameterAdapter streamListenerParameterAdapter;
|
||||
|
||||
@@ -105,38 +94,37 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
|
||||
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
|
||||
|
||||
private final Map<Method, List<String>> registeredStoresPerMethod = new HashMap<>();
|
||||
|
||||
private final Map<Method, StreamsBuilderFactoryBean> methodStreamsBuilderFactoryBeanMap = new HashMap<>();
|
||||
|
||||
private final CleanupConfig cleanupConfig;
|
||||
|
||||
private ConfigurableApplicationContext applicationContext;
|
||||
|
||||
KafkaStreamsStreamListenerSetupMethodOrchestrator(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
StreamListenerParameterAdapter streamListenerParameterAdapter,
|
||||
Collection<StreamListenerResultAdapter> streamListenerResultAdapters,
|
||||
CleanupConfig cleanupConfig) {
|
||||
KafkaStreamsStreamListenerSetupMethodOrchestrator(
|
||||
BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsExtendedBindingProperties extendedBindingProperties,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsBindingInformationCatalogue bindingInformationCatalogue,
|
||||
StreamListenerParameterAdapter streamListenerParameterAdapter,
|
||||
Collection<StreamListenerResultAdapter> listenerResultAdapters,
|
||||
CleanupConfig cleanupConfig) {
|
||||
super(bindingServiceProperties, bindingInformationCatalogue, extendedBindingProperties, keyValueSerdeResolver, cleanupConfig);
|
||||
this.bindingServiceProperties = bindingServiceProperties;
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
this.kafkaStreamsExtendedBindingProperties = extendedBindingProperties;
|
||||
this.keyValueSerdeResolver = keyValueSerdeResolver;
|
||||
this.kafkaStreamsBindingInformationCatalogue = kafkaStreamsBindingInformationCatalogue;
|
||||
this.kafkaStreamsBindingInformationCatalogue = bindingInformationCatalogue;
|
||||
this.streamListenerParameterAdapter = streamListenerParameterAdapter;
|
||||
this.streamListenerResultAdapters = streamListenerResultAdapters;
|
||||
this.cleanupConfig = cleanupConfig;
|
||||
this.streamListenerResultAdapters = listenerResultAdapters;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supports(Method method) {
|
||||
return methodParameterSupports(method) &&
|
||||
(methodReturnTypeSuppports(method) || Void.TYPE.equals(method.getReturnType()));
|
||||
return methodParameterSupports(method) && (methodReturnTypeSuppports(method)
|
||||
|| Void.TYPE.equals(method.getReturnType()));
|
||||
}
|
||||
|
||||
private boolean methodReturnTypeSuppports(Method method) {
|
||||
Class<?> returnType = method.getReturnType();
|
||||
if (returnType.equals(KStream.class) ||
|
||||
(returnType.isArray() && returnType.getComponentType().equals(KStream.class))) {
|
||||
if (returnType.equals(KStream.class) || (returnType.isArray()
|
||||
&& returnType.getComponentType().equals(KStream.class))) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@@ -157,12 +145,14 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
|
||||
@Override
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
public void orchestrateStreamListenerSetupMethod(StreamListener streamListener, Method method, Object bean) {
|
||||
public void orchestrateStreamListenerSetupMethod(StreamListener streamListener,
|
||||
Method method, Object bean) {
|
||||
String[] methodAnnotatedOutboundNames = getOutboundBindingTargetNames(method);
|
||||
validateStreamListenerMethod(streamListener, method, methodAnnotatedOutboundNames);
|
||||
validateStreamListenerMethod(streamListener, method,
|
||||
methodAnnotatedOutboundNames);
|
||||
String methodAnnotatedInboundName = streamListener.value();
|
||||
Object[] adaptedInboundArguments = adaptAndRetrieveInboundArguments(method, methodAnnotatedInboundName,
|
||||
this.applicationContext,
|
||||
Object[] adaptedInboundArguments = adaptAndRetrieveInboundArguments(method,
|
||||
methodAnnotatedInboundName, this.applicationContext,
|
||||
this.streamListenerParameterAdapter);
|
||||
try {
|
||||
ReflectionUtils.makeAccessible(method);
|
||||
@@ -172,40 +162,51 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
else {
|
||||
Object result = method.invoke(bean, adaptedInboundArguments);
|
||||
|
||||
if (result.getClass().isArray()) {
|
||||
Assert.isTrue(methodAnnotatedOutboundNames.length == ((Object[]) result).length,
|
||||
"Result does not match with the number of declared outbounds");
|
||||
}
|
||||
else {
|
||||
Assert.isTrue(methodAnnotatedOutboundNames.length == 1,
|
||||
"Result does not match with the number of declared outbounds");
|
||||
}
|
||||
if (result.getClass().isArray()) {
|
||||
Object[] outboundKStreams = (Object[]) result;
|
||||
int i = 0;
|
||||
for (Object outboundKStream : outboundKStreams) {
|
||||
Object targetBean = this.applicationContext.getBean(methodAnnotatedOutboundNames[i++]);
|
||||
for (StreamListenerResultAdapter streamListenerResultAdapter : this.streamListenerResultAdapters) {
|
||||
if (streamListenerResultAdapter.supports(outboundKStream.getClass(), targetBean.getClass())) {
|
||||
streamListenerResultAdapter.adapt(outboundKStream, targetBean);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (methodAnnotatedOutboundNames != null && methodAnnotatedOutboundNames.length > 0) {
|
||||
if (result.getClass().isArray()) {
|
||||
Assert.isTrue(
|
||||
methodAnnotatedOutboundNames.length == ((Object[]) result).length,
|
||||
"Result does not match with the number of declared outbounds");
|
||||
}
|
||||
else {
|
||||
Assert.isTrue(methodAnnotatedOutboundNames.length == 1,
|
||||
"Result does not match with the number of declared outbounds");
|
||||
}
|
||||
}
|
||||
else {
|
||||
Object targetBean = this.applicationContext.getBean(methodAnnotatedOutboundNames[0]);
|
||||
for (StreamListenerResultAdapter streamListenerResultAdapter : this.streamListenerResultAdapters) {
|
||||
if (streamListenerResultAdapter.supports(result.getClass(), targetBean.getClass())) {
|
||||
streamListenerResultAdapter.adapt(result, targetBean);
|
||||
break;
|
||||
kafkaStreamsBindingInformationCatalogue.setOutboundKStreamResolvable(ResolvableType.forMethodReturnType(method));
|
||||
if (methodAnnotatedOutboundNames != null && methodAnnotatedOutboundNames.length > 0) {
|
||||
if (result.getClass().isArray()) {
|
||||
Object[] outboundKStreams = (Object[]) result;
|
||||
int i = 0;
|
||||
for (Object outboundKStream : outboundKStreams) {
|
||||
Object targetBean = this.applicationContext
|
||||
.getBean(methodAnnotatedOutboundNames[i++]);
|
||||
adaptStreamListenerResult(outboundKStream, targetBean);
|
||||
}
|
||||
}
|
||||
else {
|
||||
Object targetBean = this.applicationContext
|
||||
.getBean(methodAnnotatedOutboundNames[0]);
|
||||
adaptStreamListenerResult(result, targetBean);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new BeanInitializationException("Cannot setup StreamListener for " + method, ex);
|
||||
throw new BeanInitializationException(
|
||||
"Cannot setup StreamListener for " + method, ex);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void adaptStreamListenerResult(Object outboundKStream, Object targetBean) {
|
||||
for (StreamListenerResultAdapter streamListenerResultAdapter : this.streamListenerResultAdapters) {
|
||||
if (streamListenerResultAdapter.supports(
|
||||
outboundKStream.getClass(), targetBean.getClass())) {
|
||||
streamListenerResultAdapter.adapt(outboundKStream,
|
||||
targetBean);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,95 +214,92 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public Object[] adaptAndRetrieveInboundArguments(Method method, String inboundName,
|
||||
ApplicationContext applicationContext,
|
||||
StreamListenerParameterAdapter... streamListenerParameterAdapters) {
|
||||
StreamListenerParameterAdapter... adapters) {
|
||||
Object[] arguments = new Object[method.getParameterTypes().length];
|
||||
for (int parameterIndex = 0; parameterIndex < arguments.length; parameterIndex++) {
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method, parameterIndex);
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method,
|
||||
parameterIndex);
|
||||
Class<?> parameterType = methodParameter.getParameterType();
|
||||
Object targetReferenceValue = null;
|
||||
if (methodParameter.hasParameterAnnotation(Input.class)) {
|
||||
targetReferenceValue = AnnotationUtils.getValue(methodParameter.getParameterAnnotation(Input.class));
|
||||
Input methodAnnotation = methodParameter.getParameterAnnotation(Input.class);
|
||||
targetReferenceValue = AnnotationUtils
|
||||
.getValue(methodParameter.getParameterAnnotation(Input.class));
|
||||
Input methodAnnotation = methodParameter
|
||||
.getParameterAnnotation(Input.class);
|
||||
inboundName = methodAnnotation.value();
|
||||
}
|
||||
else if (arguments.length == 1 && StringUtils.hasText(inboundName)) {
|
||||
targetReferenceValue = inboundName;
|
||||
}
|
||||
if (targetReferenceValue != null) {
|
||||
Assert.isInstanceOf(String.class, targetReferenceValue, "Annotation value must be a String");
|
||||
Object targetBean = applicationContext.getBean((String) targetReferenceValue);
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(inboundName);
|
||||
enableNativeDecodingForKTableAlways(parameterType, bindingProperties);
|
||||
//Retrieve the StreamsConfig created for this method if available.
|
||||
//Otherwise, create the StreamsBuilderFactory and get the underlying config.
|
||||
Assert.isInstanceOf(String.class, targetReferenceValue,
|
||||
"Annotation value must be a String");
|
||||
Object targetBean = applicationContext
|
||||
.getBean((String) targetReferenceValue);
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties
|
||||
.getBindingProperties(inboundName);
|
||||
// Retrieve the StreamsConfig created for this method if available.
|
||||
// Otherwise, create the StreamsBuilderFactory and get the underlying
|
||||
// config.
|
||||
if (!this.methodStreamsBuilderFactoryBeanMap.containsKey(method)) {
|
||||
buildStreamsBuilderAndRetrieveConfig(method, applicationContext, inboundName);
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = buildStreamsBuilderAndRetrieveConfig(method.getDeclaringClass().getSimpleName() + "-" + method.getName(),
|
||||
applicationContext,
|
||||
inboundName, null);
|
||||
this.methodStreamsBuilderFactoryBeanMap.put(method, streamsBuilderFactoryBean);
|
||||
}
|
||||
try {
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = this.methodStreamsBuilderFactoryBeanMap.get(method);
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = this.methodStreamsBuilderFactoryBeanMap
|
||||
.get(method);
|
||||
StreamsBuilder streamsBuilder = streamsBuilderFactoryBean.getObject();
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(inboundName);
|
||||
//get state store spec
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(inboundName);
|
||||
// get state store spec
|
||||
KafkaStreamsStateStoreProperties spec = buildStateStoreSpec(method);
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver.getInboundKeySerde(extendedConsumerProperties);
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getInboundValueSerde(bindingProperties.getConsumer(), extendedConsumerProperties);
|
||||
|
||||
final KafkaConsumerProperties.StartOffset startOffset = extendedConsumerProperties.getStartOffset();
|
||||
Topology.AutoOffsetReset autoOffsetReset = null;
|
||||
if (startOffset != null) {
|
||||
switch (startOffset) {
|
||||
case earliest : autoOffsetReset = Topology.AutoOffsetReset.EARLIEST;
|
||||
break;
|
||||
case latest : autoOffsetReset = Topology.AutoOffsetReset.LATEST;
|
||||
break;
|
||||
default: break;
|
||||
}
|
||||
}
|
||||
if (extendedConsumerProperties.isResetOffsets()) {
|
||||
LOG.warn("Detected resetOffsets configured on binding " + inboundName + ". "
|
||||
+ "Setting resetOffsets in Kafka Streams binder does not have any effect.");
|
||||
}
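The removed switch above (now folded into `getAutoOffsetReset`) maps the binding's `startOffset` to Kafka Streams' `Topology.AutoOffsetReset`. A minimal sketch of that mapping, using a local enum as a stand-in for `KafkaConsumerProperties.StartOffset`:

```java
import org.apache.kafka.streams.Topology;

public class StartOffsetMappingSketch {

    // Illustrative stand-in for the binding's startOffset setting.
    enum StartOffset { earliest, latest }

    static Topology.AutoOffsetReset toAutoOffsetReset(StartOffset startOffset) {
        if (startOffset == null) {
            return null; // no explicit policy; Kafka Streams applies its own default
        }
        switch (startOffset) {
            case earliest:
                return Topology.AutoOffsetReset.EARLIEST;
            case latest:
                return Topology.AutoOffsetReset.LATEST;
            default:
                return null;
        }
    }

    public static void main(String[] args) {
        System.out.println(toAutoOffsetReset(StartOffset.earliest)); // EARLIEST
    }
}
```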
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver
|
||||
.getInboundKeySerde(extendedConsumerProperties, ResolvableType.forMethodParameter(methodParameter));
|
||||
LOG.info("Key Serde used for " + targetReferenceValue + ": " + keySerde.getClass().getName());
|
||||
|
||||
Serde<?> valueSerde = bindingServiceProperties.getConsumerProperties(inboundName).isUseNativeDecoding() ?
|
||||
getValueSerde(inboundName, extendedConsumerProperties, ResolvableType.forMethodParameter(methodParameter)) : Serdes.ByteArray();
|
||||
LOG.info("Value Serde used for " + targetReferenceValue + ": " + valueSerde.getClass().getName());
|
||||
|
||||
Topology.AutoOffsetReset autoOffsetReset = getAutoOffsetReset(inboundName, extendedConsumerProperties);
|
||||
|
||||
if (parameterType.isAssignableFrom(KStream.class)) {
|
||||
KStream<?, ?> stream = getkStream(inboundName, spec, bindingProperties,
|
||||
streamsBuilder, keySerde, valueSerde, autoOffsetReset);
|
||||
KStream<?, ?> stream = getkStream(inboundName, spec,
|
||||
bindingProperties, streamsBuilder, keySerde, valueSerde,
|
||||
autoOffsetReset);
|
||||
KStreamBoundElementFactory.KStreamWrapper kStreamWrapper = (KStreamBoundElementFactory.KStreamWrapper) targetBean;
|
||||
//wrap the proxy created during the initial target type binding with real object (KStream)
|
||||
// wrap the proxy created during the initial target type binding
|
||||
// with real object (KStream)
|
||||
kStreamWrapper.wrap((KStream<Object, Object>) stream);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
for (StreamListenerParameterAdapter streamListenerParameterAdapter : streamListenerParameterAdapters) {
|
||||
if (streamListenerParameterAdapter.supports(stream.getClass(), methodParameter)) {
|
||||
arguments[parameterIndex] = streamListenerParameterAdapter.adapt(kStreamWrapper, methodParameter);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addKeySerde(stream, keySerde);
|
||||
BindingProperties bindingProperties1 = this.kafkaStreamsBindingInformationCatalogue.getBindingProperties().get(kStreamWrapper);
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerBindingProperties(stream, bindingProperties1);
|
||||
|
||||
this.kafkaStreamsBindingInformationCatalogue
|
||||
.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
for (StreamListenerParameterAdapter streamListenerParameterAdapter : adapters) {
|
||||
if (streamListenerParameterAdapter.supports(stream.getClass(),
|
||||
methodParameter)) {
|
||||
arguments[parameterIndex] = streamListenerParameterAdapter
|
||||
.adapt(stream, methodParameter);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (arguments[parameterIndex] == null && parameterType.isAssignableFrom(stream.getClass())) {
|
||||
if (arguments[parameterIndex] == null
|
||||
&& parameterType.isAssignableFrom(stream.getClass())) {
|
||||
arguments[parameterIndex] = stream;
|
||||
}
|
||||
Assert.notNull(arguments[parameterIndex], "Cannot convert argument " + parameterIndex + " of " + method
|
||||
+ "from " + stream.getClass() + " to " + parameterType);
|
||||
Assert.notNull(arguments[parameterIndex],
|
||||
"Cannot convert argument " + parameterIndex + " of "
|
||||
+ method + "from " + stream.getClass() + " to "
|
||||
+ parameterType);
|
||||
}
|
||||
else if (parameterType.isAssignableFrom(KTable.class)) {
|
||||
String materializedAs = extendedConsumerProperties.getMaterializedAs();
|
||||
String bindingDestination = this.bindingServiceProperties.getBindingDestination(inboundName);
|
||||
KTable<?, ?> table = getKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
|
||||
bindingDestination, autoOffsetReset);
|
||||
KTableBoundElementFactory.KTableWrapper kTableWrapper = (KTableBoundElementFactory.KTableWrapper) targetBean;
|
||||
//wrap the proxy created during the initial target type binding with real object (KTable)
|
||||
kTableWrapper.wrap((KTable<Object, Object>) table);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
arguments[parameterIndex] = table;
|
||||
}
|
||||
else if (parameterType.isAssignableFrom(GlobalKTable.class)) {
|
||||
String materializedAs = extendedConsumerProperties.getMaterializedAs();
|
||||
String bindingDestination = this.bindingServiceProperties.getBindingDestination(inboundName);
|
||||
GlobalKTable<?, ?> table = getGlobalKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
|
||||
bindingDestination, autoOffsetReset);
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapper globalKTableWrapper = (GlobalKTableBoundElementFactory.GlobalKTableWrapper) targetBean;
|
||||
//wrap the proxy created during the initial target type binding with real object (KTable)
|
||||
globalKTableWrapper.wrap((GlobalKTable<Object, Object>) table);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
arguments[parameterIndex] = table;
|
||||
else {
|
||||
handleKTableGlobalKTableInputs(arguments, parameterIndex, inboundName, parameterType, targetBean, streamsBuilderFactoryBean,
|
||||
streamsBuilder, extendedConsumerProperties, keySerde, valueSerde, autoOffsetReset);
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
@@ -309,67 +307,41 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException(StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
|
||||
throw new IllegalStateException(
|
||||
StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
|
||||
}
|
||||
}
|
||||
return arguments;
|
||||
}
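The argument-adaptation loop above is what allows a single `@StreamListener` method to take several declarative inputs (point 2 of the class Javadoc). A hypothetical example with a `KStream` and a `KTable` input joined together; binding names and value types are illustrative:

```java
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;

import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

// Hypothetical bindings; the names and value types are illustrative.
interface OrderBindings {

    @Input("orders")
    KStream<String, String> orders();

    @Input("customers")
    KTable<String, String> customers();

    @Output("enriched-orders")
    KStream<String, String> enrichedOrders();
}

class OrderEnricher {

    @StreamListener
    @SendTo("enriched-orders")
    public KStream<String, String> enrich(@Input("orders") KStream<String, String> orders,
            @Input("customers") KTable<String, String> customers) {
        // Each @Input parameter is adapted to its own bound target by the orchestrator.
        return orders.join(customers, (order, customer) -> order + " / " + customer);
    }
}
```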
|
||||
|
||||
private GlobalKTable<?, ?> getGlobalKTable(StreamsBuilder streamsBuilder, Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
|
||||
String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return materializedAs != null ?
|
||||
materializedAsGlobalKTable(streamsBuilder, bindingDestination, materializedAs, keySerde, valueSerde, autoOffsetReset) :
|
||||
streamsBuilder.globalTable(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde).withOffsetResetPolicy(autoOffsetReset));
|
||||
}
|
||||
|
||||
private KTable<?, ?> getKTable(StreamsBuilder streamsBuilder, Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
|
||||
String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return materializedAs != null ?
|
||||
materializedAs(streamsBuilder, bindingDestination, materializedAs, keySerde, valueSerde, autoOffsetReset) :
|
||||
streamsBuilder.table(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde).withOffsetResetPolicy(autoOffsetReset));
|
||||
}
|
||||
|
||||
private <K, V> KTable<K, V> materializedAs(StreamsBuilder streamsBuilder, String destination, String storeName, Serde<K> k, Serde<V> v,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return streamsBuilder.table(this.bindingServiceProperties.getBindingDestination(destination),
|
||||
Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset),
|
||||
getMaterialized(storeName, k, v));
|
||||
}
|
||||
|
||||
private <K, V> GlobalKTable<K, V> materializedAsGlobalKTable(StreamsBuilder streamsBuilder, String destination, String storeName, Serde<K> k, Serde<V> v,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return streamsBuilder.globalTable(this.bindingServiceProperties.getBindingDestination(destination),
|
||||
Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset),
|
||||
getMaterialized(storeName, k, v));
|
||||
}
|
||||
|
||||
private <K, V> Materialized<K, V, KeyValueStore<Bytes, byte[]>> getMaterialized(String storeName, Serde<K> k, Serde<V> v) {
|
||||
return Materialized.<K, V, KeyValueStore<Bytes, byte[]>>as(storeName)
|
||||
.withKeySerde(k)
|
||||
.withValueSerde(v);
|
||||
}
|
||||
|
||||
private StoreBuilder buildStateStore(KafkaStreamsStateStoreProperties spec) {
|
||||
try {
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver.getStateStoreKeySerde(spec.getKeySerdeString());
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getStateStoreValueSerde(spec.getValueSerdeString());
|
||||
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver
|
||||
.getStateStoreKeySerde(spec.getKeySerdeString());
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver
|
||||
.getStateStoreValueSerde(spec.getValueSerdeString());
|
||||
StoreBuilder builder;
|
||||
switch (spec.getType()) {
|
||||
case KEYVALUE:
|
||||
builder = Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(spec.getName()), keySerde, valueSerde);
|
||||
break;
|
||||
case WINDOW:
|
||||
builder = Stores.windowStoreBuilder(Stores.persistentWindowStore(spec.getName(), spec.getRetention(), 3, spec.getLength(), false),
|
||||
keySerde,
|
||||
builder = Stores.keyValueStoreBuilder(
|
||||
Stores.persistentKeyValueStore(spec.getName()), keySerde,
|
||||
valueSerde);
|
||||
break;
|
||||
case WINDOW:
|
||||
builder = Stores
|
||||
.windowStoreBuilder(
|
||||
Stores.persistentWindowStore(spec.getName(),
|
||||
spec.getRetention(), 3, spec.getLength(), false),
|
||||
keySerde, valueSerde);
|
||||
break;
|
||||
case SESSION:
|
||||
builder = Stores.sessionStoreBuilder(Stores.persistentSessionStore(spec.getName(), spec.getRetention()), keySerde, valueSerde);
|
||||
builder = Stores.sessionStoreBuilder(Stores.persistentSessionStore(
|
||||
spec.getName(), spec.getRetention()), keySerde, valueSerde);
|
||||
break;
|
||||
default:
|
||||
throw new UnsupportedOperationException("state store type (" + spec.getType() + ") is not supported!");
|
||||
throw new UnsupportedOperationException(
|
||||
"state store type (" + spec.getType() + ") is not supported!");
|
||||
}
|
||||
if (spec.isCacheEnabled()) {
|
||||
builder = builder.withCachingEnabled();
|
||||
@@ -385,10 +357,11 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
}
|
||||
}
|
||||
|
||||
private KStream<?, ?> getkStream(String inboundName, KafkaStreamsStateStoreProperties storeSpec,
|
||||
BindingProperties bindingProperties,
|
||||
StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
private KStream<?, ?> getkStream(String inboundName,
|
||||
KafkaStreamsStateStoreProperties storeSpec,
|
||||
BindingProperties bindingProperties, StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
if (storeSpec != null) {
|
||||
StoreBuilder storeBuilder = buildStateStore(storeSpec);
|
||||
streamsBuilder.addStateStore(storeBuilder);
|
||||
@@ -396,102 +369,17 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
LOG.info("state store " + storeBuilder.name() + " added to topology");
|
||||
}
|
||||
}
|
||||
String[] bindingTargets = StringUtils
|
||||
.commaDelimitedListToStringArray(this.bindingServiceProperties.getBindingDestination(inboundName));
|
||||
|
||||
KStream<?, ?> stream =
|
||||
streamsBuilder.stream(Arrays.asList(bindingTargets),
|
||||
Consumed.with(keySerde, valueSerde)
|
||||
.withOffsetResetPolicy(autoOffsetReset));
|
||||
final boolean nativeDecoding = this.bindingServiceProperties.getConsumerProperties(inboundName).isUseNativeDecoding();
|
||||
if (nativeDecoding) {
|
||||
LOG.info("Native decoding is enabled for " + inboundName + ". Inbound deserialization done at the broker.");
|
||||
}
|
||||
else {
|
||||
LOG.info("Native decoding is disabled for " + inboundName + ". Inbound message conversion done by Spring Cloud Stream.");
|
||||
}
|
||||
|
||||
stream = stream.mapValues((value) -> {
|
||||
Object returnValue;
|
||||
String contentType = bindingProperties.getContentType();
|
||||
if (value != null && !StringUtils.isEmpty(contentType) && !nativeDecoding) {
|
||||
returnValue = MessageBuilder.withPayload(value)
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, contentType).build();
|
||||
}
|
||||
else {
|
||||
returnValue = value;
|
||||
}
|
||||
return returnValue;
|
||||
});
|
||||
return stream;
|
||||
return getKStream(inboundName, bindingProperties, streamsBuilder, keySerde, valueSerde, autoOffsetReset);
|
||||
}
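The `mapValues` step removed above (now handled by `getKStream` in the superclass) wraps each non-null payload in a `Message` carrying the binding's `contentType` whenever native decoding is disabled, so the framework converter can deserialize it later. A minimal sketch of that decision, with illustrative inputs:

```java
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.support.MessageBuilder;

public class InboundWrappingSketch {

    // Mirrors the mapValues step: when native decoding is off, the raw payload is wrapped
    // in a Message carrying the binding's contentType for later conversion.
    static Object wrapIfNeeded(Object value, String contentType, boolean nativeDecoding) {
        if (value != null && contentType != null && !contentType.isEmpty() && !nativeDecoding) {
            return MessageBuilder.withPayload(value)
                    .setHeader(MessageHeaders.CONTENT_TYPE, contentType)
                    .build();
        }
        return value;
    }

    public static void main(String[] args) {
        Object wrapped = wrapIfNeeded("{\"id\":1}", "application/json", false);
        System.out.println(wrapped instanceof Message); // true
    }
}
```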
|
||||
|
||||
private void enableNativeDecodingForKTableAlways(Class<?> parameterType, BindingProperties bindingProperties) {
|
||||
if (parameterType.isAssignableFrom(KTable.class) || parameterType.isAssignableFrom(GlobalKTable.class)) {
|
||||
if (bindingProperties.getConsumer() == null) {
|
||||
bindingProperties.setConsumer(new ConsumerProperties());
|
||||
}
|
||||
//No framework level message conversion provided for KTable/GlobalKTable, it's done by the broker.
|
||||
bindingProperties.getConsumer().setUseNativeDecoding(true);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
private void buildStreamsBuilderAndRetrieveConfig(Method method, ApplicationContext applicationContext,
|
||||
String inboundName) {
|
||||
ConfigurableListableBeanFactory beanFactory = this.applicationContext.getBeanFactory();
|
||||
|
||||
Map<String, Object> streamConfigGlobalProperties = applicationContext.getBean("streamConfigGlobalProperties", Map.class);
|
||||
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(inboundName);
|
||||
streamConfigGlobalProperties.putAll(extendedConsumerProperties.getConfiguration());
|
||||
|
||||
String applicationId = extendedConsumerProperties.getApplicationId();
|
||||
//override application.id if set at the individual binding level.
|
||||
if (StringUtils.hasText(applicationId)) {
|
||||
streamConfigGlobalProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
|
||||
}
|
||||
|
||||
int concurrency = this.bindingServiceProperties.getConsumerProperties(inboundName).getConcurrency();
|
||||
// override concurrency if set at the individual binding level.
|
||||
if (concurrency > 1) {
|
||||
streamConfigGlobalProperties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, concurrency);
|
||||
}
|
||||
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers = applicationContext.getBean("kafkaStreamsDlqDispatchers", Map.class);
|
||||
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration = new KafkaStreamsConfiguration(streamConfigGlobalProperties) {
|
||||
@Override
|
||||
public Properties asProperties() {
|
||||
Properties properties = super.asProperties();
|
||||
properties.put(SendToDlqAndContinue.KAFKA_STREAMS_DLQ_DISPATCHERS, kafkaStreamsDlqDispatchers);
|
||||
return properties;
|
||||
}
|
||||
};
|
||||
|
||||
StreamsBuilderFactoryBean streamsBuilder = this.cleanupConfig == null
|
||||
? new StreamsBuilderFactoryBean(kafkaStreamsConfiguration)
|
||||
: new StreamsBuilderFactoryBean(kafkaStreamsConfiguration, this.cleanupConfig);
|
||||
streamsBuilder.setAutoStartup(false);
|
||||
BeanDefinition streamsBuilderBeanDefinition =
|
||||
BeanDefinitionBuilder.genericBeanDefinition((Class<StreamsBuilderFactoryBean>) streamsBuilder.getClass(), () -> streamsBuilder)
|
||||
.getRawBeanDefinition();
|
||||
((BeanDefinitionRegistry) beanFactory).registerBeanDefinition("stream-builder-" + method.getName(), streamsBuilderBeanDefinition);
|
||||
StreamsBuilderFactoryBean streamsBuilderX = applicationContext.getBean("&stream-builder-" + method.getName(), StreamsBuilderFactoryBean.class);
|
||||
this.methodStreamsBuilderFactoryBeanMap.put(method, streamsBuilderX);
|
||||
}
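`buildStreamsBuilderAndRetrieveConfig` lets binding-level settings override the global Streams configuration: `applicationId` replaces `application.id`, and a `concurrency` greater than one becomes `num.stream.threads`. A minimal sketch of those two overrides (the values are illustrative):

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.streams.StreamsConfig;

public class StreamsConfigOverrideSketch {

    public static void main(String[] args) {
        Map<String, Object> streamConfig = new HashMap<>();
        streamConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "default-application-id");

        // Illustrative binding-level settings; in the binder these come from the consumer properties.
        String bindingApplicationId = "orders-processor";
        int concurrency = 3;

        if (bindingApplicationId != null && !bindingApplicationId.isEmpty()) {
            streamConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, bindingApplicationId);
        }
        if (concurrency > 1) {
            streamConfig.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, concurrency);
        }

        System.out.println(streamConfig);
    }
}
```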
|
||||
|
||||
@Override
|
||||
public final void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
this.applicationContext = (ConfigurableApplicationContext) applicationContext;
|
||||
}
|
||||
|
||||
private void validateStreamListenerMethod(StreamListener streamListener, Method method, String[] methodAnnotatedOutboundNames) {
|
||||
private void validateStreamListenerMethod(StreamListener streamListener,
|
||||
Method method, String[] methodAnnotatedOutboundNames) {
|
||||
String methodAnnotatedInboundName = streamListener.value();
|
||||
if (methodAnnotatedOutboundNames != null) {
|
||||
for (String s : methodAnnotatedOutboundNames) {
|
||||
if (StringUtils.hasText(s)) {
|
||||
Assert.isTrue(isDeclarativeOutput(method, s), "Method must be declarative");
|
||||
Assert.isTrue(isDeclarativeOutput(method, s),
|
||||
"Method must be declarative");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -499,8 +387,11 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
int methodArgumentsLength = method.getParameterTypes().length;
|
||||
|
||||
for (int parameterIndex = 0; parameterIndex < methodArgumentsLength; parameterIndex++) {
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method, parameterIndex);
|
||||
Assert.isTrue(isDeclarativeInput(methodAnnotatedInboundName, methodParameter), "Method must be declarative");
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method,
|
||||
parameterIndex);
|
||||
Assert.isTrue(
|
||||
isDeclarativeInput(methodAnnotatedInboundName, methodParameter),
|
||||
"Method must be declarative");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -512,7 +403,8 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
if (returnType.isArray()) {
|
||||
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
|
||||
declarative = this.streamListenerResultAdapters.stream()
|
||||
.anyMatch((slpa) -> slpa.supports(returnType.getComponentType(), targetBeanClass));
|
||||
.anyMatch((slpa) -> slpa.supports(returnType.getComponentType(),
|
||||
targetBeanClass));
|
||||
return declarative;
|
||||
}
|
||||
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
|
||||
@@ -522,10 +414,23 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private boolean isDeclarativeInput(String targetBeanName, MethodParameter methodParameter) {
|
||||
if (!methodParameter.getParameterType().isAssignableFrom(Object.class) && this.applicationContext.containsBean(targetBeanName)) {
|
||||
private boolean isDeclarativeInput(String targetBeanName,
|
||||
MethodParameter methodParameter) {
|
||||
if (!methodParameter.getParameterType().isAssignableFrom(Object.class)
|
||||
&& this.applicationContext.containsBean(targetBeanName)) {
|
||||
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
|
||||
return this.streamListenerParameterAdapter.supports(targetBeanClass, methodParameter);
|
||||
if (targetBeanClass != null) {
|
||||
boolean supports = KafkaStreamsBinderUtils.supportsKStream(methodParameter, targetBeanClass);
|
||||
if (!supports) {
|
||||
supports = KTable.class.isAssignableFrom(targetBeanClass)
|
||||
&& KTable.class.isAssignableFrom(methodParameter.getParameterType());
|
||||
if (!supports) {
|
||||
supports = GlobalKTable.class.isAssignableFrom(targetBeanClass)
|
||||
&& GlobalKTable.class.isAssignableFrom(methodParameter.getParameterType());
|
||||
}
|
||||
}
|
||||
return supports;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
@@ -533,30 +438,38 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListene
|
||||
private static String[] getOutboundBindingTargetNames(Method method) {
|
||||
SendTo sendTo = AnnotationUtils.findAnnotation(method, SendTo.class);
|
||||
if (sendTo != null) {
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(sendTo.value()), StreamListenerErrorMessages.ATLEAST_ONE_OUTPUT);
|
||||
Assert.isTrue(sendTo.value().length >= 1, "At least one outbound destination need to be provided.");
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(sendTo.value()),
|
||||
StreamListenerErrorMessages.ATLEAST_ONE_OUTPUT);
|
||||
Assert.isTrue(sendTo.value().length >= 1,
|
||||
"At least one outbound destination need to be provided.");
|
||||
return sendTo.value();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
private static KafkaStreamsStateStoreProperties buildStateStoreSpec(Method method) {
|
||||
KafkaStreamsStateStore spec = AnnotationUtils.findAnnotation(method, KafkaStreamsStateStore.class);
|
||||
if (spec != null) {
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(spec.name()), "name cannot be empty");
|
||||
Assert.isTrue(spec.name().length() >= 1, "name cannot be empty.");
|
||||
KafkaStreamsStateStoreProperties props = new KafkaStreamsStateStoreProperties();
|
||||
props.setName(spec.name());
|
||||
props.setType(spec.type());
|
||||
props.setLength(spec.lengthMs());
|
||||
props.setKeySerdeString(spec.keySerde());
|
||||
props.setRetention(spec.retentionMs());
|
||||
props.setValueSerdeString(spec.valueSerde());
|
||||
props.setCacheEnabled(spec.cache());
|
||||
props.setLoggingDisabled(!spec.logging());
|
||||
return props;
|
||||
private KafkaStreamsStateStoreProperties buildStateStoreSpec(Method method) {
|
||||
if (!this.registeredStoresPerMethod.containsKey(method)) {
|
||||
KafkaStreamsStateStore spec = AnnotationUtils.findAnnotation(method,
|
||||
KafkaStreamsStateStore.class);
|
||||
if (spec != null) {
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(spec.name()), "name cannot be empty");
|
||||
Assert.isTrue(spec.name().length() >= 1, "name cannot be empty.");
|
||||
this.registeredStoresPerMethod.put(method, new ArrayList<>());
|
||||
this.registeredStoresPerMethod.get(method).add(spec.name());
|
||||
KafkaStreamsStateStoreProperties props = new KafkaStreamsStateStoreProperties();
|
||||
props.setName(spec.name());
|
||||
props.setType(spec.type());
|
||||
props.setLength(spec.lengthMs());
|
||||
props.setKeySerdeString(spec.keySerde());
|
||||
props.setRetention(spec.retentionMs());
|
||||
props.setValueSerdeString(spec.valueSerde());
|
||||
props.setCacheEnabled(spec.cache());
|
||||
props.setLoggingDisabled(!spec.logging());
|
||||
return props;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
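The spec built above comes from the `@KafkaStreamsStateStore` annotation on the listener method. A hypothetical usage sketch; the attribute values are illustrative and the store type is left at its default:

```java
import org.apache.kafka.streams.kstream.KStream;

import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore;

class StateStoreListener {

    // Hypothetical sketch: store name and serde class names are illustrative.
    @StreamListener("input")
    @KafkaStreamsStateStore(name = "my-store",
            keySerde = "org.apache.kafka.common.serialization.Serdes$StringSerde",
            valueSerde = "org.apache.kafka.common.serialization.Serdes$LongSerde",
            cache = true, logging = false)
    public void process(KStream<String, String> input) {
        // The orchestrator reads the annotation, builds a StoreBuilder from it and adds the
        // state store to the topology before this method is invoked.
    }
}
```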
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,78 +16,110 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.common.utils.Utils;
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.ProducerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsProducerProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.kafka.support.serializer.JsonSerde;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Resolver for key and value Serde.
|
||||
*
|
||||
* On the inbound, if native decoding is enabled, then any deserialization on the value is handled by Kafka.
|
||||
* First, we look for any key/value Serde set on the binding itself, if that is not available then look at the
|
||||
* common Serde set at the global level. If that fails, it falls back to byte[].
|
||||
* If native decoding is disabled, then the binder will do the deserialization on value and ignore any Serde set for value
|
||||
* and rely on the contentType provided. Keys are always deserialized at the broker.
|
||||
* On the inbound, if native decoding is enabled, then any deserialization on the value is
|
||||
* handled by Kafka. First, we look for any key/value Serde set on the binding itself, if
|
||||
* that is not available then look at the common Serde set at the global level. If that
|
||||
* fails, it falls back to byte[]. If native decoding is disabled, then the binder will do
|
||||
* the deserialization on value and ignore any Serde set for value and rely on the
|
||||
* contentType provided. Keys are always deserialized at the broker.
|
||||
*
|
||||
*
|
||||
* Same rules apply on the outbound. If native encoding is enabled, then value serialization is done at the broker using
|
||||
* any binder level Serde for value, if not using common Serde, if not, then byte[].
|
||||
* If native encoding is disabled, then the binder will do serialization using a contentType. Keys are always serialized
|
||||
* by the broker.
|
||||
* Same rules apply on the outbound. If native encoding is enabled, then value
|
||||
* serialization is done at the broker using any binder level Serde for value, if not
|
||||
* using common Serde, if not, then byte[]. If native encoding is disabled, then the
|
||||
* binder will do serialization using a contentType. Keys are always serialized by the
|
||||
* broker.
|
||||
*
|
||||
* For state stores, use the Serde class specified in
|
||||
* {@link org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore} to create Serde accordingly.
|
||||
* {@link org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore}
|
||||
* to create Serde accordingly.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Lei Chen
|
||||
*/
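A minimal sketch of the resolution order this Javadoc describes, namely binding-level Serde class first, then the global `default.key.serde`/`default.value.serde` entry, then `byte[]` (the class names below are illustrative):

```java
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Utils;

public class SerdeResolutionSketch {

    // Mirrors the resolution order: binding-level serde class name first,
    // then the global default serde entry, then byte[].
    static Serde<?> resolve(String bindingSerdeClassName, String globalDefaultSerdeClassName)
            throws ClassNotFoundException {
        if (bindingSerdeClassName != null && !bindingSerdeClassName.isEmpty()) {
            return Utils.newInstance(bindingSerdeClassName, Serde.class);
        }
        if (globalDefaultSerdeClassName != null && !globalDefaultSerdeClassName.isEmpty()) {
            return Utils.newInstance(globalDefaultSerdeClassName, Serde.class);
        }
        return Serdes.ByteArray();
    }

    public static void main(String[] args) throws ClassNotFoundException {
        Serde<?> serde = resolve(null, "org.apache.kafka.common.serialization.Serdes$StringSerde");
        System.out.println(serde.getClass().getName());
    }
}
```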
|
||||
class KeyValueSerdeResolver {
|
||||
public class KeyValueSerdeResolver implements ApplicationContextAware {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KeyValueSerdeResolver.class);
|
||||
|
||||
private final Map<String, Object> streamConfigGlobalProperties;
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
private ConfigurableApplicationContext context;
|
||||
|
||||
KeyValueSerdeResolver(Map<String, Object> streamConfigGlobalProperties,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
this.streamConfigGlobalProperties = streamConfigGlobalProperties;
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for inbound key.
|
||||
*
|
||||
* @param extendedConsumerProperties binding level extended {@link KafkaStreamsConsumerProperties}
|
||||
* @param extendedConsumerProperties binding level extended
|
||||
* {@link KafkaStreamsConsumerProperties}
|
||||
* @return configured {@link Serde} for the inbound key.
|
||||
*/
|
||||
public Serde<?> getInboundKeySerde(KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
public Serde<?> getInboundKeySerde(
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
String keySerdeString = extendedConsumerProperties.getKeySerde();
|
||||
|
||||
return getKeySerde(keySerdeString);
|
||||
}
|
||||
|
||||
public Serde<?> getInboundKeySerde(
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties, ResolvableType resolvableType) {
|
||||
String keySerdeString = extendedConsumerProperties.getKeySerde();
|
||||
|
||||
return getKeySerde(keySerdeString, resolvableType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for inbound value.
|
||||
*
|
||||
* @param consumerProperties {@link ConsumerProperties} on binding
|
||||
* @param extendedConsumerProperties binding level extended {@link KafkaStreamsConsumerProperties}
|
||||
* @param extendedConsumerProperties binding level extended
|
||||
* {@link KafkaStreamsConsumerProperties}
|
||||
* @return configured {@link Serde} for the inbound value.
|
||||
*/
|
||||
public Serde<?> getInboundValueSerde(ConsumerProperties consumerProperties, KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
public Serde<?> getInboundValueSerde(ConsumerProperties consumerProperties,
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
Serde<?> valueSerde;
|
||||
|
||||
String valueSerdeString = extendedConsumerProperties.getValueSerde();
|
||||
try {
|
||||
if (consumerProperties != null &&
|
||||
consumerProperties.isUseNativeDecoding()) {
|
||||
if (consumerProperties != null && consumerProperties.isUseNativeDecoding()) {
|
||||
valueSerde = getValueSerde(valueSerdeString);
|
||||
}
|
||||
else {
|
||||
@@ -101,9 +133,29 @@ class KeyValueSerdeResolver {
|
||||
return valueSerde;
|
||||
}
|
||||
|
||||
public Serde<?> getInboundValueSerde(ConsumerProperties consumerProperties,
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties,
|
||||
ResolvableType resolvableType) {
|
||||
Serde<?> valueSerde;
|
||||
|
||||
String valueSerdeString = extendedConsumerProperties.getValueSerde();
|
||||
try {
|
||||
if (consumerProperties != null && consumerProperties.isUseNativeDecoding()) {
|
||||
valueSerde = getValueSerde(valueSerdeString, resolvableType);
|
||||
}
|
||||
else {
|
||||
valueSerde = Serdes.ByteArray();
|
||||
}
|
||||
valueSerde.configure(this.streamConfigGlobalProperties, false);
|
||||
}
|
||||
catch (ClassNotFoundException ex) {
|
||||
throw new IllegalStateException("Serde class not found: ", ex);
|
||||
}
|
||||
return valueSerde;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for outbound key.
|
||||
*
|
||||
* @param properties binding level extended {@link KafkaStreamsProducerProperties}
|
||||
* @return configured {@link Serde} for the outbound key.
|
||||
*/
|
||||
@@ -111,18 +163,44 @@ class KeyValueSerdeResolver {
|
||||
return getKeySerde(properties.getKeySerde());
|
||||
}
|
||||
|
||||
public Serde<?> getOuboundKeySerde(KafkaStreamsProducerProperties properties, ResolvableType resolvableType) {
|
||||
return getKeySerde(properties.getKeySerde(), resolvableType);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for outbound value.
|
||||
*
|
||||
* @param producerProperties {@link ProducerProperties} on binding
|
||||
* @param kafkaStreamsProducerProperties binding level extended {@link KafkaStreamsProducerProperties}
|
||||
* @param kafkaStreamsProducerProperties binding level extended
|
||||
* {@link KafkaStreamsProducerProperties}
|
||||
* @return configured {@link Serde} for the outbound value.
|
||||
*/
|
||||
public Serde<?> getOutboundValueSerde(ProducerProperties producerProperties, KafkaStreamsProducerProperties kafkaStreamsProducerProperties) {
|
||||
public Serde<?> getOutboundValueSerde(ProducerProperties producerProperties,
|
||||
KafkaStreamsProducerProperties kafkaStreamsProducerProperties) {
|
||||
Serde<?> valueSerde;
|
||||
try {
|
||||
if (producerProperties.isUseNativeEncoding()) {
|
||||
valueSerde = getValueSerde(kafkaStreamsProducerProperties.getValueSerde());
|
||||
valueSerde = getValueSerde(
|
||||
kafkaStreamsProducerProperties.getValueSerde());
|
||||
}
|
||||
else {
|
||||
valueSerde = Serdes.ByteArray();
|
||||
}
|
||||
valueSerde.configure(this.streamConfigGlobalProperties, false);
|
||||
}
|
||||
catch (ClassNotFoundException ex) {
|
||||
throw new IllegalStateException("Serde class not found: ", ex);
|
||||
}
|
||||
return valueSerde;
|
||||
}
|
||||
|
||||
public Serde<?> getOutboundValueSerde(ProducerProperties producerProperties,
|
||||
KafkaStreamsProducerProperties kafkaStreamsProducerProperties, ResolvableType resolvableType) {
|
||||
Serde<?> valueSerde;
|
||||
try {
|
||||
if (producerProperties.isUseNativeEncoding()) {
|
||||
valueSerde = getValueSerde(
|
||||
kafkaStreamsProducerProperties.getValueSerde(), resolvableType);
|
||||
}
|
||||
else {
|
||||
valueSerde = Serdes.ByteArray();
|
||||
@@ -137,7 +215,6 @@ class KeyValueSerdeResolver {
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for state store.
|
||||
*
|
||||
* @param keySerdeString serde class used for key
|
||||
* @return {@link Serde} for the state store key.
|
||||
*/
|
||||
@@ -147,7 +224,6 @@ class KeyValueSerdeResolver {
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for state store value.
|
||||
*
|
||||
* @param valueSerdeString serde class used for value
|
||||
* @return {@link Serde} for the state store value.
|
||||
*/
|
||||
@@ -167,8 +243,7 @@ class KeyValueSerdeResolver {
|
||||
keySerde = Utils.newInstance(keySerdeString, Serde.class);
|
||||
}
|
||||
else {
|
||||
keySerde = this.binderConfigurationProperties.getConfiguration().containsKey("default.key.serde") ?
|
||||
Utils.newInstance(this.binderConfigurationProperties.getConfiguration().get("default.key.serde"), Serde.class) : Serdes.ByteArray();
|
||||
keySerde = getFallbackSerde("default.key.serde");
|
||||
}
|
||||
keySerde.configure(this.streamConfigGlobalProperties, true);
|
||||
|
||||
@@ -179,15 +254,182 @@ class KeyValueSerdeResolver {
|
||||
return keySerde;
|
||||
}
|
||||
|
||||
private Serde<?> getValueSerde(String valueSerdeString) throws ClassNotFoundException {
|
||||
private Serde<?> getKeySerde(String keySerdeString, ResolvableType resolvableType) {
|
||||
Serde<?> keySerde = null;
|
||||
try {
|
||||
if (StringUtils.hasText(keySerdeString)) {
|
||||
keySerde = Utils.newInstance(keySerdeString, Serde.class);
|
||||
}
|
||||
else {
|
||||
if (resolvableType != null &&
|
||||
(isResolvalbeKafkaStreamsType(resolvableType) || isResolvableKStreamArrayType(resolvableType))) {
|
||||
ResolvableType generic = resolvableType.isArray() ? resolvableType.getComponentType().getGeneric(0) : resolvableType.getGeneric(0);
|
||||
Serde<?> fallbackSerde = getFallbackSerde("default.key.serde");
|
||||
keySerde = getSerde(generic, fallbackSerde);
|
||||
}
|
||||
if (keySerde == null) {
|
||||
keySerde = Serdes.ByteArray();
|
||||
}
|
||||
}
|
||||
keySerde.configure(this.streamConfigGlobalProperties, true);
|
||||
}
|
||||
catch (ClassNotFoundException ex) {
|
||||
throw new IllegalStateException("Serde class not found: ", ex);
|
||||
}
|
||||
return keySerde;
|
||||
}
|
||||
|
||||
private boolean isResolvableKStreamArrayType(ResolvableType resolvableType) {
|
||||
return resolvableType.isArray() &&
|
||||
KStream.class.isAssignableFrom(resolvableType.getComponentType().getRawClass());
|
||||
}
|
||||
|
||||
private boolean isResolvalbeKafkaStreamsType(ResolvableType resolvableType) {
|
||||
return resolvableType.getRawClass() != null && (KStream.class.isAssignableFrom(resolvableType.getRawClass()) || KTable.class.isAssignableFrom(resolvableType.getRawClass()) ||
|
||||
GlobalKTable.class.isAssignableFrom(resolvableType.getRawClass()));
|
||||
}
|
||||
|
||||
private Serde<?> getSerde(ResolvableType generic, Serde<?> fallbackSerde) {
|
||||
Serde<?> serde = null;
|
||||
|
||||
Map<String, Serde> beansOfType = context.getBeansOfType(Serde.class);
|
||||
Serde<?>[] serdeBeans = new Serde<?>[1];
|
||||
|
||||
final Class<?> genericRawClazz = generic.getRawClass();
|
||||
beansOfType.forEach((k, v) -> {
|
||||
final Class<?> classObj = ClassUtils.resolveClassName(((AnnotatedBeanDefinition)
|
||||
context.getBeanFactory().getBeanDefinition(k))
|
||||
.getMetadata().getClassName(),
|
||||
ClassUtils.getDefaultClassLoader());
|
||||
try {
|
||||
Method[] methods = classObj.getMethods();
|
||||
Optional<Method> serdeBeanMethod = Arrays.stream(methods).filter(m -> m.getName().equals(k)).findFirst();
|
||||
if (serdeBeanMethod.isPresent()) {
|
||||
Method method = serdeBeanMethod.get();
|
||||
ResolvableType resolvableType = ResolvableType.forMethodReturnType(method, classObj);
|
||||
ResolvableType serdeBeanGeneric = resolvableType.getGeneric(0);
|
||||
Class<?> serdeGenericRawClazz = serdeBeanGeneric.getRawClass();
|
||||
if (serdeGenericRawClazz != null && genericRawClazz != null) {
|
||||
if (serdeGenericRawClazz.isAssignableFrom(genericRawClazz)) {
|
||||
serdeBeans[0] = v;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
// Pass through...
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
if (serdeBeans[0] != null) {
|
||||
return serdeBeans[0];
|
||||
}
|
||||
|
||||
if (genericRawClazz != null) {
|
||||
if (Integer.class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.Integer();
|
||||
}
|
||||
else if (Long.class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.Long();
|
||||
}
|
||||
else if (Short.class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.Short();
|
||||
}
|
||||
else if (Double.class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.Double();
|
||||
}
|
||||
else if (Float.class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.Float();
|
||||
}
|
||||
else if (byte[].class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.ByteArray();
|
||||
}
|
||||
else if (String.class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.String();
|
||||
}
|
||||
else if (UUID.class.isAssignableFrom(genericRawClazz)) {
|
||||
serde = Serdes.UUID();
|
||||
}
|
||||
else if (!isSerdeFromStandardDefaults(fallbackSerde)) {
|
||||
//User purposely set a default serde that is not one of the above
|
||||
serde = fallbackSerde;
|
||||
}
|
||||
else {
// If the type is Object, skip assigning the JsonSerde and let the fallback mechanism take precedence.
if (!genericRawClazz.isAssignableFrom(Object.class)) {
serde = new JsonSerde(genericRawClazz);
}
|
||||
}
|
||||
}
|
||||
return serde;
|
||||
}
|
||||
|
||||
private boolean isSerdeFromStandardDefaults(Serde<?> serde) {
|
||||
if (serde != null) {
|
||||
if (Number.class.isAssignableFrom(serde.getClass())) {
|
||||
return true;
|
||||
}
|
||||
else if (Serdes.ByteArray().getClass().isAssignableFrom(serde.getClass())) {
|
||||
return true;
|
||||
}
|
||||
else if (Serdes.String().getClass().isAssignableFrom(serde.getClass())) {
|
||||
return true;
|
||||
}
|
||||
else if (Serdes.UUID().getClass().isAssignableFrom(serde.getClass())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
private Serde<?> getValueSerde(String valueSerdeString)
|
||||
throws ClassNotFoundException {
|
||||
Serde<?> valueSerde;
|
||||
if (StringUtils.hasText(valueSerdeString)) {
|
||||
valueSerde = Utils.newInstance(valueSerdeString, Serde.class);
|
||||
}
|
||||
else {
|
||||
valueSerde = this.binderConfigurationProperties.getConfiguration().containsKey("default.value.serde") ?
|
||||
Utils.newInstance(this.binderConfigurationProperties.getConfiguration().get("default.value.serde"), Serde.class) : Serdes.ByteArray();
|
||||
valueSerde = getFallbackSerde("default.value.serde");
|
||||
}
|
||||
return valueSerde;
|
||||
}
|
||||
|
||||
private Serde<?> getFallbackSerde(String s) throws ClassNotFoundException {
return this.binderConfigurationProperties.getConfiguration()
.containsKey(s)
? Utils.newInstance(this.binderConfigurationProperties
.getConfiguration().get(s),
Serde.class)
: Serdes.ByteArray();
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Serde<?> getValueSerde(String valueSerdeString, ResolvableType resolvableType)
|
||||
throws ClassNotFoundException {
|
||||
Serde<?> valueSerde = null;
|
||||
if (StringUtils.hasText(valueSerdeString)) {
|
||||
valueSerde = Utils.newInstance(valueSerdeString, Serde.class);
|
||||
}
|
||||
else {
|
||||
|
||||
if (resolvableType != null && ((isResolvableKafkaStreamsType(resolvableType)) ||
|
||||
(isResolvableKStreamArrayType(resolvableType)))) {
|
||||
Serde<?> fallbackSerde = getFallbackSerde("default.value.serde");
|
||||
ResolvableType generic = resolvableType.isArray() ? resolvableType.getComponentType().getGeneric(1) : resolvableType.getGeneric(1);
|
||||
valueSerde = getSerde(generic, fallbackSerde);
|
||||
}
|
||||
if (valueSerde == null) {
|
||||
|
||||
valueSerde = Serdes.ByteArray();
|
||||
}
|
||||
}
|
||||
return valueSerde;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
context = (ConfigurableApplicationContext) applicationContext;
|
||||
}
|
||||
}
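For orientation, here is a minimal sketch (not part of this changeset) of the kind of typed function bean whose generics KeyValueSerdeResolver introspects. The PageView POJO, the bean name and the configuration class are hypothetical; with no serde properties configured, getKeySerde/getValueSerde above would fall back to Serdes.String() for the String key and a JsonSerde for the PageView value.

import java.util.function.Function;

import org.apache.kafka.streams.kstream.KStream;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class PageViewProcessorConfig {

	// Hypothetical POJO used only for illustration.
	public static class PageView {
		public String page;
		public long millis;
	}

	// With no explicit serde configuration, the resolver infers Serdes.String() for the key
	// and new JsonSerde(PageView.class) for the value from this generic signature.
	@Bean
	public Function<KStream<String, PageView>, KStream<String, Long>> viewTime() {
		return views -> views.mapValues(view -> view.millis);
	}
}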
@@ -1,65 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import org.apache.kafka.streams.KafkaStreams;
|
||||
import org.apache.kafka.streams.errors.InvalidStateStoreException;
|
||||
import org.apache.kafka.streams.state.QueryableStoreType;
|
||||
|
||||
/**
|
||||
* Registry that contains {@link QueryableStoreType}s those created from
|
||||
* the user applications.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Renwei Han
|
||||
* @since 2.0.0
|
||||
* @deprecated in favor of {@link InteractiveQueryService}
|
||||
*/
|
||||
public class QueryableStoreRegistry {
|
||||
|
||||
private final KafkaStreamsRegistry kafkaStreamsRegistry;
|
||||
|
||||
public QueryableStoreRegistry(KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
this.kafkaStreamsRegistry = kafkaStreamsRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve and return a queryable store by name created in the application.
|
||||
*
|
||||
* @param storeName name of the queryable store
|
||||
* @param storeType type of the queryable store
|
||||
* @param <T> generic queryable store
|
||||
* @return queryable store.
|
||||
* @deprecated in favor of {@link InteractiveQueryService#getQueryableStore(String, QueryableStoreType)}
|
||||
*/
|
||||
public <T> T getQueryableStoreType(String storeName, QueryableStoreType<T> storeType) {
|
||||
|
||||
for (KafkaStreams kafkaStream : this.kafkaStreamsRegistry.getKafkaStreams()) {
|
||||
try {
|
||||
T store = kafkaStream.store(storeName, storeType);
|
||||
if (store != null) {
|
||||
return store;
|
||||
}
|
||||
}
|
||||
catch (InvalidStateStoreException ignored) {
|
||||
//pass through
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
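Since this file removes QueryableStoreRegistry in favor of InteractiveQueryService, a hedged migration sketch follows; the store name, class name and injected field are illustrative, and only the getQueryableStore(String, QueryableStoreType) signature cited in the Javadoc above is relied on.

import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;

import org.springframework.cloud.stream.binder.kafka.streams.InteractiveQueryService;

public class SongPlayCountFinder {

	private final InteractiveQueryService interactiveQueryService;

	public SongPlayCountFinder(InteractiveQueryService interactiveQueryService) {
		this.interactiveQueryService = interactiveQueryService;
	}

	public Long playCount(String songId) {
		// Replaces queryableStoreRegistry.getQueryableStoreType("song-play-counts", ...) from the deleted class.
		ReadOnlyKeyValueStore<String, Long> store = this.interactiveQueryService
				.getQueryableStore("song-play-counts", QueryableStoreTypes.<String, Long>keyValueStore());
		return store.get(songId);
	}
}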
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -32,8 +32,8 @@ import org.apache.kafka.streams.processor.internals.StreamTask;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
/**
|
||||
* Custom implementation for {@link DeserializationExceptionHandler} that sends the records
|
||||
* in error to a DLQ topic, then continue stream processing on new records.
|
||||
* Custom implementation for {@link DeserializationExceptionHandler} that sends the
|
||||
* records in error to a DLQ topic, then continues stream processing on new records.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.0.0
|
||||
@@ -46,14 +46,13 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
public static final String KAFKA_STREAMS_DLQ_DISPATCHERS = "spring.cloud.stream.kafka.streams.dlq.dispatchers";
|
||||
|
||||
/**
|
||||
* DLQ dispatcher per topic in the application context. The key here is not the actual DLQ topic
|
||||
* but the incoming topic that caused the error.
|
||||
* DLQ dispatcher per topic in the application context. The key here is not the actual
|
||||
* DLQ topic but the incoming topic that caused the error.
|
||||
*/
|
||||
private Map<String, KafkaStreamsDlqDispatch> dlqDispatchers = new HashMap<>();
|
||||
|
||||
/**
|
||||
* For a given topic, send the key/value record to DLQ topic.
|
||||
*
|
||||
* @param topic incoming topic that caused the error
|
||||
* @param key to send
|
||||
* @param value to send
|
||||
@@ -66,16 +65,23 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public DeserializationHandlerResponse handle(ProcessorContext context, ConsumerRecord<byte[], byte[]> record, Exception exception) {
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = this.dlqDispatchers.get(record.topic());
|
||||
kafkaStreamsDlqDispatch.sendToDlq(record.key(), record.value(), record.partition());
|
||||
public DeserializationHandlerResponse handle(ProcessorContext context,
|
||||
ConsumerRecord<byte[], byte[]> record, Exception exception) {
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = this.dlqDispatchers
|
||||
.get(record.topic());
|
||||
kafkaStreamsDlqDispatch.sendToDlq(record.key(), record.value(),
|
||||
record.partition());
|
||||
context.commit();
|
||||
|
||||
// The following conditional block should be reconsidered when we have a solution for this SO problem:
|
||||
// The following conditional block should be reconsidered when we have a solution
|
||||
// for this SO problem:
|
||||
// https://stackoverflow.com/questions/48470899/kafka-streams-deserialization-handler
|
||||
// Currently it seems like when deserialization error happens, there is no commits happening and the
|
||||
// following code will use reflection to get access to the underlying KafkaConsumer.
|
||||
// It works with Kafka 1.0.0, but there is no guarantee it will work in future versions of kafka as
|
||||
// Currently it seems like when deserialization error happens, there is no commits
|
||||
// happening and the
|
||||
// following code will use reflection to get access to the underlying
|
||||
// KafkaConsumer.
|
||||
// It works with Kafka 1.0.0, but there is no guarantee it will work in future
|
||||
// versions of kafka as
|
||||
// we access private fields by name using reflection, but it is a temporary fix.
|
||||
if (context instanceof ProcessorContextImpl) {
|
||||
ProcessorContextImpl processorContextImpl = (ProcessorContextImpl) context;
|
||||
@@ -87,11 +93,13 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
StreamTask streamTask = (StreamTask) taskField;
|
||||
Field consumer = ReflectionUtils.findField(StreamTask.class, "consumer");
|
||||
ReflectionUtils.makeAccessible(consumer);
|
||||
Object kafkaConsumerField = ReflectionUtils.getField(consumer, streamTask);
|
||||
Object kafkaConsumerField = ReflectionUtils.getField(consumer,
|
||||
streamTask);
|
||||
if (kafkaConsumerField.getClass().isAssignableFrom(KafkaConsumer.class)) {
|
||||
KafkaConsumer kafkaConsumer = (KafkaConsumer) kafkaConsumerField;
|
||||
final Map<TopicPartition, OffsetAndMetadata> consumedOffsetsAndMetadata = new HashMap<>();
|
||||
TopicPartition tp = new TopicPartition(record.topic(), record.partition());
|
||||
TopicPartition tp = new TopicPartition(record.topic(),
|
||||
record.partition());
|
||||
OffsetAndMetadata oam = new OffsetAndMetadata(record.offset() + 1);
|
||||
consumedOffsetsAndMetadata.put(tp, oam);
|
||||
kafkaConsumer.commitSync(consumedOffsetsAndMetadata);
|
||||
@@ -104,10 +112,12 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void configure(Map<String, ?> configs) {
|
||||
this.dlqDispatchers = (Map<String, KafkaStreamsDlqDispatch>) configs.get(KAFKA_STREAMS_DLQ_DISPATCHERS);
|
||||
this.dlqDispatchers = (Map<String, KafkaStreamsDlqDispatch>) configs
|
||||
.get(KAFKA_STREAMS_DLQ_DISPATCHERS);
|
||||
}
|
||||
|
||||
void addKStreamDlqDispatch(String topic, KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch) {
|
||||
void addKStreamDlqDispatch(String topic,
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch) {
|
||||
this.dlqDispatchers.put(topic, kafkaStreamsDlqDispatch);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -23,13 +23,13 @@ import org.springframework.kafka.KafkaException;
|
||||
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
|
||||
/**
|
||||
* Iterate through all {@link StreamsBuilderFactoryBean} in the application context
|
||||
* and start them. As each one completes starting, register the associated KafkaStreams
|
||||
* object into {@link QueryableStoreRegistry}.
|
||||
* Iterate through all {@link StreamsBuilderFactoryBean} in the application context and
|
||||
* start them. As each one completes starting, register the associated KafkaStreams object
|
||||
* into {@link InteractiveQueryService}.
|
||||
*
|
||||
* This {@link SmartLifecycle} class ensures that the bean created from it is started very late
|
||||
* through the bootstrap process by setting the phase value closer to Integer.MAX_VALUE.
|
||||
* This is to guarantee that the {@link StreamsBuilderFactoryBean} on a
|
||||
* This {@link SmartLifecycle} class ensures that the bean created from it is started very
|
||||
* late through the bootstrap process by setting the phase value closer to
|
||||
* Integer.MAX_VALUE. This is to guarantee that the {@link StreamsBuilderFactoryBean} on a
|
||||
* {@link org.springframework.cloud.stream.annotation.StreamListener} method with multiple
|
||||
* bindings is only started after all the binding phases have completed successfully.
|
||||
*
|
||||
@@ -38,12 +38,14 @@ import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
class StreamsBuilderFactoryManager implements SmartLifecycle {
|
||||
|
||||
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
|
||||
|
||||
private final KafkaStreamsRegistry kafkaStreamsRegistry;
|
||||
|
||||
private volatile boolean running;
|
||||
|
||||
StreamsBuilderFactoryManager(KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
StreamsBuilderFactoryManager(
|
||||
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
this.kafkaStreamsBindingInformationCatalogue = kafkaStreamsBindingInformationCatalogue;
|
||||
this.kafkaStreamsRegistry = kafkaStreamsRegistry;
|
||||
}
|
||||
@@ -65,10 +67,12 @@ class StreamsBuilderFactoryManager implements SmartLifecycle {
|
||||
public synchronized void start() {
|
||||
if (!this.running) {
|
||||
try {
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue.getStreamsBuilderFactoryBeans();
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue
|
||||
.getStreamsBuilderFactoryBeans();
|
||||
for (StreamsBuilderFactoryBean streamsBuilderFactoryBean : streamsBuilderFactoryBeans) {
|
||||
streamsBuilderFactoryBean.start();
|
||||
this.kafkaStreamsRegistry.registerKafkaStreams(streamsBuilderFactoryBean.getKafkaStreams());
|
||||
this.kafkaStreamsRegistry.registerKafkaStreams(
|
||||
streamsBuilderFactoryBean.getKafkaStreams());
|
||||
}
|
||||
this.running = true;
|
||||
}
|
||||
@@ -79,21 +83,22 @@ class StreamsBuilderFactoryManager implements SmartLifecycle {
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void stop() {
|
||||
if (this.running) {
|
||||
try {
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue.getStreamsBuilderFactoryBeans();
|
||||
for (StreamsBuilderFactoryBean streamsBuilderFactoryBean : streamsBuilderFactoryBeans) {
|
||||
streamsBuilderFactoryBean.stop();
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
finally {
|
||||
this.running = false;
|
||||
public synchronized void stop() {
|
||||
if (this.running) {
|
||||
try {
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue
|
||||
.getStreamsBuilderFactoryBeans();
|
||||
for (StreamsBuilderFactoryBean streamsBuilderFactoryBean : streamsBuilderFactoryBeans) {
|
||||
streamsBuilderFactoryBean.stop();
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
finally {
|
||||
this.running = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -24,12 +24,14 @@ import org.springframework.cloud.stream.annotation.Output;
|
||||
/**
|
||||
* Bindable interface for {@link KStream} input and output.
|
||||
*
|
||||
* This interface can be used as a bindable interface with {@link org.springframework.cloud.stream.annotation.EnableBinding}
|
||||
* when both input and output types are single KStream. In other scenarios where multiple types are required, other
|
||||
* similar bindable interfaces can be created and used. For example, there are cases in which multiple KStreams
|
||||
* are required on the outbound in the case of KStream branching or multiple input types are required either in the
|
||||
* form of multiple KStreams and a combination of KStreams and KTables. In those cases, new bindable interfaces compatible
|
||||
* with the requirements must be created. Here are some examples.
|
||||
* This interface can be used as a bindable interface with
|
||||
* {@link org.springframework.cloud.stream.annotation.EnableBinding} when both input and
|
||||
* output types are single KStream. In other scenarios where multiple types are required,
|
||||
* other similar bindable interfaces can be created and used. For example, there are cases
|
||||
* in which multiple KStreams are required on the outbound in the case of KStream
|
||||
* branching or multiple input types are required either in the form of multiple KStreams
|
||||
* and a combination of KStreams and KTables. In those cases, new bindable interfaces
|
||||
* compatible with the requirements must be created. Here are some examples.
|
||||
*
|
||||
* <pre class="code">
|
||||
* interface KStreamBranchProcessor {
|
||||
@@ -73,7 +75,6 @@ public interface KafkaStreamsProcessor {
|
||||
|
||||
/**
|
||||
* Input binding.
|
||||
*
|
||||
* @return {@link Input} binding for {@link KStream} type.
|
||||
*/
|
||||
@Input("input")
|
||||
@@ -81,9 +82,9 @@ public interface KafkaStreamsProcessor {
|
||||
|
||||
/**
|
||||
* Output binding.
|
||||
*
|
||||
* @return {@link Output} binding for {@link KStream} type.
|
||||
*/
|
||||
@Output("output")
|
||||
KStream<?, ?> output();
|
||||
|
||||
}
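A short usage sketch of the bindable interface above (the processing logic and class name are illustrative); the binding names come from the @Input("input") and @Output("output") declarations.

import org.apache.kafka.streams.kstream.KStream;

import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

@EnableBinding(KafkaStreamsProcessor.class)
public class UppercaseProcessor {

	@StreamListener("input")
	@SendTo("output")
	public KStream<String, String> process(KStream<String, String> words) {
		// Any KStream pipeline could go here; upper-casing keeps the example minimal.
		return words.mapValues(value -> value.toUpperCase());
	}
}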
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,7 +16,6 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.annotations;
|
||||
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
@@ -27,21 +26,23 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
/**
|
||||
* Interface for Kafka Stream state store.
|
||||
*
|
||||
* This interface can be used to inject a state store specification into KStream building process so
|
||||
* that the desired store can be built by StreamBuilder and added to topology for later use by processors.
|
||||
* This is particularly useful when need to combine stream DSL with low level processor APIs. In those cases,
|
||||
* if a writable state store is desired in processors, it needs to be created using this annotation.
|
||||
* Here is the example.
|
||||
* This interface can be used to inject a state store specification into KStream building
|
||||
* process so that the desired store can be built by StreamBuilder and added to topology
|
||||
* for later use by processors. This is particularly useful when one needs to combine stream
|
||||
* DSL with low level processor APIs. In those cases, if a writable state store is desired
|
||||
* in processors, it needs to be created using this annotation. Here is the example.
|
||||
*
|
||||
* <pre class="code">
|
||||
* @StreamListener("input")
|
||||
* @KafkaStreamsStateStore(name="mystate", type= KafkaStreamsStateStoreProperties.StoreType.WINDOW, size=300000)
|
||||
* @KafkaStreamsStateStore(name="mystate", type= KafkaStreamsStateStoreProperties.StoreType.WINDOW,
|
||||
* size=300000)
|
||||
* public void process(KStream<Object, Product> input) {
|
||||
* ......
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* With that, you should be able to read/write this state store in your processor/transformer code.
|
||||
* With that, you should be able to read/write this state store in your
|
||||
* processor/transformer code.
|
||||
*
|
||||
* <pre class="code">
|
||||
* new Processor<Object, Product>() {
|
||||
@@ -64,57 +65,51 @@ public @interface KafkaStreamsStateStore {
|
||||
|
||||
/**
|
||||
* Provides name of the state store.
|
||||
*
|
||||
* @return name of state store.
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* State store type.
|
||||
*
|
||||
* @return {@link KafkaStreamsStateStoreProperties.StoreType} of state store.
|
||||
*/
|
||||
KafkaStreamsStateStoreProperties.StoreType type() default KafkaStreamsStateStoreProperties.StoreType.KEYVALUE;
|
||||
|
||||
/**
|
||||
* Serde used for key.
|
||||
*
|
||||
* @return key serde of state store.
|
||||
*/
|
||||
String keySerde() default "org.apache.kafka.common.serialization.Serdes$StringSerde";
|
||||
|
||||
/**
|
||||
* Serde used for value.
|
||||
*
|
||||
* @return value serde of state store.
|
||||
*/
|
||||
String valueSerde() default "org.apache.kafka.common.serialization.Serdes$StringSerde";
|
||||
|
||||
/**
|
||||
* Length in milliseconds of the windowed store's window.
*
* @return length in milliseconds of the window (for windowed store).
|
||||
*/
|
||||
long lengthMs() default 0;
|
||||
|
||||
/**
|
||||
* Retention period for Windowed store windows.
|
||||
*
|
||||
* @return the maximum period of time in milli-second to keep each window in this store(for windowed store).
|
||||
* @return the maximum period of time in milliseconds to keep each window in this
* store (for windowed store).
|
||||
*/
|
||||
long retentionMs() default 0;
|
||||
|
||||
/**
|
||||
* Whether caching is enabled or not.
|
||||
*
|
||||
* @return whether caching should be enabled on the created store.
|
||||
*/
|
||||
boolean cache() default false;
|
||||
|
||||
/**
|
||||
* Whether logging is enabled or not.
|
||||
*
|
||||
* @return whether logging should be enabled on the created store.
|
||||
*/
|
||||
boolean logging() default true;
|
||||
|
||||
}
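To round out the Javadoc above, here is a hedged end-to-end sketch of declaring and then reading the store from a processor; the value types and the surrounding class are invented, the "input" binding is assumed to exist, and lengthMs is used because the annotation exposes lengthMs rather than the size attribute shown in the older example.

import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.WindowStore;

import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsStateStoreProperties;

public class StateStoreUsageExample {

	@StreamListener("input")
	@KafkaStreamsStateStore(name = "mystate",
			type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000)
	public void process(KStream<Object, String> input) {
		input.process(() -> new Processor<Object, String>() {

			private WindowStore<Object, String> state;

			@Override
			@SuppressWarnings("unchecked")
			public void init(ProcessorContext context) {
				// The store declared by the annotation is part of the topology and is looked up by name.
				this.state = (WindowStore<Object, String>) context.getStateStore("mystate");
			}

			@Override
			public void process(Object key, String value) {
				// Read/write this.state here as required.
			}

			@Override
			public void close() {
			}
		}, "mystate");
	}
}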
@@ -0,0 +1,111 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
|
||||
import org.springframework.beans.factory.BeanFactoryUtils;
|
||||
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionOutcome;
|
||||
import org.springframework.boot.autoconfigure.condition.SpringBootCondition;
|
||||
import org.springframework.context.annotation.ConditionContext;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.core.type.AnnotatedTypeMetadata;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
/**
|
||||
* Custom {@link org.springframework.context.annotation.Condition} that detects the presence
* of java.util.function.Function or java.util.function.Consumer beans. Used for Kafka Streams function support.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.2.0
|
||||
*/
|
||||
public class FunctionDetectorCondition extends SpringBootCondition {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(FunctionDetectorCondition.class);
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@Override
|
||||
public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
|
||||
if (context != null && context.getBeanFactory() != null) {
|
||||
|
||||
String[] functionTypes = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(context.getBeanFactory(), Function.class, true, false);
|
||||
String[] consumerTypes = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(context.getBeanFactory(), Consumer.class, true, false);
|
||||
String[] biFunctionTypes = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(context.getBeanFactory(), BiFunction.class, true, false);
|
||||
String[] biConsumerTypes = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(context.getBeanFactory(), BiConsumer.class, true, false);
|
||||
|
||||
List<String> functionComponents = new ArrayList<>();
|
||||
|
||||
functionComponents.addAll(Arrays.asList(functionTypes));
|
||||
functionComponents.addAll(Arrays.asList(consumerTypes));
|
||||
functionComponents.addAll(Arrays.asList(biFunctionTypes));
|
||||
functionComponents.addAll(Arrays.asList(biConsumerTypes));
|
||||
|
||||
List<String> kafkaStreamsFunctions = pruneFunctionBeansForKafkaStreams(functionComponents, context);
|
||||
if (!CollectionUtils.isEmpty(kafkaStreamsFunctions)) {
|
||||
return ConditionOutcome.match("Matched. Function/BiFunction/Consumer beans found");
|
||||
}
|
||||
else {
|
||||
return ConditionOutcome.noMatch("No match. No Function/BiFunction/Consumer beans found");
|
||||
}
|
||||
}
|
||||
return ConditionOutcome.noMatch("No match. No Function/BiFunction/Consumer beans found");
|
||||
}
|
||||
|
||||
private static List<String> pruneFunctionBeansForKafkaStreams(List<String> strings,
|
||||
ConditionContext context) {
|
||||
final List<String> prunedList = new ArrayList<>();
|
||||
|
||||
for (String key : strings) {
|
||||
final Class<?> classObj = ClassUtils.resolveClassName(((AnnotatedBeanDefinition)
|
||||
context.getBeanFactory().getBeanDefinition(key))
|
||||
.getMetadata().getClassName(),
|
||||
ClassUtils.getDefaultClassLoader());
|
||||
try {
|
||||
Method[] methods = classObj.getMethods();
|
||||
Optional<Method> kafkaStreamMethod = Arrays.stream(methods).filter(m -> m.getName().equals(key)).findFirst();
|
||||
if (kafkaStreamMethod.isPresent()) {
|
||||
Method method = kafkaStreamMethod.get();
|
||||
ResolvableType resolvableType = ResolvableType.forMethodReturnType(method, classObj);
|
||||
final Class<?> rawClass = resolvableType.getGeneric(0).getRawClass();
|
||||
if (rawClass == KStream.class || rawClass == KTable.class || rawClass == GlobalKTable.class) {
|
||||
prunedList.add(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
LOG.error("Function not found: " + key, e);
|
||||
}
|
||||
}
|
||||
return prunedList;
|
||||
}
|
||||
}
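As a rough illustration of what the condition above keys on (bean names and types are made up): only beans whose first resolved generic is a Kafka Streams type survive pruneFunctionBeansForKafkaStreams, so only they trigger this binder's function support.

import java.util.function.Function;

import org.apache.kafka.streams.kstream.KStream;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class SampleFunctionBeans {

	// Matched: the first generic of the bean method's return type resolves to KStream.
	@Bean
	public Function<KStream<String, String>, KStream<String, String>> toUpper() {
		return input -> input.mapValues(String::toUpperCase);
	}

	// Not matched: a plain Function with no KStream/KTable/GlobalKTable generics is ignored.
	@Bean
	public Function<String, String> reverse() {
		return s -> new StringBuilder(s).reverse().toString();
	}
}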
@@ -0,0 +1,252 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanFactory;
|
||||
import org.springframework.beans.factory.BeanFactoryAware;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
import org.springframework.cloud.stream.binding.AbstractBindableProxyFactory;
|
||||
import org.springframework.cloud.stream.binding.BindableProxyFactory;
|
||||
import org.springframework.cloud.stream.binding.BoundTargetHolder;
|
||||
import org.springframework.cloud.stream.function.StreamFunctionProperties;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
/**
|
||||
* Kafka Streams specific target bindings proxy factory. See {@link AbstractBindableProxyFactory} for more details.
|
||||
*
|
||||
* Targets bound by this factory:
|
||||
*
|
||||
* {@link KStream}
|
||||
* {@link KTable}
|
||||
* {@link GlobalKTable}
|
||||
*
|
||||
* This class looks at the Function bean's return signature as {@link ResolvableType} and introspects the individual types,
* binding them along the way.
*
* All types on the {@link ResolvableType} are bound except for KStream[] array types on the outbound, which are
* deferred for binding at a later stage. The reason is that, at this point, there is no way to know the
* actual size of the returned array. That has to wait until the function is invoked and a result is available.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 3.0.0
|
||||
*/
|
||||
public class KafkaStreamsBindableProxyFactory extends AbstractBindableProxyFactory implements InitializingBean, BeanFactoryAware {
|
||||
|
||||
/**
|
||||
* Default output binding name. Output binding may occur later on in the function invoker (outside of this class),
|
||||
* thus making this field part of the API.
|
||||
*/
|
||||
public static final String DEFAULT_OUTPUT_SUFFIX = "out";
|
||||
private static final String DEFAULT_INPUT_SUFFIX = "in";
|
||||
|
||||
private static Log log = LogFactory.getLog(BindableProxyFactory.class);
|
||||
|
||||
@Autowired
|
||||
private StreamFunctionProperties streamFunctionProperties;
|
||||
|
||||
private final ResolvableType type;
|
||||
|
||||
private final String functionName;
|
||||
|
||||
private BeanFactory beanFactory;
|
||||
|
||||
|
||||
public KafkaStreamsBindableProxyFactory(ResolvableType type, String functionName) {
|
||||
super(type.getType().getClass());
|
||||
this.type = type;
|
||||
this.functionName = functionName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
Assert.notEmpty(KafkaStreamsBindableProxyFactory.this.bindingTargetFactories,
|
||||
"'bindingTargetFactories' cannot be empty");
|
||||
|
||||
int resolvableTypeDepthCounter = 0;
|
||||
ResolvableType argument = this.type.getGeneric(resolvableTypeDepthCounter++);
|
||||
List<String> inputBindings = buildInputBindings();
|
||||
Iterator<String> iterator = inputBindings.iterator();
|
||||
String next = iterator.next();
|
||||
bindInput(argument, next);
|
||||
|
||||
if (this.type.getRawClass() != null &&
|
||||
(this.type.getRawClass().isAssignableFrom(BiFunction.class) ||
|
||||
this.type.getRawClass().isAssignableFrom(BiConsumer.class))) {
|
||||
argument = this.type.getGeneric(resolvableTypeDepthCounter++);
|
||||
next = iterator.next();
|
||||
bindInput(argument, next);
|
||||
}
|
||||
ResolvableType outboundArgument = this.type.getGeneric(resolvableTypeDepthCounter);
|
||||
|
||||
while (isAnotherFunctionOrConsumerFound(outboundArgument)) {
|
||||
//The function is a curried function. We should introspect the partial function chain hierarchy.
|
||||
argument = outboundArgument.getGeneric(0);
|
||||
String next1 = iterator.next();
|
||||
bindInput(argument, next1);
|
||||
outboundArgument = outboundArgument.getGeneric(1);
|
||||
}
|
||||
|
||||
//Introspect output for binding.
|
||||
if (outboundArgument != null && outboundArgument.getRawClass() != null && (!outboundArgument.isArray() &&
|
||||
outboundArgument.getRawClass().isAssignableFrom(KStream.class))) {
|
||||
// if the type is array, we need to do a late binding as we don't know the number of
|
||||
// output bindings at this point in the flow.
|
||||
|
||||
List<String> outputBindings = streamFunctionProperties.getOutputBindings().get(this.functionName);
|
||||
String outputBinding = null;
|
||||
|
||||
if (!CollectionUtils.isEmpty(outputBindings)) {
|
||||
Iterator<String> outputBindingsIter = outputBindings.iterator();
|
||||
if (outputBindingsIter.hasNext()) {
|
||||
outputBinding = outputBindingsIter.next();
|
||||
}
|
||||
|
||||
}
|
||||
else {
|
||||
outputBinding = String.format("%s_%s", this.functionName, DEFAULT_OUTPUT_SUFFIX);
|
||||
}
|
||||
Assert.isTrue(outputBinding != null, "output binding is not inferred.");
|
||||
KafkaStreamsBindableProxyFactory.this.outputHolders.put(outputBinding,
|
||||
new BoundTargetHolder(getBindingTargetFactory(KStream.class)
|
||||
.createOutput(outputBinding), true));
|
||||
String outputBinding1 = outputBinding;
|
||||
RootBeanDefinition rootBeanDefinition1 = new RootBeanDefinition();
|
||||
rootBeanDefinition1.setInstanceSupplier(() -> outputHolders.get(outputBinding1).getBoundTarget());
|
||||
BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory;
|
||||
registry.registerBeanDefinition(outputBinding1, rootBeanDefinition1);
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isAnotherFunctionOrConsumerFound(ResolvableType arg1) {
|
||||
return arg1 != null && !arg1.isArray() && arg1.getRawClass() != null &&
|
||||
(arg1.getRawClass().isAssignableFrom(Function.class) || arg1.getRawClass().isAssignableFrom(Consumer.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* If the application provides the property spring.cloud.stream.function.inputBindings.functionName,
* that takes precedence. Otherwise, the binding is named functionName_in for a single input, or
* functionName_in_0, functionName_in_1 and so on for multiple inputs.
|
||||
*
|
||||
* @return an ordered collection of input bindings to use
|
||||
*/
|
||||
private List<String> buildInputBindings() {
|
||||
List<String> inputs = new ArrayList<>();
|
||||
List<String> inputBindings = streamFunctionProperties.getInputBindings().get(this.functionName);
|
||||
if (!CollectionUtils.isEmpty(inputBindings)) {
|
||||
inputs.addAll(inputBindings);
|
||||
return inputs;
|
||||
}
|
||||
int numberOfInputs = this.type.getRawClass() != null &&
|
||||
(this.type.getRawClass().isAssignableFrom(BiFunction.class) ||
|
||||
this.type.getRawClass().isAssignableFrom(BiConsumer.class)) ? 2 : getNumberOfInputs();
|
||||
if (numberOfInputs == 1) {
|
||||
inputs.add(String.format("%s_%s", this.functionName, DEFAULT_INPUT_SUFFIX));
|
||||
return inputs;
|
||||
}
|
||||
else {
|
||||
int i = 0;
|
||||
while (i < numberOfInputs) {
|
||||
inputs.add(String.format("%s_%s_%d", this.functionName, DEFAULT_INPUT_SUFFIX, i++));
|
||||
}
|
||||
return inputs;
|
||||
}
|
||||
}
|
||||
|
||||
private int getNumberOfInputs() {
|
||||
int numberOfInputs = 1;
|
||||
ResolvableType arg1 = this.type.getGeneric(1);
|
||||
|
||||
while (isAnotherFunctionOrConsumerFound(arg1)) {
|
||||
arg1 = arg1.getGeneric(1);
|
||||
numberOfInputs++;
|
||||
}
|
||||
return numberOfInputs;
|
||||
|
||||
}
|
||||
|
||||
private void bindInput(ResolvableType arg0, String inputName) {
|
||||
if (arg0.getRawClass() != null) {
|
||||
KafkaStreamsBindableProxyFactory.this.inputHolders.put(inputName,
|
||||
new BoundTargetHolder(getBindingTargetFactory(arg0.getRawClass())
|
||||
.createInput(inputName), true));
|
||||
}
|
||||
|
||||
BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory;
|
||||
|
||||
RootBeanDefinition rootBeanDefinition = new RootBeanDefinition();
|
||||
rootBeanDefinition.setInstanceSupplier(() -> inputHolders.get(inputName).getBoundTarget());
|
||||
registry.registerBeanDefinition(inputName, rootBeanDefinition);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> getInputs() {
|
||||
Set<String> ins = new LinkedHashSet<>();
|
||||
this.inputHolders.forEach((s, BoundTargetHolder) -> ins.add(s));
|
||||
return ins;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> getOutputs() {
|
||||
Set<String> outs = new LinkedHashSet<>();
|
||||
this.outputHolders.forEach((s, BoundTargetHolder) -> outs.add(s));
|
||||
return outs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
|
||||
this.beanFactory = beanFactory;
|
||||
}
|
||||
|
||||
public void addOutputBinding(String output, Class<?> clazz) {
|
||||
KafkaStreamsBindableProxyFactory.this.outputHolders.put(output,
|
||||
new BoundTargetHolder(getBindingTargetFactory(clazz)
|
||||
.createOutput(output), true));
|
||||
}
|
||||
|
||||
public String getFunctionName() {
|
||||
return functionName;
|
||||
}
|
||||
|
||||
public Map<String, BoundTargetHolder> getOutputHolders() {
|
||||
return outputHolders;
|
||||
}
|
||||
}
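A hedged example of how the naming in afterPropertiesSet and buildInputBindings plays out for a curried function; the bean name process and the types are invented. Absent spring.cloud.stream.function.inputBindings/outputBindings overrides, the derived bindings are process_in_0 and process_in_1 for the two inputs and process_out for the KStream output.

import java.util.function.Function;

import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class CurriedProcessorConfig {

	// Two inputs (a KStream and a KTable) and one KStream output; the proxy factory walks the
	// curried signature and registers process_in_0, process_in_1 and process_out.
	@Bean
	public Function<KStream<String, Long>, Function<KTable<String, String>, KStream<String, Long>>> process() {
		return clicks -> regions -> clicks.join(regions, (clickCount, region) -> clickCount);
	}
}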
@@ -0,0 +1,73 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsFunctionProcessor;
|
||||
import org.springframework.cloud.stream.config.BinderFactoryAutoConfiguration;
|
||||
import org.springframework.cloud.stream.function.StreamFunctionProperties;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Conditional;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
* @since 2.2.0
|
||||
*/
|
||||
@Configuration
|
||||
@EnableConfigurationProperties(StreamFunctionProperties.class)
|
||||
@AutoConfigureBefore(BinderFactoryAutoConfiguration.class)
|
||||
public class KafkaStreamsFunctionAutoConfiguration {
|
||||
|
||||
@Bean
|
||||
@Conditional(FunctionDetectorCondition.class)
|
||||
public KafkaStreamsFunctionProcessorInvoker kafkaStreamsFunctionProcessorInvoker(
|
||||
KafkaStreamsFunctionBeanPostProcessor kafkaStreamsFunctionBeanPostProcessor,
|
||||
KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor,
|
||||
KafkaStreamsBindableProxyFactory[] kafkaStreamsBindableProxyFactories) {
|
||||
return new KafkaStreamsFunctionProcessorInvoker(kafkaStreamsFunctionBeanPostProcessor.getResolvableTypes(),
|
||||
kafkaStreamsFunctionProcessor, kafkaStreamsBindableProxyFactories);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Conditional(FunctionDetectorCondition.class)
|
||||
public KafkaStreamsFunctionBeanPostProcessor kafkaStreamsFunctionBeanPostProcessor() {
|
||||
return new KafkaStreamsFunctionBeanPostProcessor();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Conditional(FunctionDetectorCondition.class)
|
||||
public static BeanFactoryPostProcessor implicitFunctionKafkaStreamsBinder(KafkaStreamsFunctionBeanPostProcessor kafkaStreamsFunctionBeanPostProcessor) {
|
||||
return beanFactory -> {
|
||||
BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory;
|
||||
|
||||
for (String s : kafkaStreamsFunctionBeanPostProcessor.getResolvableTypes().keySet()) {
|
||||
RootBeanDefinition rootBeanDefinition = new RootBeanDefinition(
|
||||
KafkaStreamsBindableProxyFactory.class);
|
||||
rootBeanDefinition.getConstructorArgumentValues()
|
||||
.addGenericArgumentValue(kafkaStreamsFunctionBeanPostProcessor.getResolvableTypes().get(s));
|
||||
rootBeanDefinition.getConstructorArgumentValues()
|
||||
.addGenericArgumentValue(s);
|
||||
registry.registerBeanDefinition("kafkaStreamsBindableProxyFactory-" + s, rootBeanDefinition);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,95 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.TreeMap;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanFactory;
|
||||
import org.springframework.beans.factory.BeanFactoryAware;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
|
||||
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.2.0
|
||||
*
|
||||
*/
|
||||
public class KafkaStreamsFunctionBeanPostProcessor implements InitializingBean, BeanFactoryAware {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KafkaStreamsFunctionBeanPostProcessor.class);
|
||||
|
||||
private ConfigurableListableBeanFactory beanFactory;
|
||||
private Map<String, ResolvableType> resolvableTypeMap = new TreeMap<>();
|
||||
|
||||
public Map<String, ResolvableType> getResolvableTypes() {
|
||||
return this.resolvableTypeMap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
String[] functionNames = this.beanFactory.getBeanNamesForType(Function.class);
|
||||
String[] biFunctionNames = this.beanFactory.getBeanNamesForType(BiFunction.class);
|
||||
String[] consumerNames = this.beanFactory.getBeanNamesForType(Consumer.class);
|
||||
String[] biConsumerNames = this.beanFactory.getBeanNamesForType(BiConsumer.class);
|
||||
|
||||
Stream.concat(
|
||||
Stream.concat(Stream.of(functionNames), Stream.of(consumerNames)),
|
||||
Stream.concat(Stream.of(biFunctionNames), Stream.of(biConsumerNames)))
|
||||
.forEach(this::extractResolvableTypes);
|
||||
}
|
||||
|
||||
private void extractResolvableTypes(String key) {
|
||||
final Class<?> classObj = ClassUtils.resolveClassName(((AnnotatedBeanDefinition)
|
||||
this.beanFactory.getBeanDefinition(key))
|
||||
.getMetadata().getClassName(),
|
||||
ClassUtils.getDefaultClassLoader());
|
||||
try {
|
||||
Method[] methods = classObj.getMethods();
|
||||
Optional<Method> kafkaStreamMethod = Arrays.stream(methods).filter(m -> m.getName().equals(key)).findFirst();
|
||||
if (kafkaStreamMethod.isPresent()) {
|
||||
Method method = kafkaStreamMethod.get();
|
||||
ResolvableType resolvableType = ResolvableType.forMethodReturnType(method, classObj);
|
||||
resolvableTypeMap.put(key, resolvableType);
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
LOG.error("Function not found: " + key, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
|
||||
this.beanFactory = (ConfigurableListableBeanFactory) beanFactory;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsFunctionProcessor;
|
||||
import org.springframework.core.ResolvableType;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1.0
|
||||
*/
|
||||
public class KafkaStreamsFunctionProcessorInvoker {
|
||||
|
||||
private final KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor;
|
||||
private final Map<String, ResolvableType> resolvableTypeMap;
|
||||
private final KafkaStreamsBindableProxyFactory[] kafkaStreamsBindableProxyFactories;
|
||||
|
||||
public KafkaStreamsFunctionProcessorInvoker(Map<String, ResolvableType> resolvableTypeMap,
|
||||
KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor,
|
||||
KafkaStreamsBindableProxyFactory[] kafkaStreamsBindableProxyFactories) {
|
||||
this.kafkaStreamsFunctionProcessor = kafkaStreamsFunctionProcessor;
|
||||
this.resolvableTypeMap = resolvableTypeMap;
|
||||
this.kafkaStreamsBindableProxyFactories = kafkaStreamsBindableProxyFactories;
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
void invoke() {
|
||||
resolvableTypeMap.forEach((key, value) -> {
|
||||
Optional<KafkaStreamsBindableProxyFactory> proxyFactory =
|
||||
Arrays.stream(kafkaStreamsBindableProxyFactories).filter(p -> p.getFunctionName().equals(key)).findFirst();
|
||||
this.kafkaStreamsFunctionProcessor.setupFunctionInvokerForKafkaStreams(value, key, proxyFactory.get());
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,14 +19,17 @@ package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
|
||||
/**
|
||||
* {@link ConfigurationProperties} that can be used by end user Kafka Stream applications. This class provides
|
||||
* convenient ways to access the commonly used kafka stream properties from the user application. For example, windowing
|
||||
* operations are common use cases in stream processing and one can provide window specific properties at runtime and use
|
||||
* {@link ConfigurationProperties} that can be used by end user Kafka Stream applications.
|
||||
* This class provides convenient ways to access the commonly used kafka stream properties
|
||||
* from the user application. For example, windowing operations are common use cases in
|
||||
* stream processing and one can provide window specific properties at runtime and use
|
||||
* those properties in the applications using this class.
|
||||
*
|
||||
* @deprecated The properties exposed by this class can be used directly on Kafka Streams API in the application.
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@ConfigurationProperties("spring.cloud.stream.kafka.streams")
|
||||
@Deprecated
|
||||
public class KafkaStreamsApplicationSupportProperties {
|
||||
|
||||
private TimeWindow timeWindow;
|
||||
@@ -63,5 +66,7 @@ public class KafkaStreamsApplicationSupportProperties {
|
||||
public void setAdvanceBy(int advanceBy) {
|
||||
this.advanceBy = advanceBy;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,6 +16,9 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
|
||||
|
||||
@@ -25,7 +28,8 @@ import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfi
|
||||
* @author Soby Chacko
|
||||
* @author Gary Russell
|
||||
*/
|
||||
public class KafkaStreamsBinderConfigurationProperties extends KafkaBinderConfigurationProperties {
|
||||
public class KafkaStreamsBinderConfigurationProperties
|
||||
extends KafkaBinderConfigurationProperties {
|
||||
|
||||
public KafkaStreamsBinderConfigurationProperties(KafkaProperties kafkaProperties) {
|
||||
super(kafkaProperties);
|
||||
@@ -35,6 +39,7 @@ public class KafkaStreamsBinderConfigurationProperties extends KafkaBinderConfig
|
||||
* Enumeration for various Serde errors.
|
||||
*/
|
||||
public enum SerdeError {
|
||||
|
||||
/**
|
||||
* Deserialization error handler with log and continue.
|
||||
*/
|
||||
@@ -47,10 +52,31 @@ public class KafkaStreamsBinderConfigurationProperties extends KafkaBinderConfig
|
||||
* Deserialization error handler with DLQ send.
|
||||
*/
|
||||
sendToDlq
|
||||
|
||||
}
|
||||
|
||||
private String applicationId;
|
||||
|
||||
private Map<String, String> functions = new HashMap<>();
|
||||
|
||||
private StateStoreRetry stateStoreRetry = new StateStoreRetry();
|
||||
|
||||
public StateStoreRetry getStateStoreRetry() {
|
||||
return stateStoreRetry;
|
||||
}
|
||||
|
||||
public void setStateStoreRetry(StateStoreRetry stateStoreRetry) {
|
||||
this.stateStoreRetry = stateStoreRetry;
|
||||
}
|
||||
|
||||
public Map<String, String> getFunctions() {
|
||||
return functions;
|
||||
}
|
||||
|
||||
public void setFunctions(Map<String, String> functions) {
|
||||
this.functions = functions;
|
||||
}
|
||||
|
||||
public String getApplicationId() {
|
||||
return this.applicationId;
|
||||
}
|
||||
@@ -60,9 +86,10 @@ public class KafkaStreamsBinderConfigurationProperties extends KafkaBinderConfig
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link org.apache.kafka.streams.errors.DeserializationExceptionHandler} to use
|
||||
* when there is a Serde error. {@link KafkaStreamsBinderConfigurationProperties.SerdeError}
|
||||
* values are used to provide the exception handler on consumer binding.
|
||||
* {@link org.apache.kafka.streams.errors.DeserializationExceptionHandler} to use when
|
||||
* there is a Serde error.
|
||||
* {@link KafkaStreamsBinderConfigurationProperties.SerdeError} values are used to
|
||||
* provide the exception handler on consumer binding.
|
||||
*/
|
||||
private KafkaStreamsBinderConfigurationProperties.SerdeError serdeError;
|
||||
|
||||
@@ -70,8 +97,32 @@ public class KafkaStreamsBinderConfigurationProperties extends KafkaBinderConfig
|
||||
return this.serdeError;
|
||||
}
|
||||
|
||||
public void setSerdeError(KafkaStreamsBinderConfigurationProperties.SerdeError serdeError) {
|
||||
public void setSerdeError(
|
||||
KafkaStreamsBinderConfigurationProperties.SerdeError serdeError) {
|
||||
this.serdeError = serdeError;
|
||||
}
|
||||
|
||||
public static class StateStoreRetry {
|
||||
|
||||
private int maxAttempts = 1;
|
||||
|
||||
private long backoffPeriod = 1000;
|
||||
|
||||
public int getMaxAttempts() {
|
||||
return maxAttempts;
|
||||
}
|
||||
|
||||
public void setMaxAttempts(int maxAttempts) {
|
||||
this.maxAttempts = maxAttempts;
|
||||
}
|
||||
|
||||
public long getBackoffPeriod() {
|
||||
return backoffPeriod;
|
||||
}
|
||||
|
||||
public void setBackoffPeriod(long backoffPeriod) {
|
||||
this.backoffPeriod = backoffPeriod;
|
||||
}
|
||||
}
|
||||
|
||||
}
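The new `stateStoreRetry` settings above (`maxAttempts`, `backoffPeriod`) are presumably consumed when a queryable state store is looked up; the interactive-query test further below verifies three lookup attempts. A minimal sketch of such a retry loop, assuming the plain `KafkaStreams.store(String, QueryableStoreType)` lookup rather than the binder's exact internal code:

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.state.QueryableStoreType;

import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;

final class StateStoreRetrySketch {

    // Looks up a queryable store, retrying according to the configured stateStoreRetry
    // settings; the store may not be queryable yet, e.g. while a rebalance is in progress.
    static <T> T retrieveQueryableStore(KafkaStreams kafkaStreams, String storeName,
            QueryableStoreType<T> storeType,
            KafkaStreamsBinderConfigurationProperties.StateStoreRetry retry)
            throws InterruptedException {
        RuntimeException lastFailure = null;
        for (int attempt = 0; attempt < retry.getMaxAttempts(); attempt++) {
            try {
                return kafkaStreams.store(storeName, storeType);
            }
            catch (RuntimeException ex) {
                lastFailure = ex;
                Thread.sleep(retry.getBackoffPeriod());
            }
        }
        throw new IllegalStateException("Could not retrieve store " + storeName, lastFailure);
    }

}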
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,7 +19,8 @@ package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
|
||||
/**
|
||||
* Extended binding properties holder that delegates to Kafka Streams producer and consumer properties.
|
||||
* Extended binding properties holder that delegates to Kafka Streams producer and
|
||||
* consumer properties.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
*/
|
||||
@@ -44,4 +45,5 @@ public class KafkaStreamsBindingProperties implements BinderSpecificPropertiesPr
|
||||
public void setProducer(KafkaStreamsProducerProperties producer) {
|
||||
this.producer = producer;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,19 +16,26 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.AbstractExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
|
||||
/**
|
||||
* Kafka streams specific extended binding properties class that extends from {@link AbstractExtendedBindingProperties}.
|
||||
* Kafka streams specific extended binding properties class that extends from
|
||||
* {@link AbstractExtendedBindingProperties}.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
* @author Oleg Zhurakousky
|
||||
*/
|
||||
@ConfigurationProperties("spring.cloud.stream.kafka.streams")
|
||||
public class KafkaStreamsExtendedBindingProperties
|
||||
extends AbstractExtendedBindingProperties<KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties, KafkaStreamsBindingProperties> {
|
||||
// @checkstyle:off
|
||||
extends
|
||||
AbstractExtendedBindingProperties<KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties, KafkaStreamsBindingProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
private static final String DEFAULTS_PREFIX = "spring.cloud.stream.kafka.streams.default";
|
||||
|
||||
@Override
|
||||
@@ -36,8 +43,14 @@ public class KafkaStreamsExtendedBindingProperties
|
||||
return DEFAULTS_PREFIX;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, KafkaStreamsBindingProperties> getBindings() {
|
||||
return this.doGetBindings();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return KafkaStreamsBindingProperties.class;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -51,4 +51,5 @@ public class KafkaStreamsProducerProperties extends KafkaProducerProperties {
|
||||
public void setValueSerde(String valueSerde) {
|
||||
this.valueSerde = valueSerde;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,7 +16,6 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
|
||||
|
||||
/**
|
||||
* Properties for Kafka Streams state store.
|
||||
*
|
||||
@@ -28,6 +27,7 @@ public class KafkaStreamsStateStoreProperties {
|
||||
* Enumeration for store type.
|
||||
*/
|
||||
public enum StoreType {
|
||||
|
||||
/**
|
||||
* Key value store.
|
||||
*/
|
||||
@@ -51,8 +51,8 @@ public class KafkaStreamsStateStoreProperties {
|
||||
public String toString() {
|
||||
return this.type;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Name for this state store.
|
||||
@@ -94,7 +94,6 @@ public class KafkaStreamsStateStoreProperties {
|
||||
*/
|
||||
private boolean loggingDisabled;
|
||||
|
||||
|
||||
public String getName() {
|
||||
return this.name;
|
||||
}
|
||||
@@ -158,4 +157,5 @@ public class KafkaStreamsStateStoreProperties {
|
||||
public void setLoggingDisabled(boolean loggingDisabled) {
|
||||
this.loggingDisabled = loggingDisabled;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,242 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.serde;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.DataInputStream;
|
||||
import java.io.DataOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Map;
|
||||
import java.util.PriorityQueue;
|
||||
|
||||
import org.apache.kafka.common.serialization.Deserializer;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.common.serialization.Serializer;
|
||||
|
||||
import org.springframework.kafka.support.serializer.JsonSerde;
|
||||
|
||||
/**
|
||||
* A convenient {@link Serde} for {@link java.util.Collection} implementations.
|
||||
*
|
||||
* Whenever a Kafka Stream application needs to collect data into a container object like
|
||||
* {@link java.util.Collection}, then this Serde class can be used as a convenience for
|
||||
 * serialization needs. An example of where this may be handy is when the application
|
||||
* needs to do aggregation or reduction operations where it needs to simply hold an
|
||||
* {@link Iterable} type.
|
||||
*
|
||||
* By default, this Serde will use {@link JsonSerde} for serializing the inner objects.
|
||||
* This can be changed by providing an explicit Serde during creation of this object.
|
||||
*
|
||||
* Here is an example of a possible use case:
|
||||
*
|
||||
* <pre class="code">
|
||||
* .aggregate(ArrayList::new,
|
||||
* (k, v, aggregates) -> {
|
||||
* aggregates.add(v);
|
||||
* return aggregates;
|
||||
* },
|
||||
* Materialized.<String, Collection<Foo>, WindowStore<Bytes, byte[]>>as(
|
||||
* "foo-store")
|
||||
* .withKeySerde(Serdes.String())
|
||||
* .withValueSerde(new CollectionSerde<>(Foo.class, ArrayList.class)))
|
||||
 * </pre>
|
||||
*
|
||||
* Supported Collection types by this Serde are - {@link java.util.ArrayList}, {@link java.util.LinkedList},
|
||||
* {@link java.util.PriorityQueue} and {@link java.util.HashSet}. Deserializer will throw an exception
|
||||
* if any other Collection types are used.
|
||||
*
|
||||
* @param <E> type of the underlying object that the collection holds
|
||||
* @author Soby Chacko
|
||||
* @since 3.0.0
|
||||
*/
|
||||
public class CollectionSerde<E> implements Serde<Collection<E>> {
|
||||
|
||||
/**
|
||||
* Serde used for serializing the inner object.
|
||||
*/
|
||||
private final Serde<Collection<E>> inner;
|
||||
|
||||
/**
|
||||
* Type of the collection class. This has to be a class that is
|
||||
* implementing the {@link java.util.Collection} interface.
|
||||
*/
|
||||
private final Class<?> collectionClass;
|
||||
|
||||
/**
|
||||
* Constructor to use when the application wants to specify the type
|
||||
* of the Serde used for the inner object.
|
||||
*
|
||||
* @param serde specify an explicit Serde
|
||||
* @param collectionsClass type of the Collection class
|
||||
*/
|
||||
public CollectionSerde(Serde<E> serde, Class<?> collectionsClass) {
|
||||
this.collectionClass = collectionsClass;
|
||||
this.inner =
|
||||
Serdes.serdeFrom(
|
||||
new CollectionSerializer<>(serde.serializer()),
|
||||
new CollectionDeserializer<>(serde.deserializer(), collectionsClass));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor to delegate serialization operations for the inner objects
|
||||
* to {@link JsonSerde}.
|
||||
*
|
||||
* @param targetTypeForJsonSerde target type used by the JsonSerde
|
||||
* @param collectionsClass type of the Collection class
|
||||
*/
|
||||
public CollectionSerde(Class<?> targetTypeForJsonSerde, Class<?> collectionsClass) {
|
||||
this.collectionClass = collectionsClass;
|
||||
JsonSerde<E> jsonSerde = new JsonSerde(targetTypeForJsonSerde);
|
||||
|
||||
this.inner = Serdes.serdeFrom(
|
||||
new CollectionSerializer<>(jsonSerde.serializer()),
|
||||
new CollectionDeserializer<>(jsonSerde.deserializer(), collectionsClass));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Serializer<Collection<E>> serializer() {
|
||||
return inner.serializer();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Deserializer<Collection<E>> deserializer() {
|
||||
return inner.deserializer();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
inner.serializer().configure(configs, isKey);
|
||||
inner.deserializer().configure(configs, isKey);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
inner.serializer().close();
|
||||
inner.deserializer().close();
|
||||
}
|
||||
|
||||
private static class CollectionSerializer<E> implements Serializer<Collection<E>> {
|
||||
|
||||
|
||||
private Serializer<E> inner;
|
||||
|
||||
CollectionSerializer(Serializer<E> inner) {
|
||||
this.inner = inner;
|
||||
}
|
||||
|
||||
CollectionSerializer() { }
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] serialize(String topic, Collection<E> collection) {
|
||||
final int size = collection.size();
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
final DataOutputStream dos = new DataOutputStream(baos);
|
||||
final Iterator<E> iterator = collection.iterator();
|
||||
try {
|
||||
dos.writeInt(size);
|
||||
while (iterator.hasNext()) {
|
||||
final byte[] bytes = inner.serialize(topic, iterator.next());
|
||||
dos.writeInt(bytes.length);
|
||||
dos.write(bytes);
|
||||
}
|
||||
}
|
||||
catch (IOException e) {
|
||||
throw new RuntimeException("Unable to serialize the provided collection", e);
|
||||
}
|
||||
return baos.toByteArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
inner.close();
|
||||
}
|
||||
}
|
||||
|
||||
private static class CollectionDeserializer<E> implements Deserializer<Collection<E>> {
|
||||
private final Deserializer<E> valueDeserializer;
|
||||
private final Class<?> collectionClass;
|
||||
|
||||
CollectionDeserializer(final Deserializer<E> valueDeserializer, Class<?> collectionClass) {
|
||||
this.valueDeserializer = valueDeserializer;
|
||||
this.collectionClass = collectionClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<E> deserialize(String topic, byte[] bytes) {
|
||||
if (bytes == null || bytes.length == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Collection<E> collection = getCollection();
|
||||
final DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(bytes));
|
||||
|
||||
try {
|
||||
final int records = dataInputStream.readInt();
|
||||
for (int i = 0; i < records; i++) {
|
||||
final byte[] valueBytes = new byte[dataInputStream.readInt()];
|
||||
dataInputStream.read(valueBytes);
|
||||
collection.add(valueDeserializer.deserialize(topic, valueBytes));
|
||||
}
|
||||
}
|
||||
catch (IOException e) {
|
||||
throw new RuntimeException("Unable to deserialize collection", e);
|
||||
}
|
||||
|
||||
return collection;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
}
|
||||
|
||||
private Collection<E> getCollection() {
|
||||
Collection<E> collection;
|
||||
if (this.collectionClass.isAssignableFrom(ArrayList.class)) {
|
||||
collection = new ArrayList<>();
|
||||
}
|
||||
else if (this.collectionClass.isAssignableFrom(HashSet.class)) {
|
||||
collection = new HashSet<>();
|
||||
}
|
||||
else if (this.collectionClass.isAssignableFrom(LinkedList.class)) {
|
||||
collection = new LinkedList<>();
|
||||
}
|
||||
else if (this.collectionClass.isAssignableFrom(PriorityQueue.class)) {
|
||||
collection = new PriorityQueue<>();
|
||||
}
|
||||
else {
|
||||
throw new IllegalArgumentException("Unsupported collection type - " + this.collectionClass);
|
||||
}
|
||||
return collection;
|
||||
}
|
||||
}
|
||||
}
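A minimal round-trip sketch for the `CollectionSerde` added above; the `Foo` payload type, the topic name, and the `main` harness are illustrative, and the inner objects are handled by the default `JsonSerde` as described in the Javadoc.

import java.util.ArrayList;
import java.util.Collection;

import org.springframework.cloud.stream.binder.kafka.streams.serde.CollectionSerde;

public class CollectionSerdeExample {

    public static void main(String[] args) {
        // Inner objects serialized with the default JsonSerde, collected into an ArrayList.
        CollectionSerde<Foo> serde = new CollectionSerde<>(Foo.class, ArrayList.class);

        Collection<Foo> values = new ArrayList<>();
        values.add(new Foo("first"));
        values.add(new Foo("second"));

        byte[] bytes = serde.serializer().serialize("some-topic", values);
        Collection<Foo> restored = serde.deserializer().deserialize("some-topic", bytes);
        System.out.println(restored.size()); // prints 2
    }

    public static class Foo {

        private String name;

        public Foo() {
        }

        public Foo(String name) {
            this.name = name;
        }

        public String getName() {
            return this.name;
        }

        public void setName(String name) {
            this.name = name;
        }

    }

}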
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,194 +16,22 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.serde;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.kafka.common.serialization.Deserializer;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serializer;
|
||||
|
||||
import org.springframework.cloud.stream.converter.CompositeMessageConverterFactory;
|
||||
import org.springframework.messaging.Message;
|
||||
import org.springframework.messaging.MessageHeaders;
|
||||
import org.springframework.messaging.converter.MessageConverter;
|
||||
import org.springframework.messaging.support.MessageBuilder;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.MimeType;
|
||||
import org.springframework.util.MimeTypeUtils;
|
||||
import org.springframework.messaging.converter.CompositeMessageConverter;
|
||||
|
||||
/**
|
||||
* A {@link Serde} implementation that wraps the list of {@link MessageConverter}s
|
||||
* from {@link CompositeMessageConverterFactory}.
|
||||
*
|
||||
* The primary motivation for this class is to provide an avro based {@link Serde} that is
|
||||
* compatible with the schema registry that Spring Cloud Stream provides. When using the
|
||||
* schema registry support from Spring Cloud Stream in a Kafka Streams binder based application,
|
||||
* the applications can deserialize the incoming Kafka Streams records using the built in
|
||||
* Avro {@link MessageConverter}. However, this same message conversion approach will not work
|
||||
* downstream in other operations in the topology for Kafka Streams as some of them needs a
|
||||
* {@link Serde} instance that can talk to the Spring Cloud Stream provided Schema Registry.
|
||||
* This implementation will solve that problem.
|
||||
*
|
||||
* Only Avro and JSON based converters are exposed as binder provided {@link Serde} implementations currently.
|
||||
*
|
||||
* Users of this class must call the {@link CompositeNonNativeSerde#configure(Map, boolean)} method
|
||||
* to configure the {@link Serde} object. At the very least the configuration map must include a key
|
||||
* called "valueClass" to indicate the type of the target object for deserialization. If any other
|
||||
* content type other than JSON is needed (only Avro is available now other than JSON), that needs
|
||||
* to be included in the configuration map with the key "contentType". For example,
|
||||
*
|
||||
* <pre class="code">
|
||||
* Map<String, Object> config = new HashMap<>();
|
||||
* config.put("valueClass", Foo.class);
|
||||
* config.put("contentType", "application/avro");
|
||||
* </pre>
|
||||
*
|
||||
* Then use the above map when calling the configure method.
|
||||
*
|
||||
* This class is only intended to be used when writing a Spring Cloud Stream Kafka Streams application
|
||||
* that uses Spring Cloud Stream schema registry for schema evolution.
|
||||
*
|
||||
* An instance of this class is provided as a bean by the binder configuration and typically the applications
|
||||
* can autowire that bean. This is the expected usage pattern of this class.
|
||||
*
|
||||
* @param <T> type of the object to marshall
|
||||
* This class provides the same functionality as {@link MessageConverterDelegateSerde} and is deprecated.
|
||||
* It is kept for backward compatibility reasons and will be removed in version 3.1
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1
|
||||
 * @since 2.1
|
||||
*
|
||||
* @deprecated in favour of {@link MessageConverterDelegateSerde}
|
||||
*/
|
||||
public class CompositeNonNativeSerde<T> implements Serde<T> {
|
||||
@Deprecated
|
||||
public class CompositeNonNativeSerde extends MessageConverterDelegateSerde {
|
||||
|
||||
private static final String VALUE_CLASS_HEADER = "valueClass";
|
||||
|
||||
private static final String AVRO_FORMAT = "avro";
|
||||
|
||||
private static final MimeType DEFAULT_AVRO_MIME_TYPE = new MimeType("application", "*+" + AVRO_FORMAT);
|
||||
|
||||
private final CompositeNonNativeDeserializer<T> compositeNonNativeDeserializer;
|
||||
|
||||
private final CompositeNonNativeSerializer<T> compositeNonNativeSerializer;
|
||||
|
||||
public CompositeNonNativeSerde(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.compositeNonNativeDeserializer = new CompositeNonNativeDeserializer<>(compositeMessageConverterFactory);
|
||||
this.compositeNonNativeSerializer = new CompositeNonNativeSerializer<>(compositeMessageConverterFactory);
|
||||
public CompositeNonNativeSerde(CompositeMessageConverter compositeMessageConverter) {
|
||||
super(compositeMessageConverter);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
this.compositeNonNativeDeserializer.configure(configs, isKey);
|
||||
this.compositeNonNativeSerializer.configure(configs, isKey);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
//No-op
|
||||
}
|
||||
|
||||
@Override
|
||||
public Serializer<T> serializer() {
|
||||
return this.compositeNonNativeSerializer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Deserializer<T> deserializer() {
|
||||
return this.compositeNonNativeDeserializer;
|
||||
}
|
||||
|
||||
private static MimeType resolveMimeType(Map<String, ?> configs) {
|
||||
if (configs.containsKey(MessageHeaders.CONTENT_TYPE)) {
|
||||
String contentType = (String) configs.get(MessageHeaders.CONTENT_TYPE);
|
||||
if (DEFAULT_AVRO_MIME_TYPE.equals(MimeTypeUtils.parseMimeType(contentType))) {
|
||||
return DEFAULT_AVRO_MIME_TYPE;
|
||||
}
|
||||
else if (contentType.contains("avro")) {
|
||||
return MimeTypeUtils.parseMimeType("application/avro");
|
||||
}
|
||||
else {
|
||||
return new MimeType("application", "json", StandardCharsets.UTF_8);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return new MimeType("application", "json", StandardCharsets.UTF_8);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom {@link Deserializer} that uses the {@link CompositeMessageConverterFactory}.
|
||||
*
|
||||
* @param <U> parameterized target type for deserialization
|
||||
*/
|
||||
private static class CompositeNonNativeDeserializer<U> implements Deserializer<U> {
|
||||
|
||||
private final MessageConverter messageConverter;
|
||||
|
||||
private MimeType mimeType;
|
||||
|
||||
private Class<?> valueClass;
|
||||
|
||||
CompositeNonNativeDeserializer(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.messageConverter = compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
Assert.isTrue(configs.containsKey(VALUE_CLASS_HEADER), "Deserializers must provide a configuration for valueClass.");
|
||||
final Object valueClass = configs.get(VALUE_CLASS_HEADER);
|
||||
Assert.isTrue(valueClass instanceof Class, "Deserializers must provide a valid value for valueClass.");
|
||||
this.valueClass = (Class<?>) valueClass;
|
||||
this.mimeType = resolveMimeType(configs);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public U deserialize(String topic, byte[] data) {
|
||||
Message<?> message = MessageBuilder.withPayload(data)
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, this.mimeType.toString()).build();
|
||||
U messageConverted = (U) this.messageConverter.fromMessage(message, this.valueClass);
|
||||
Assert.notNull(messageConverted, "Deserialization failed.");
|
||||
return messageConverted;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
//No-op
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom {@link Serializer} that uses the {@link CompositeMessageConverterFactory}.
|
||||
*
|
||||
* @param <V> parameterized type for serialization
|
||||
*/
|
||||
private static class CompositeNonNativeSerializer<V> implements Serializer<V> {
|
||||
|
||||
private final MessageConverter messageConverter;
|
||||
private MimeType mimeType;
|
||||
|
||||
CompositeNonNativeSerializer(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.messageConverter = compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
this.mimeType = resolveMimeType(configs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] serialize(String topic, V data) {
|
||||
Message<?> message = MessageBuilder.withPayload(data).build();
|
||||
Map<String, Object> headers = new HashMap<>(message.getHeaders());
|
||||
headers.put(MessageHeaders.CONTENT_TYPE, this.mimeType.toString());
|
||||
MessageHeaders messageHeaders = new MessageHeaders(headers);
|
||||
final Object payload = this.messageConverter.toMessage(message.getPayload(),
|
||||
messageHeaders).getPayload();
|
||||
return (byte[]) payload;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
//No-op
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,226 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.serde;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.kafka.common.serialization.Deserializer;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serializer;
|
||||
|
||||
import org.springframework.messaging.Message;
|
||||
import org.springframework.messaging.MessageHeaders;
|
||||
import org.springframework.messaging.converter.CompositeMessageConverter;
|
||||
import org.springframework.messaging.converter.MessageConverter;
|
||||
import org.springframework.messaging.support.MessageBuilder;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.MimeType;
|
||||
import org.springframework.util.MimeTypeUtils;
|
||||
|
||||
/**
|
||||
* A {@link Serde} implementation that wraps the list of {@link MessageConverter}s from
|
||||
* {@link CompositeMessageConverter}.
|
||||
*
|
||||
* The primary motivation for this class is to provide an avro based {@link Serde} that is
|
||||
* compatible with the schema registry that Spring Cloud Stream provides. When using the
|
||||
* schema registry support from Spring Cloud Stream in a Kafka Streams binder based
|
||||
* application, the applications can deserialize the incoming Kafka Streams records using
|
||||
* the built in Avro {@link MessageConverter}. However, this same message conversion
|
||||
* approach will not work downstream in other operations in the topology for Kafka Streams
|
||||
 * as some of them need a {@link Serde} instance that can talk to the Spring Cloud Stream
|
||||
* provided Schema Registry. This implementation will solve that problem.
|
||||
*
|
||||
* Only Avro and JSON based converters are exposed as binder provided {@link Serde}
|
||||
* implementations currently.
|
||||
*
|
||||
* Users of this class must call the
|
||||
* {@link MessageConverterDelegateSerde#configure(Map, boolean)} method to configure the
|
||||
* {@link Serde} object. At the very least the configuration map must include a key called
|
||||
* "valueClass" to indicate the type of the target object for deserialization. If any
|
||||
 * content type other than JSON is needed (currently only Avro is available other than
|
||||
* JSON), that needs to be included in the configuration map with the key "contentType".
|
||||
* For example,
|
||||
*
|
||||
* <pre class="code">
|
||||
* Map<String, Object> config = new HashMap<>();
|
||||
* config.put("valueClass", Foo.class);
|
||||
* config.put("contentType", "application/avro");
|
||||
* </pre>
|
||||
*
|
||||
* Then use the above map when calling the configure method.
|
||||
*
|
||||
* This class is only intended to be used when writing a Spring Cloud Stream Kafka Streams
|
||||
* application that uses Spring Cloud Stream schema registry for schema evolution.
|
||||
*
|
||||
* An instance of this class is provided as a bean by the binder configuration and
|
||||
* typically the applications can autowire that bean. This is the expected usage pattern
|
||||
* of this class.
|
||||
*
|
||||
* @param <T> type of the object to marshall
|
||||
* @author Soby Chacko
|
||||
* @since 3.0
|
||||
*/
|
||||
public class MessageConverterDelegateSerde<T> implements Serde<T> {
|
||||
|
||||
private static final String VALUE_CLASS_HEADER = "valueClass";
|
||||
|
||||
private static final String AVRO_FORMAT = "avro";
|
||||
|
||||
private static final MimeType DEFAULT_AVRO_MIME_TYPE = new MimeType("application",
|
||||
"*+" + AVRO_FORMAT);
|
||||
|
||||
private final MessageConverterDelegateDeserializer<T> messageConverterDelegateDeserializer;
|
||||
|
||||
private final MessageConverterDelegateSerializer<T> messageConverterDelegateSerializer;
|
||||
|
||||
public MessageConverterDelegateSerde(
|
||||
CompositeMessageConverter compositeMessageConverter) {
|
||||
this.messageConverterDelegateDeserializer = new MessageConverterDelegateDeserializer<>(
|
||||
compositeMessageConverter);
|
||||
this.messageConverterDelegateSerializer = new MessageConverterDelegateSerializer<>(
|
||||
compositeMessageConverter);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
this.messageConverterDelegateDeserializer.configure(configs, isKey);
|
||||
this.messageConverterDelegateSerializer.configure(configs, isKey);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// No-op
|
||||
}
|
||||
|
||||
@Override
|
||||
public Serializer<T> serializer() {
|
||||
return this.messageConverterDelegateSerializer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Deserializer<T> deserializer() {
|
||||
return this.messageConverterDelegateDeserializer;
|
||||
}
|
||||
|
||||
private static MimeType resolveMimeType(Map<String, ?> configs) {
|
||||
if (configs.containsKey(MessageHeaders.CONTENT_TYPE)) {
|
||||
String contentType = (String) configs.get(MessageHeaders.CONTENT_TYPE);
|
||||
if (DEFAULT_AVRO_MIME_TYPE.equals(MimeTypeUtils.parseMimeType(contentType))) {
|
||||
return DEFAULT_AVRO_MIME_TYPE;
|
||||
}
|
||||
else if (contentType.contains("avro")) {
|
||||
return MimeTypeUtils.parseMimeType("application/avro");
|
||||
}
|
||||
else {
|
||||
return new MimeType("application", "json", StandardCharsets.UTF_8);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return new MimeType("application", "json", StandardCharsets.UTF_8);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
	 * Custom {@link Deserializer} that delegates to the configured {@link CompositeMessageConverter}.
|
||||
*
|
||||
* @param <U> parameterized target type for deserialization
|
||||
*/
|
||||
private static class MessageConverterDelegateDeserializer<U> implements Deserializer<U> {
|
||||
|
||||
private final MessageConverter messageConverter;
|
||||
|
||||
private MimeType mimeType;
|
||||
|
||||
private Class<?> valueClass;
|
||||
|
||||
MessageConverterDelegateDeserializer(
|
||||
CompositeMessageConverter compositeMessageConverter) {
|
||||
this.messageConverter = compositeMessageConverter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
Assert.isTrue(configs.containsKey(VALUE_CLASS_HEADER),
|
||||
"Deserializers must provide a configuration for valueClass.");
|
||||
final Object valueClass = configs.get(VALUE_CLASS_HEADER);
|
||||
Assert.isTrue(valueClass instanceof Class,
|
||||
"Deserializers must provide a valid value for valueClass.");
|
||||
this.valueClass = (Class<?>) valueClass;
|
||||
this.mimeType = resolveMimeType(configs);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public U deserialize(String topic, byte[] data) {
|
||||
Message<?> message = MessageBuilder.withPayload(data)
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, this.mimeType.toString())
|
||||
.build();
|
||||
U messageConverted = (U) this.messageConverter.fromMessage(message,
|
||||
this.valueClass);
|
||||
Assert.notNull(messageConverted, "Deserialization failed.");
|
||||
return messageConverted;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// No-op
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
	 * Custom {@link Serializer} that delegates to the configured {@link CompositeMessageConverter}.
|
||||
*
|
||||
* @param <V> parameterized type for serialization
|
||||
*/
|
||||
private static class MessageConverterDelegateSerializer<V> implements Serializer<V> {
|
||||
|
||||
private final MessageConverter messageConverter;
|
||||
|
||||
private MimeType mimeType;
|
||||
|
||||
MessageConverterDelegateSerializer(
|
||||
CompositeMessageConverter compositeMessageConverter) {
|
||||
this.messageConverter = compositeMessageConverter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
this.mimeType = resolveMimeType(configs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] serialize(String topic, V data) {
|
||||
Message<?> message = MessageBuilder.withPayload(data).build();
|
||||
Map<String, Object> headers = new HashMap<>(message.getHeaders());
|
||||
headers.put(MessageHeaders.CONTENT_TYPE, this.mimeType.toString());
|
||||
MessageHeaders messageHeaders = new MessageHeaders(headers);
|
||||
final Object payload = this.messageConverter
|
||||
.toMessage(message.getPayload(), messageHeaders).getPayload();
|
||||
return (byte[]) payload;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// No-op
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
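A minimal configuration sketch for the `MessageConverterDelegateSerde` added above, following the `valueClass`/`contentType` contract from its Javadoc; the helper method and its names are illustrative, and in a real application the `CompositeMessageConverter` would typically be the bean exposed by the binder configuration.

import java.util.HashMap;
import java.util.Map;

import org.springframework.cloud.stream.binder.kafka.streams.serde.MessageConverterDelegateSerde;
import org.springframework.messaging.converter.CompositeMessageConverter;

final class DelegateSerdeConfigSketch {

    // Builds a serde configured for Avro payloads of the given type.
    static <T> MessageConverterDelegateSerde<T> avroSerdeFor(
            CompositeMessageConverter converter, Class<T> payloadType) {
        MessageConverterDelegateSerde<T> serde =
                new MessageConverterDelegateSerde<>(converter);
        Map<String, Object> config = new HashMap<>();
        config.put("valueClass", payloadType);           // required by the deserializer
        config.put("contentType", "application/avro");   // omit to fall back to JSON
        serde.configure(config, false);                   // false = value serde, not key serde
        return serde;
    }

}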
@@ -1,5 +1,5 @@
|
||||
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsBinderSupportAutoConfiguration,\
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsApplicationSupportAutoConfiguration
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsApplicationSupportAutoConfiguration,\
|
||||
org.springframework.cloud.stream.binder.kafka.streams.function.KafkaStreamsFunctionAutoConfiguration
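The newly registered `KafkaStreamsFunctionAutoConfiguration` enables a functional programming model for Kafka Streams applications. A minimal sketch of such an application, assuming a `java.util.function.Function` bean bound to a `KStream` input and output; the class name and the processing logic are illustrative.

import java.util.function.Function;

import org.apache.kafka.streams.kstream.KStream;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;

@SpringBootApplication
public class UppercaseStreamApplication {

    public static void main(String[] args) {
        SpringApplication.run(UppercaseStreamApplication.class, args);
    }

    // The binder binds the function's KStream input and output to the configured destinations.
    @Bean
    public Function<KStream<String, String>, KStream<String, String>> process() {
        return input -> input.mapValues(value -> value.toUpperCase());
    }

}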
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -14,7 +14,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.integration;
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
@@ -23,25 +23,29 @@ import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.common.serialization.IntegerSerializer;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.KafkaStreams;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.kstream.Serialized;
|
||||
import org.apache.kafka.streams.state.HostInfo;
|
||||
import org.apache.kafka.streams.state.QueryableStoreType;
|
||||
import org.apache.kafka.streams.state.QueryableStoreTypes;
|
||||
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.InteractiveQueryService;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
@@ -54,6 +58,7 @@ import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.internal.verification.VerificationModeFactory.times;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
@@ -62,17 +67,21 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -82,6 +91,29 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStateStoreRetrievalRetry() {
|
||||
|
||||
KafkaStreams mock = Mockito.mock(KafkaStreams.class);
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry = new KafkaStreamsRegistry();
|
||||
kafkaStreamsRegistry.registerKafkaStreams(mock);
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties =
|
||||
new KafkaStreamsBinderConfigurationProperties(new KafkaProperties());
|
||||
binderConfigurationProperties.getStateStoreRetry().setMaxAttempts(3);
|
||||
InteractiveQueryService interactiveQueryService = new InteractiveQueryService(kafkaStreamsRegistry,
|
||||
binderConfigurationProperties);
|
||||
|
||||
QueryableStoreType<ReadOnlyKeyValueStore<Object, Object>> storeType = QueryableStoreTypes.keyValueStore();
|
||||
try {
|
||||
interactiveQueryService.getQueryableStore("foo", storeType);
|
||||
}
|
||||
catch (Exception ignored) {
|
||||
|
||||
}
|
||||
|
||||
Mockito.verify(mock, times(3)).store("foo", storeType);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamBinderWithPojoInputAndStringOuput() throws Exception {
|
||||
SpringApplication app = new SpringApplication(ProductCountApplication.class);
|
||||
@@ -91,40 +123,53 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.input.destination=foos",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=ProductCountApplication-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.application.server=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.application.server"
|
||||
+ "=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
try {
|
||||
receiveAndValidateFoo(context);
|
||||
} finally {
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos");
|
||||
template.sendDefault("{\"id\":\"123\"}");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts-id");
|
||||
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
|
||||
|
||||
ProductCountApplication.Foo foo = context.getBean(ProductCountApplication.Foo.class);
|
||||
ProductCountApplication.Foo foo = context
|
||||
.getBean(ProductCountApplication.Foo.class);
|
||||
assertThat(foo.getProductStock(123).equals(1L));
|
||||
|
||||
//perform assertions on HostInfo related methods in InteractiveQueryService
|
||||
InteractiveQueryService interactiveQueryService = context.getBean(InteractiveQueryService.class);
|
||||
// perform assertions on HostInfo related methods in InteractiveQueryService
|
||||
InteractiveQueryService interactiveQueryService = context
|
||||
.getBean(InteractiveQueryService.class);
|
||||
HostInfo currentHostInfo = interactiveQueryService.getCurrentHostInfo();
|
||||
assertThat(currentHostInfo.host() + ":" + currentHostInfo.port()).isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
assertThat(currentHostInfo.host() + ":" + currentHostInfo.port())
|
||||
.isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
|
||||
HostInfo hostInfo = interactiveQueryService.getHostInfo("prod-id-count-store", 123, new IntegerSerializer());
|
||||
assertThat(hostInfo.host() + ":" + hostInfo.port()).isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
HostInfo hostInfo = interactiveQueryService.getHostInfo("prod-id-count-store",
|
||||
123, new IntegerSerializer());
|
||||
assertThat(hostInfo.host() + ":" + hostInfo.port())
|
||||
.isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
|
||||
HostInfo hostInfoFoo = interactiveQueryService.getHostInfo("prod-id-count-store-foo", 123, new IntegerSerializer());
|
||||
HostInfo hostInfoFoo = interactiveQueryService
|
||||
.getHostInfo("prod-id-count-store-foo", 123, new IntegerSerializer());
|
||||
assertThat(hostInfoFoo).isNull();
|
||||
}
|
||||
|
||||
@@ -134,16 +179,15 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
@SuppressWarnings("deprecation")
|
||||
public KStream<?, String> process(KStream<Object, Product> input) {
|
||||
|
||||
return input
|
||||
.filter((key, product) -> product.getId() == 123)
|
||||
return input.filter((key, product) -> product.getId() == 123)
|
||||
.map((key, value) -> new KeyValue<>(value.id, value))
|
||||
.groupByKey(Serialized.with(new Serdes.IntegerSerde(), new JsonSerde<>(Product.class)))
|
||||
.count(Materialized.as("prod-id-count-store"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, "Count for product with ID 123: " + value));
|
||||
.groupByKey(Serialized.with(new Serdes.IntegerSerde(),
|
||||
new JsonSerde<>(Product.class)))
|
||||
.count(Materialized.as("prod-id-count-store")).toStream()
|
||||
.map((key, value) -> new KeyValue<>(null,
|
||||
"Count for product with ID 123: " + value));
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -152,6 +196,7 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
}
|
||||
|
||||
static class Foo {
|
||||
|
||||
InteractiveQueryService interactiveQueryService;
|
||||
|
||||
Foo(InteractiveQueryService interactiveQueryService) {
|
||||
@@ -159,12 +204,15 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
}
|
||||
|
||||
public Long getProductStock(Integer id) {
|
||||
ReadOnlyKeyValueStore<Object, Object> keyValueStore =
|
||||
interactiveQueryService.getQueryableStore("prod-id-count-store", QueryableStoreTypes.keyValueStore());
|
||||
ReadOnlyKeyValueStore<Object, Object> keyValueStore = interactiveQueryService
|
||||
.getQueryableStore("prod-id-count-store",
|
||||
QueryableStoreTypes.keyValueStore());
|
||||
|
||||
return (Long) keyValueStore.get(id);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
@@ -178,5 +226,7 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,9 +16,10 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.bootstrap;
|
||||
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
@@ -28,53 +29,103 @@ import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.kafka.test.rule.KafkaEmbedded;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@Ignore("Temporarily disabling the test as builds are getting slower due to this.")
|
||||
public class KafkaStreamsBinderBootstrapTest {
|
||||
|
||||
@ClassRule
|
||||
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, 10);
|
||||
public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(1, true, 10);
|
||||
|
||||
@Test
|
||||
public void testKafkaStreamsBinderWithCustomEnvironmentCanStart() {
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(SimpleApplication.class)
|
||||
.web(WebApplicationType.NONE)
|
||||
.run("--spring.cloud.stream.bindings.input.destination=foo",
|
||||
"--spring.cloud.stream.bindings.input.binder=kBind1",
|
||||
"--spring.cloud.stream.binders.kBind1.type=kstream",
|
||||
"--spring.cloud.stream.binders.kBind1.environment.spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.binders.kBind1.environment.spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
public void testKStreamBinderWithCustomEnvironmentCanStart() {
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(
|
||||
SimpleKafkaStreamsApplication.class).web(WebApplicationType.NONE).run(
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.application-id"
|
||||
+ "=testKStreamBinderWithCustomEnvironmentCanStart",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-2.consumer.application-id"
|
||||
+ "=testKStreamBinderWithCustomEnvironmentCanStart-foo",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-3.consumer.application-id"
|
||||
+ "=testKStreamBinderWithCustomEnvironmentCanStart-foobar",
|
||||
"--spring.cloud.stream.bindings.input-1.destination=foo",
|
||||
"--spring.cloud.stream.bindings.input-1.binder=kstreamBinder",
|
||||
"--spring.cloud.stream.binders.kstreamBinder.type=kstream",
|
||||
"--spring.cloud.stream.binders.kstreamBinder.environment"
|
||||
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
|
||||
+ "=" + embeddedKafka.getEmbeddedKafka().getBrokersAsString(),
|
||||
"--spring.cloud.stream.bindings.input-2.destination=bar",
|
||||
"--spring.cloud.stream.bindings.input-2.binder=ktableBinder",
|
||||
"--spring.cloud.stream.binders.ktableBinder.type=ktable",
|
||||
"--spring.cloud.stream.binders.ktableBinder.environment"
|
||||
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
|
||||
+ "=" + embeddedKafka.getEmbeddedKafka().getBrokersAsString(),
|
||||
"--spring.cloud.stream.bindings.input-3.destination=foobar",
|
||||
"--spring.cloud.stream.bindings.input-3.binder=globalktableBinder",
|
||||
"--spring.cloud.stream.binders.globalktableBinder.type=globalktable",
|
||||
"--spring.cloud.stream.binders.globalktableBinder.environment"
|
||||
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
|
||||
+ "=" + embeddedKafka.getEmbeddedKafka().getBrokersAsString());
|
||||
|
||||
applicationContext.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKafkaStreamsBinderWithStandardConfigurationCanStart() {
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(SimpleApplication.class)
|
||||
.web(WebApplicationType.NONE)
|
||||
.run("--spring.cloud.stream.bindings.input.destination=foo",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(
|
||||
SimpleKafkaStreamsApplication.class).web(WebApplicationType.NONE).run(
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.application-id"
|
||||
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-2.consumer.application-id"
|
||||
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart-foo",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-3.consumer.application-id"
|
||||
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart-foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getEmbeddedKafka().getBrokersAsString());
|
||||
|
||||
applicationContext.close();
|
||||
}
|
||||
|
||||
@SpringBootApplication
|
||||
@EnableBinding(StreamSourceProcessor.class)
|
||||
static class SimpleApplication {
|
||||
@EnableBinding({SimpleKStreamBinding.class, SimpleKTableBinding.class, SimpleGlobalKTableBinding.class})
|
||||
static class SimpleKafkaStreamsApplication {
|
||||
|
||||
@StreamListener
|
||||
public void handle(@Input("input") KStream<Object, String> stream) {
|
||||
public void handle(@Input("input-1") KStream<Object, String> stream) {
|
||||
|
||||
}
|
||||
|
||||
@StreamListener
|
||||
public void handleX(@Input("input-2") KTable<Object, String> stream) {
|
||||
|
||||
}
|
||||
|
||||
@StreamListener
|
||||
public void handleY(@Input("input-3") GlobalKTable<Object, String> stream) {
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface SimpleKStreamBinding {
|
||||
|
||||
@Input("input-1")
|
||||
KStream<?, ?> inputStream();
|
||||
|
||||
}
|
||||
|
||||
interface SimpleKTableBinding {
|
||||
|
||||
@Input("input-2")
|
||||
KTable<?, ?> inputStream();
|
||||
|
||||
}
|
||||
|
||||
interface SimpleGlobalKTableBinding {
|
||||
|
||||
@Input("input-3")
|
||||
GlobalKTable<?, ?> inputStream();
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,201 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.kstream.Predicate;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class KafkaStreamsBinderWordCountBranchesFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts", "foo", "bar");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "foo", "bar");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithStringInputAndPojoOutput() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.inputBindings.process=input",
|
||||
"--spring.cloud.stream.function.outputBindings.process=output1,output2,output3",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output1.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output2.destination=foo",
|
||||
"--spring.cloud.stream.bindings.output3.destination=bar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId" +
|
||||
"=KafkaStreamsBinderWordCountBranchesFunctionTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString());
|
||||
try {
|
||||
receiveAndValidate(context);
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
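// Send one "english", two "french" and three "spanish" records; each language's windowed
// count should arrive on its own branch topic (counts, foo, bar).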
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("english");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
assertThat(cr.value().contains("\"word\":\"english\",\"count\":1")).isTrue();
|
||||
|
||||
template.sendDefault("french");
|
||||
template.sendDefault("french");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "foo");
|
||||
assertThat(cr.value().contains("\"word\":\"french\",\"count\":2")).isTrue();
|
||||
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "bar");
|
||||
assertThat(cr.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
|
||||
private String word;
|
||||
|
||||
private long count;
|
||||
|
||||
private Date start;
|
||||
|
||||
private Date end;
|
||||
|
||||
WordCount(String word, long count, Date start, Date end) {
|
||||
this.word = word;
|
||||
this.count = count;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public String getWord() {
|
||||
return word;
|
||||
}
|
||||
|
||||
public void setWord(String word) {
|
||||
this.word = word;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public Date getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public void setStart(Date start) {
|
||||
this.start = start;
|
||||
}
|
||||
|
||||
public Date getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class WordCountProcessorApplication {
|
||||
|
||||
@Bean
|
||||
@SuppressWarnings("unchecked")
|
||||
public Function<KStream<Object, String>, KStream<?, WordCount>[]> process() {
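// Branch the windowed word counts by language; the returned KStream[] maps onto the three
// output bindings declared through outputBindings.process in the test configuration.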
|
||||
|
||||
Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
|
||||
Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
|
||||
Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");
|
||||
|
||||
return input -> input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.groupBy((key, value) -> value)
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("WordCounts-branch"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value,
|
||||
new Date(key.window().start()), new Date(key.window().end()))))
|
||||
.branch(isEnglish, isFrench, isSpanish);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,204 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.kstream.Serialized;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class KafkaStreamsBinderWordCountFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts", "counts-1");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "counts-1");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountFunction() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run(
|
||||
"--spring.cloud.stream.function.inputBindings.process=input",
|
||||
"--spring.cloud.stream.function.outputBindings.process=output",
|
||||
"--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts",
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id=testKstreamWordCountFunction",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
receiveAndValidate("words", "counts");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountFunctionWithGeneratedApplicationId() throws Exception {
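// No application-id is configured here; the binder is expected to generate one for the
// function's consumer binding.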
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run(
|
||||
"--spring.cloud.stream.function.inputBindings.process=input",
|
||||
"--spring.cloud.stream.function.outputBindings.process=output",
|
||||
"--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=words-1",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts-1",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
receiveAndValidate("words-1", "counts-1");
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(String in, String out) {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic(in);
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, out);
|
||||
assertThat(cr.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
|
||||
}
|
||||
finally {
|
||||
pf.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
|
||||
private String word;
|
||||
|
||||
private long count;
|
||||
|
||||
private Date start;
|
||||
|
||||
private Date end;
|
||||
|
||||
WordCount(String word, long count, Date start, Date end) {
|
||||
this.word = word;
|
||||
this.count = count;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public String getWord() {
|
||||
return word;
|
||||
}
|
||||
|
||||
public void setWord(String word) {
|
||||
this.word = word;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public Date getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public void setStart(Date start) {
|
||||
this.start = start;
|
||||
}
|
||||
|
||||
public Date getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class WordCountProcessorApplication {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<Object, String>, KStream<?, WordCount>> process() {
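// Word-count topology: split each value into words, group by word, count over 5-second
// windows, then emit WordCount values for the output binding.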
|
||||
|
||||
return input -> input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("foo-WordCounts"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value,
|
||||
new Date(key.window().start()), new Date(key.window().end()))));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,150 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.processor.Processor;
|
||||
import org.apache.kafka.streams.processor.ProcessorContext;
|
||||
import org.apache.kafka.streams.processor.ProcessorSupplier;
|
||||
import org.apache.kafka.streams.state.KeyValueStore;
|
||||
import org.apache.kafka.streams.state.StoreBuilder;
|
||||
import org.apache.kafka.streams.state.Stores;
|
||||
import org.apache.kafka.streams.state.WindowStore;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class KafkaStreamsFunctionStateStoreTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@Test
|
||||
public void testKafkaStreamsFunctionWithMultipleStateStores() throws Exception {
|
||||
SpringApplication app = new SpringApplication(StateStoreTestApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.process_in.destination=words",
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id=testKafkaStreamsFuncionWithMultipleStateStores",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
receiveAndValidate(context);
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
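// Publish one record, give the topology a moment to process it, then assert that the
// processor was handed both registered state stores.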
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("foobar");
|
||||
Thread.sleep(2000L);
|
||||
StateStoreTestApplication processorApplication = context
|
||||
.getBean(StateStoreTestApplication.class);
|
||||
|
||||
KeyValueStore<Long, Long> state1 = processorApplication.state1;
|
||||
assertThat(processorApplication.processed).isTrue();
|
||||
assertThat(state1 != null).isTrue();
|
||||
assertThat(state1.name()).isEqualTo("my-store");
|
||||
WindowStore<Long, Long> state2 = processorApplication.state2;
|
||||
assertThat(state2 != null).isTrue();
|
||||
assertThat(state2.name()).isEqualTo("other-store");
|
||||
assertThat(state2.persistent()).isTrue();
|
||||
}
|
||||
finally {
|
||||
pf.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class StateStoreTestApplication {
|
||||
|
||||
KeyValueStore<Long, Long> state1;
|
||||
WindowStore<Long, Long> state2;
|
||||
|
||||
boolean processed;
|
||||
|
||||
@Bean
|
||||
public java.util.function.Consumer<KStream<Object, String>> process() {
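// Attach a low-level Processor that resolves the two state stores registered below from the
// ProcessorContext during init().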
|
||||
return input ->
|
||||
input.process((ProcessorSupplier<Object, String>) () -> new Processor<Object, String>() {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void init(ProcessorContext context) {
|
||||
state1 = (KeyValueStore<Long, Long>) context.getStateStore("my-store");
|
||||
state2 = (WindowStore<Long, Long>) context.getStateStore("other-store");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Object key, String value) {
|
||||
processed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (state1 != null) {
|
||||
state1.close();
|
||||
}
|
||||
if (state2 != null) {
|
||||
state2.close();
|
||||
}
|
||||
}
|
||||
}, "my-store", "other-store");
|
||||
}
|
||||
|
||||
@Bean
|
||||
public StoreBuilder myStore() {
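// Exposed as a bean so the binder registers this key-value store with the topology under the
// name "my-store".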
|
||||
return Stores.keyValueStoreBuilder(
|
||||
Stores.persistentKeyValueStore("my-store"), Serdes.Long(),
|
||||
Serdes.Long());
|
||||
}
|
||||
|
||||
@Bean
|
||||
public StoreBuilder otherStore() {
|
||||
return Stores.windowStoreBuilder(
|
||||
Stores.persistentWindowStore("other-store",
|
||||
3L, 3, 3L, false), Serdes.Long(),
|
||||
Serdes.Long());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,140 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class MultipleFunctionsInSameAppTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"coffee", "electronics");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
private static CountDownLatch countDownLatch = new CountDownLatch(2);
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("purchase-groups", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, "coffee", "electronics");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountFunction() throws InterruptedException {
|
||||
SpringApplication app = new SpringApplication(MultipleFunctionsInSameApp.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext ignored = app.run(
|
||||
"--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.process_in.destination=purchases",
|
||||
"--spring.cloud.stream.bindings.process_out_0.destination=coffee",
|
||||
"--spring.cloud.stream.bindings.process_out_1.destination=electronics",
|
||||
"--spring.cloud.stream.bindings.analyze_in_0.destination=coffee",
|
||||
"--spring.cloud.stream.bindings.analyze_in_1.destination=electronics",
|
||||
"--spring.cloud.stream.kafka.streams.binder.functions.analyze.applicationId=analyze-id-0",
|
||||
"--spring.cloud.stream.kafka.streams.binder.functions.process.applicationId=process-id-0",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
receiveAndValidate("purchases", "coffee", "electronics");
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(String in, String... out) throws InterruptedException {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic(in);
|
||||
template.sendDefault("coffee");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, out[0]);
|
||||
assertThat(cr.value().contains("coffee")).isTrue();
|
||||
|
||||
template.sendDefault("electronics");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, out[1]);
|
||||
assertThat(cr.value().contains("electronics")).isTrue();
|
||||
|
||||
Assert.isTrue(countDownLatch.await(5, TimeUnit.SECONDS), "Analyze (BiConsumer) method didn't receive all the expected records");
|
||||
}
|
||||
finally {
|
||||
pf.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class MultipleFunctionsInSameApp {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<String, String>, KStream<String, String>[]> process() {
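// First function: splits the purchases stream into coffee and electronics branches, each
// bound to its own output topic.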
|
||||
return input -> input.branch(
|
||||
(s, p) -> p.equalsIgnoreCase("coffee"),
|
||||
(s, p) -> p.equalsIgnoreCase("electronics"));
|
||||
}
|
||||
|
||||
@Bean
|
||||
public BiConsumer<KStream<String, String>, KStream<String, String>> analyze() {
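// Second, independently bound function in the same application; it consumes both branch
// topics and counts down the shared latch for each record it sees.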
|
||||
return (coffee, electronics) -> {
|
||||
coffee.foreach((s, p) -> countDownLatch.countDown());
|
||||
electronics.foreach((s, p) -> countDownLatch.countDown());
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,121 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Date;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.common.serialization.Deserializer;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serializer;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.ProducerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.KeyValueSerdeResolver;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsProducerProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
public class SerdesProvidedAsBeansTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true);
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountFunction() throws NoSuchMethodException {
|
||||
SpringApplication app = new SpringApplication(SerdeProvidedAsBeanApp.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run(
|
||||
"--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.process_in.destination=purchases",
|
||||
"--spring.cloud.stream.bindings.process_out.destination=coffee",
|
||||
"--spring.cloud.stream.kafka.streams.binder.functions.process.applicationId=process-id-0",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
|
||||
final Method method = SerdeProvidedAsBeanApp.class.getMethod("process");
|
||||
|
||||
ResolvableType resolvableType = ResolvableType.forMethodReturnType(method, SerdeProvidedAsBeanApp.class);
|
||||
|
||||
final KeyValueSerdeResolver keyValueSerdeResolver = context.getBean(KeyValueSerdeResolver.class);
|
||||
final BindingServiceProperties bindingServiceProperties = context.getBean(BindingServiceProperties.class);
|
||||
final KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties = context.getBean(KafkaStreamsExtendedBindingProperties.class);
|
||||
|
||||
final ConsumerProperties consumerProperties = bindingServiceProperties.getBindingProperties("process_in").getConsumer();
|
||||
final KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties = kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties("process_in");
|
||||
final Serde<?> inboundValueSerde = keyValueSerdeResolver.getInboundValueSerde(consumerProperties, kafkaStreamsConsumerProperties, resolvableType.getGeneric(0));
|
||||
|
||||
Assert.isTrue(inboundValueSerde instanceof FooSerde, "Inbound Value Serde is not matched");
|
||||
|
||||
final ProducerProperties producerProperties = bindingServiceProperties.getBindingProperties("process_out").getProducer();
|
||||
final KafkaStreamsProducerProperties kafkaStreamsProducerProperties = kafkaStreamsExtendedBindingProperties.getExtendedProducerProperties("process_out");
|
||||
final Serde<?> outboundValueSerde = keyValueSerdeResolver.getOutboundValueSerde(producerProperties, kafkaStreamsProducerProperties, resolvableType.getGeneric(1));
|
||||
|
||||
Assert.isTrue(outboundValueSerde instanceof FooSerde, "Outbound Value Serde is not matched");
|
||||
}
|
||||
}
|
||||
|
||||
static class FooSerde<T> implements Serde<T> {
|
||||
@Override
|
||||
public Serializer<T> serializer() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Deserializer<T> deserializer() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class SerdeProvidedAsBeanApp {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<String, Date>, KStream<String, Date>> process() {
|
||||
return input -> input;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public Serde<Date> fooSerde() {
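// Provided as a bean so the binder matches it to the KStream's Date value type instead of
// falling back to the configured default Serde.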
|
||||
return new FooSerde<>();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,301 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecords;
|
||||
import org.apache.kafka.clients.producer.ProducerConfig;
|
||||
import org.apache.kafka.common.serialization.LongDeserializer;
|
||||
import org.apache.kafka.common.serialization.LongSerializer;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.support.serializer.JsonDeserializer;
|
||||
import org.springframework.kafka.support.serializer.JsonSerializer;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class StreamToGlobalKTableFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"enriched-order");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<Long, EnrichedOrder> consumer;
|
||||
|
||||
@Test
|
||||
public void testStreamToGlobalKTable() throws Exception {
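// Joins a stream of orders against customer and product GlobalKTables and expects five
// enriched orders on the output topic.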
|
||||
SpringApplication app = new SpringApplication(OrderEnricherApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.inputBindings.process=order,customer,product",
|
||||
"--spring.cloud.stream.function.outputBindings.process=enriched-order",
|
||||
"--spring.cloud.stream.bindings.order.destination=orders",
|
||||
"--spring.cloud.stream.bindings.customer.destination=customers",
|
||||
"--spring.cloud.stream.bindings.product.destination=products",
|
||||
"--spring.cloud.stream.bindings.enriched-order.destination=enriched-order",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.order.consumer.applicationId=" +
|
||||
"StreamToGlobalKTableJoinFunctionTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
Map<String, Object> senderPropsCustomer = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsCustomer.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
senderPropsCustomer.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
JsonSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Customer> pfCustomer =
|
||||
new DefaultKafkaProducerFactory<>(senderPropsCustomer);
|
||||
KafkaTemplate<Long, Customer> template = new KafkaTemplate<>(pfCustomer, true);
|
||||
template.setDefaultTopic("customers");
|
||||
for (long i = 0; i < 5; i++) {
|
||||
final Customer customer = new Customer();
|
||||
customer.setName("customer-" + i);
|
||||
template.sendDefault(i, customer);
|
||||
}
|
||||
|
||||
Map<String, Object> senderPropsProduct = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsProduct.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
senderPropsProduct.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Product> pfProduct =
|
||||
new DefaultKafkaProducerFactory<>(senderPropsProduct);
|
||||
KafkaTemplate<Long, Product> productTemplate = new KafkaTemplate<>(pfProduct, true);
|
||||
productTemplate.setDefaultTopic("products");
|
||||
|
||||
for (long i = 0; i < 5; i++) {
|
||||
final Product product = new Product();
|
||||
product.setName("product-" + i);
|
||||
productTemplate.sendDefault(i, product);
|
||||
}
|
||||
|
||||
Map<String, Object> senderPropsOrder = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsOrder.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
senderPropsOrder.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Order> pfOrder = new DefaultKafkaProducerFactory<>(senderPropsOrder);
|
||||
KafkaTemplate<Long, Order> orderTemplate = new KafkaTemplate<>(pfOrder, true);
|
||||
orderTemplate.setDefaultTopic("orders");
|
||||
|
||||
for (long i = 0; i < 5; i++) {
|
||||
final Order order = new Order();
|
||||
order.setCustomerId(i);
|
||||
order.setProductId(i);
|
||||
orderTemplate.sendDefault(i, order);
|
||||
}
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
JsonDeserializer.class);
|
||||
consumerProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE,
|
||||
"org.springframework.cloud.stream.binder.kafka.streams." +
|
||||
"function.StreamToGlobalKTableFunctionTests.EnrichedOrder");
|
||||
DefaultKafkaConsumerFactory<Long, EnrichedOrder> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "enriched-order");
|
||||
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<Long, EnrichedOrder>> enrichedOrders = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<Long, EnrichedOrder> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<Long, EnrichedOrder> record : records) {
|
||||
enrichedOrders.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
} while (count < 5 && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count == 5).isTrue();
|
||||
assertThat(enrichedOrders.size() == 5).isTrue();
|
||||
|
||||
enrichedOrders.sort(Comparator.comparing(o -> o.key));
|
||||
|
||||
for (int i = 0; i < 5; i++) {
|
||||
KeyValue<Long, EnrichedOrder> enrichedOrderKeyValue = enrichedOrders.get(i);
|
||||
assertThat(enrichedOrderKeyValue.key == i).isTrue();
|
||||
EnrichedOrder enrichedOrder = enrichedOrderKeyValue.value;
|
||||
assertThat(enrichedOrder.getOrder().customerId == i).isTrue();
|
||||
assertThat(enrichedOrder.getOrder().productId == i).isTrue();
|
||||
assertThat(enrichedOrder.getCustomer().name.equals("customer-" + i)).isTrue();
|
||||
assertThat(enrichedOrder.getProduct().name.equals("product-" + i)).isTrue();
|
||||
}
|
||||
pfCustomer.destroy();
|
||||
pfProduct.destroy();
|
||||
pfOrder.destroy();
|
||||
consumer.close();
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class OrderEnricherApplication {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<Long, Order>,
|
||||
Function<GlobalKTable<Long, Customer>,
|
||||
Function<GlobalKTable<Long, Product>, KStream<Long, EnrichedOrder>>>> process() {
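// Curried signature: the KStream plus the two GlobalKTables correspond to the three input
// bindings (order, customer, product); the innermost function produces the enriched output stream.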
|
||||
|
||||
return orderStream -> (
|
||||
customers -> (
|
||||
products -> (
|
||||
orderStream.join(customers,
|
||||
(orderId, order) -> order.getCustomerId(),
|
||||
(order, customer) -> new CustomerOrder(customer, order))
|
||||
.join(products,
|
||||
(orderId, customerOrder) -> customerOrder
|
||||
.productId(),
|
||||
(customerOrder, product) -> {
|
||||
EnrichedOrder enrichedOrder = new EnrichedOrder();
|
||||
enrichedOrder.setProduct(product);
|
||||
enrichedOrder.setCustomer(customerOrder.customer);
|
||||
enrichedOrder.setOrder(customerOrder.order);
|
||||
return enrichedOrder;
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
static class Order {
|
||||
|
||||
long customerId;
|
||||
long productId;
|
||||
|
||||
public long getCustomerId() {
|
||||
return customerId;
|
||||
}
|
||||
|
||||
public void setCustomerId(long customerId) {
|
||||
this.customerId = customerId;
|
||||
}
|
||||
|
||||
public long getProductId() {
|
||||
return productId;
|
||||
}
|
||||
|
||||
public void setProductId(long productId) {
|
||||
this.productId = productId;
|
||||
}
|
||||
}
|
||||
|
||||
static class Customer {
|
||||
|
||||
String name;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
static class Product {
|
||||
|
||||
String name;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
static class EnrichedOrder {
|
||||
|
||||
Product product;
|
||||
Customer customer;
|
||||
Order order;
|
||||
|
||||
public Product getProduct() {
|
||||
return product;
|
||||
}
|
||||
|
||||
public void setProduct(Product product) {
|
||||
this.product = product;
|
||||
}
|
||||
|
||||
public Customer getCustomer() {
|
||||
return customer;
|
||||
}
|
||||
|
||||
public void setCustomer(Customer customer) {
|
||||
this.customer = customer;
|
||||
}
|
||||
|
||||
public Order getOrder() {
|
||||
return order;
|
||||
}
|
||||
|
||||
public void setOrder(Order order) {
|
||||
this.order = order;
|
||||
}
|
||||
}
|
||||
|
||||
private static class CustomerOrder {
|
||||
private final Customer customer;
|
||||
private final Order order;
|
||||
|
||||
CustomerOrder(final Customer customer, final Order order) {
|
||||
this.customer = customer;
|
||||
this.order = order;
|
||||
}
|
||||
|
||||
long productId() {
|
||||
return order.getProductId();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,485 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecords;
|
||||
import org.apache.kafka.clients.producer.ProducerConfig;
|
||||
import org.apache.kafka.common.serialization.LongDeserializer;
|
||||
import org.apache.kafka.common.serialization.LongSerializer;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.common.serialization.StringDeserializer;
|
||||
import org.apache.kafka.common.serialization.StringSerializer;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.Joined;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
import org.apache.kafka.streams.kstream.Serialized;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class StreamToTableJoinFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1,
|
||||
true, "output-topic-1", "output-topic-2", "user-clicks-2", "user-regions-2");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@Test
|
||||
public void testStreamToTable() {
|
||||
SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-1",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");
|
||||
|
||||
runTest(app, consumer);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStreamToTableBiFunction() {
|
||||
SpringApplication app = new SpringApplication(BiFunctionCountClicksPerRegionApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");
|
||||
|
||||
runTest(app, consumer);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStreamToTableBiConsumer() throws Exception {
|
||||
SpringApplication app = new SpringApplication(BiConsumerApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");
|
||||
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.process_in_0.destination=user-clicks-1",
|
||||
"--spring.cloud.stream.bindings.process_in_1.destination=user-regions-1",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.process_in_0.consumer.applicationId" +
|
||||
"=testStreamToTableBiConsumer",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
|
||||
// Input 1: Region per user (multiple records allowed per user).
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(
|
||||
new KeyValue<>("alice", "asia")
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(senderProps1);
|
||||
KafkaTemplate<String, String> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic("user-regions-1");
|
||||
|
||||
for (KeyValue<String, String> keyValue : userRegions) {
|
||||
template1.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
// Input 2: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks = Arrays.asList(
|
||||
new KeyValue<>("alice", 13L)
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<String, Long> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("user-clicks-1");
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
Assert.isTrue(BiConsumerApplication.latch.await(10, TimeUnit.SECONDS), "Failed to receive message");
|
||||
|
||||
}
|
||||
finally {
|
||||
consumer.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void runTest(SpringApplication app, Consumer<String, Long> consumer) {
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.process_in_0.destination=user-clicks-1",
|
||||
"--spring.cloud.stream.bindings.process_in_1.destination=user-regions-1",
|
||||
"--spring.cloud.stream.bindings.process_out.destination=output-topic-1",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.process_in_0.consumer.applicationId" +
|
||||
"=StreamToTableJoinFunctionTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
|
||||
// Input 1: Region per user (multiple records allowed per user).
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(
|
||||
new KeyValue<>("alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"),
|
||||
new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"),
|
||||
new KeyValue<>("alice", "europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"),
|
||||
new KeyValue<>("fang", "asia")
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(senderProps1);
|
||||
KafkaTemplate<String, String> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic("user-regions-1");
|
||||
|
||||
for (KeyValue<String, String> keyValue : userRegions) {
|
||||
template1.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
// Input 2: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks = Arrays.asList(
|
||||
new KeyValue<>("alice", 13L),
|
||||
new KeyValue<>("bob", 4L),
|
||||
new KeyValue<>("chao", 25L),
|
||||
new KeyValue<>("bob", 19L),
|
||||
new KeyValue<>("dave", 56L),
|
||||
new KeyValue<>("eve", 78L),
|
||||
new KeyValue<>("alice", 40L),
|
||||
new KeyValue<>("fang", 99L)
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<String, Long> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("user-clicks-1");
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
List<KeyValue<String, Long>> expectedClicksPerRegion = Arrays.asList(
|
||||
new KeyValue<>("americas", 101L),
|
||||
new KeyValue<>("europe", 109L),
|
||||
new KeyValue<>("asia", 124L)
|
||||
);
|
||||
|
||||
//Verify that we receive the expected data
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<String, Long>> actualClicksPerRegion = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<String, Long> record : records) {
|
||||
actualClicksPerRegion.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
} while (count < expectedClicksPerRegion.size() && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count == expectedClicksPerRegion.size()).isTrue();
|
||||
assertThat(actualClicksPerRegion).hasSameElementsAs(expectedClicksPerRegion);
|
||||
}
|
||||
finally {
|
||||
consumer.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGlobalStartOffsetWithLatestAndIndividualBindingWthEarliest() throws Exception {
|
||||
SpringApplication app = new SpringApplication(BiFunctionCountClicksPerRegionApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-3",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-2");
|
||||
|
||||
// Produce data first to the input topic to test the startOffset setting on the
|
||||
// binding (which is set to earliest below).
|
||||
// Input 1: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks = Arrays.asList(
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L)
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<String, Long> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("user-clicks-2");
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.inputBindings.process=input-1,input-2",
|
||||
"--spring.cloud.stream.bindings.input-1.destination=user-clicks-2",
|
||||
"--spring.cloud.stream.bindings.input-2.destination=user-regions-2",
|
||||
"--spring.cloud.stream.bindings.process_out.destination=output-topic-2",
|
||||
"--spring.cloud.stream.bindings.input-1.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.input-2.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.auto.offset.reset=latest",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.startOffset=earliest",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.application-id" +
|
||||
"=StreamToTableJoinFunctionTests-foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
Thread.sleep(1000L);
|
||||
|
||||
// Input 2: Region per user (multiple records allowed per user).
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(
|
||||
new KeyValue<>("alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"),
|
||||
new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"),
|
||||
new KeyValue<>("alice", "europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"),
|
||||
new KeyValue<>("fang", "asia")
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(senderProps1);
|
||||
KafkaTemplate<String, String> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic("user-regions-2");
|
||||
|
||||
for (KeyValue<String, String> keyValue : userRegions) {
|
||||
template1.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
|
||||
// Input 1: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks1 = Arrays.asList(
|
||||
new KeyValue<>("bob", 4L),
|
||||
new KeyValue<>("chao", 25L),
|
||||
new KeyValue<>("bob", 19L),
|
||||
new KeyValue<>("dave", 56L),
|
||||
new KeyValue<>("eve", 78L),
|
||||
new KeyValue<>("fang", 99L)
|
||||
);
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks1) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
|
||||
List<KeyValue<String, Long>> expectedClicksPerRegion = Arrays.asList(
|
||||
new KeyValue<>("americas", 101L),
|
||||
new KeyValue<>("europe", 56L),
|
||||
new KeyValue<>("asia", 124L),
|
||||
//1000 alice entries which were there in the topic before the consumer started.
|
||||
//Since we set the startOffset to earliest for the topic, it will read them,
|
||||
//but the join fails to associate with a valid region, thus UNKNOWN.
|
||||
new KeyValue<>("UNKNOWN", 1000L)
|
||||
);
|
||||
|
||||
//Verify that we receive the expected data
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<String, Long>> actualClicksPerRegion = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<String, Long> record : records) {
|
||||
actualClicksPerRegion.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
} while (count < expectedClicksPerRegion.size() && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count).isEqualTo(expectedClicksPerRegion.size());
|
||||
assertThat(actualClicksPerRegion).hasSameElementsAs(expectedClicksPerRegion);
|
||||
//the following removal is a code smell. Check with Oleg to see why this is happening.
|
||||
//culprit is BinderFactoryAutoConfiguration line 300 with the following code:
|
||||
//if (StringUtils.hasText(name)) {
|
||||
// ((StandardEnvironment) environment).getSystemProperties()
|
||||
// .putIfAbsent("spring.cloud.stream.function.definition", name);
|
||||
// }
|
||||
context.getEnvironment().getSystemProperties()
|
||||
.remove("spring.cloud.stream.function.definition");
|
||||
}
|
||||
finally {
|
||||
|
||||
consumer.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tuple for a region and its associated number of clicks.
|
||||
*/
|
||||
private static final class RegionWithClicks {
|
||||
|
||||
private final String region;
|
||||
private final long clicks;
|
||||
|
||||
RegionWithClicks(String region, long clicks) {
|
||||
if (region == null || region.isEmpty()) {
|
||||
throw new IllegalArgumentException("region must be set");
|
||||
}
|
||||
if (clicks < 0) {
|
||||
throw new IllegalArgumentException("clicks must not be negative");
|
||||
}
|
||||
this.region = region;
|
||||
this.clicks = clicks;
|
||||
}
|
||||
|
||||
public String getRegion() {
|
||||
return region;
|
||||
}
|
||||
|
||||
public long getClicks() {
|
||||
return clicks;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class CountClicksPerRegionApplication {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<String, Long>, Function<KTable<String, String>, KStream<String, Long>>> process() {
|
||||
return userClicksStream -> (userRegionsTable -> (userClicksStream
|
||||
.leftJoin(userRegionsTable, (clicks, region) -> new RegionWithClicks(region == null ?
|
||||
"UNKNOWN" : region, clicks),
|
||||
Joined.with(Serdes.String(), Serdes.Long(), null))
|
||||
.map((user, regionWithClicks) -> new KeyValue<>(regionWithClicks.getRegion(),
|
||||
regionWithClicks.getClicks()))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.Long()))
|
||||
.reduce(Long::sum)
|
||||
.toStream()));
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class BiFunctionCountClicksPerRegionApplication {
|
||||
|
||||
@Bean
|
||||
public BiFunction<KStream<String, Long>, KTable<String, String>, KStream<String, Long>> process() {
|
||||
return (userClicksStream, userRegionsTable) -> (userClicksStream
|
||||
.leftJoin(userRegionsTable, (clicks, region) -> new RegionWithClicks(region == null ?
|
||||
"UNKNOWN" : region, clicks),
|
||||
Joined.with(Serdes.String(), Serdes.Long(), null))
|
||||
.map((user, regionWithClicks) -> new KeyValue<>(regionWithClicks.getRegion(),
|
||||
regionWithClicks.getClicks()))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.Long()))
|
||||
.reduce(Long::sum)
|
||||
.toStream());
|
||||
}
|
||||
}
|
||||
|
||||
@EnableAutoConfiguration
|
||||
public static class BiConsumerApplication {
|
||||
|
||||
static CountDownLatch latch = new CountDownLatch(2);
|
||||
|
||||
@Bean
|
||||
public BiConsumer<KStream<String, Long>, KTable<String, String>> process() {
|
||||
return (userClicksStream, userRegionsTable) -> {
|
||||
userClicksStream.foreach((key, value) -> latch.countDown());
|
||||
userRegionsTable.toStream().foreach((key, value) -> latch.countDown());
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -69,90 +69,98 @@ import static org.mockito.Mockito.verify;
|
||||
public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts", "error.words.group",
|
||||
"error.word1.groupx", "error.word2.groupx");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"DeserializationErrorHandlerByKafkaTests-out", "error.DeserializationErrorHandlerByKafkaTests-In.group", "error.word1.groupx", "error.word2.groupx");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
@SpyBean
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate;
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
|
||||
embeddedKafka.getBrokersAsString());
|
||||
|
||||
System.setProperty("server.port","0");
|
||||
System.setProperty("spring.jmx.enabled","false");
|
||||
System.setProperty("server.port", "0");
|
||||
System.setProperty("spring.jmx.enabled", "false");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "DeserializationErrorHandlerByKafkaTests-out");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
System.clearProperty("spring.cloud.stream.kafka.streams.binder.brokers");
|
||||
System.clearProperty("server.port");
|
||||
System.clearProperty("spring.jmx.enabled");
|
||||
}
|
||||
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deser-kafka-dlq",
|
||||
"spring.cloud.stream.bindings.input.group=group",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=" +
|
||||
"org.apache.kafka.common.serialization.Serdes$IntegerSerde"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByKafkaAndDlqTests extends DeserializationErrorHandlerByKafkaTests {
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
public static class DeserializationByKafkaAndDlqTests
|
||||
extends DeserializationErrorHandlerByKafkaTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.setDefaultTopic("DeserializationErrorHandlerByKafkaTests-In");
|
||||
template.sendDefault("foobar");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.words.group");
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.DeserializationErrorHandlerByKafkaTests-In.group");
|
||||
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1, "error.words.group");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.DeserializationErrorHandlerByKafkaTests-In.group");
|
||||
assertThat(cr.value().equals("foobar")).isTrue();
|
||||
|
||||
//Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
// Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"spring.cloud.stream.bindings.input.destination=word1,word2",
|
||||
"spring.cloud.stream.kafka.streams.default.consumer.applicationId=deser-kafka-dlq-multi-input",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deser-kafka-dlq-multi-input",
|
||||
"spring.cloud.stream.bindings.input.group=groupx",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=" +
|
||||
"org.apache.kafka.common.serialization.Serdes$IntegerSerde"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByKafkaAndDlqTestsWithMultipleInputs extends DeserializationErrorHandlerByKafkaTests {
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
// @checkstyle:on
|
||||
public static class DeserializationByKafkaAndDlqTestsWithMultipleInputs
|
||||
extends DeserializationErrorHandlerByKafkaTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() throws Exception {
|
||||
public void test() {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("word1");
|
||||
template.sendDefault("foobar");
|
||||
@@ -160,22 +168,30 @@ public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
template.setDefaultTopic("word2");
|
||||
template.sendDefault("foobar");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobarx", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobarx",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.word1.groupx", "error.word2.groupx");
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.word1.groupx",
|
||||
"error.word2.groupx");
|
||||
|
||||
//TODO: Investigate why the ordering matters below: i.e. if we consume from error.word1.groupx first, an exception is thrown.
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1, "error.word2.groupx");
|
||||
// TODO: Investigate why the ordering matters below: i.e.
|
||||
// if we consume from error.word1.groupx first, an exception is thrown.
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.word2.groupx");
|
||||
assertThat(cr1.value().equals("foobar")).isTrue();
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1, "error.word1.groupx");
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.word1.groupx");
|
||||
assertThat(cr2.value().equals("foobar")).isTrue();
|
||||
|
||||
//Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
// Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@@ -192,14 +208,15 @@ public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
public KStream<?, String> process(KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, "Count for " + key.key() + " : " + value));
|
||||
.windowedBy(timeWindows).count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream().map((key, value) -> new KeyValue<>(null,
|
||||
"Count for " + key.key() + " : " + value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -63,28 +63,30 @@ import static org.mockito.Mockito.verify;
|
||||
public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id", "error.foos.foobar-group",
|
||||
"error.foos1.fooz-group", "error.foos2.fooz-group");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id", "error.foos.foobar-group", "error.foos1.fooz-group",
|
||||
"error.foos2.fooz-group");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
@SpyBean
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate;
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
|
||||
|
||||
private static Consumer<Integer, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
|
||||
embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("server.port", "0");
|
||||
System.setProperty("spring.jmx.enabled", "false");
|
||||
|
||||
System.setProperty("server.port","0");
|
||||
System.setProperty("spring.jmx.enabled","false");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foob", "false", embeddedKafka);
|
||||
//consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Deserializer.class.getName());
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("kafka-streams-dlq-tests", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -92,66 +94,81 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
System.clearProperty("spring.cloud.stream.kafka.streams.binder.brokers");
|
||||
System.clearProperty("server.port");
|
||||
System.clearProperty("spring.jmx.enabled");
|
||||
}
|
||||
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
|
||||
"spring.cloud.stream.bindings.input.destination=foos",
|
||||
"spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.bindings.output.producer.headerMode=raw",
|
||||
"spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"spring.cloud.stream.bindings.input.consumer.headerMode=raw",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deserializationByBinderAndDlqTests",
|
||||
"spring.cloud.stream.bindings.input.group=foobar-group"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByBinderAndDlqTests extends DeserializtionErrorHandlerByBinderTests {
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id"
|
||||
+ "=deserializationByBinderAndDlqTests",
|
||||
"spring.cloud.stream.bindings.input.group=foobar-group" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
public static class DeserializationByBinderAndDlqTests
|
||||
extends DeserializtionErrorHandlerByBinderTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() throws Exception {
|
||||
public void test() {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos");
|
||||
template.sendDefault("hello");
|
||||
template.sendDefault(7, "hello");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.foos.foobar-group");
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
|
||||
"error.foos.foobar-group");
|
||||
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1, "error.foos.foobar-group");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.foos.foobar-group");
|
||||
assertThat(cr.value().equals("hello")).isTrue();
|
||||
|
||||
//Ensuring that the deserialization was indeed done by the binder
|
||||
verify(KafkaStreamsMessageConversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
// Ensuring that the deserialization was indeed done by the binder
|
||||
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
|
||||
"spring.cloud.stream.bindings.input.destination=foos1,foos2",
|
||||
"spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deserializationByBinderAndDlqTestsWithMultipleInputs",
|
||||
"spring.cloud.stream.bindings.input.group=fooz-group"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByBinderAndDlqTestsWithMultipleInputs extends DeserializtionErrorHandlerByBinderTests {
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id"
|
||||
+ "=deserializationByBinderAndDlqTestsWithMultipleInputs",
|
||||
"spring.cloud.stream.bindings.input.group=fooz-group" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
public static class DeserializationByBinderAndDlqTestsWithMultipleInputs
|
||||
extends DeserializtionErrorHandlerByBinderTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() throws Exception {
|
||||
public void test() {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos1");
|
||||
template.sendDefault("hello");
|
||||
@@ -159,21 +176,28 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
template.setDefaultTopic("foos2");
|
||||
template.sendDefault("hello");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar1", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar1",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.foos1.fooz-group", "error.foos2.fooz-group");
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.foos1.fooz-group",
|
||||
"error.foos2.fooz-group");
|
||||
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1, "error.foos1.fooz-group");
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.foos1.fooz-group");
|
||||
assertThat(cr1.value().equals("hello")).isTrue();
|
||||
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1, "error.foos2.fooz-group");
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.foos2.fooz-group");
|
||||
assertThat(cr2.value().equals("hello")).isTrue();
|
||||
|
||||
//Ensuring that the deserialization was indeed done by the binder
|
||||
verify(KafkaStreamsMessageConversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
// Ensuring that the deserialization was indeed done by the binder
|
||||
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@@ -183,16 +207,17 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Integer, Long> process(KStream<Object, Product> input) {
|
||||
return input
|
||||
.filter((key, product) -> product.getId() == 123)
|
||||
return input.filter((key, product) -> product.getId() == 123)
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class)))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class),
|
||||
new JsonSerde<>(Product.class)))
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("id-count-store-x"))
|
||||
.toStream()
|
||||
.count(Materialized.as("id-count-store-x")).toStream()
|
||||
.map((key, value) -> new KeyValue<>(key.key().id, value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
|
||||
Integer id;
|
||||
@@ -204,5 +229,7 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,297 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.integration;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.producer.ProducerRecord;
|
||||
import org.apache.kafka.streams.KafkaStreams;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.assertj.core.util.Lists;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.actuate.health.CompositeHealthContributor;
|
||||
import org.springframework.boot.actuate.health.Health;
|
||||
import org.springframework.boot.actuate.health.Status;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.Output;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsBinderHealthIndicator;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.support.SendResult;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
import org.springframework.util.concurrent.ListenableFuture;
|
||||
import org.springframework.util.concurrent.ListenableFutureCallback;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* @author Arnaud Jardiné
|
||||
*/
|
||||
public class KafkaStreamsBinderHealthIndicatorTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"out", "out2");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() {
|
||||
System.setProperty("logging.level.org.apache.kafka", "OFF");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorUpTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = singleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in", "{\"id\":\"123\"}")),
|
||||
Status.UP, "out");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorDownTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = singleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in", "{\"id\":\"124\"}")),
|
||||
Status.DOWN, "out");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorUpMultipleKStreamsTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = multipleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in2", "{\"id\":\"123\"}")),
|
||||
Status.UP, "out", "out2");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorDownMultipleKStreamsTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = multipleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in2", "{\"id\":\"124\"}")),
|
||||
Status.DOWN, "out", "out2");
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean waitFor(Status status, Map<String, Object> details) {
|
||||
if (status == Status.UP) {
|
||||
String threadState = (String) details.get("threadState");
|
||||
return threadState != null
|
||||
&& (threadState.equalsIgnoreCase(KafkaStreams.State.REBALANCING.name())
|
||||
|| threadState.equalsIgnoreCase("PARTITIONS_REVOKED")
|
||||
|| threadState.equalsIgnoreCase("PARTITIONS_ASSIGNED")
|
||||
|| threadState.equalsIgnoreCase(
|
||||
KafkaStreams.State.PENDING_SHUTDOWN.name()));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private void receive(ConfigurableApplicationContext context,
|
||||
List<ProducerRecord<Integer, String>> records, Status expected,
|
||||
String... topics) throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id0",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
try (Consumer<String, String> consumer = cf.createConsumer()) {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
CountDownLatch latch = new CountDownLatch(records.size());
|
||||
for (ProducerRecord<Integer, String> record : records) {
|
||||
ListenableFuture<SendResult<Integer, String>> future = template
|
||||
.send(record);
|
||||
future.addCallback(
|
||||
new ListenableFutureCallback<SendResult<Integer, String>>() {
|
||||
@Override
|
||||
public void onFailure(Throwable ex) {
|
||||
Assert.fail();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSuccess(SendResult<Integer, String> result) {
|
||||
latch.countDown();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
latch.await(5, TimeUnit.SECONDS);
|
||||
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, topics);
|
||||
KafkaTestUtils.getRecords(consumer, 1000);
|
||||
|
||||
TimeUnit.SECONDS.sleep(2);
|
||||
checkHealth(context, expected);
|
||||
}
|
||||
finally {
|
||||
pf.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
private static void checkHealth(ConfigurableApplicationContext context,
|
||||
Status expected) throws InterruptedException {
|
||||
CompositeHealthContributor healthIndicator = context
|
||||
.getBean("bindersHealthContributor", CompositeHealthContributor.class);
|
||||
KafkaStreamsBinderHealthIndicator kafkaStreamsBinderHealthIndicator = (KafkaStreamsBinderHealthIndicator) healthIndicator.getContributor("kstream");
|
||||
Health health = kafkaStreamsBinderHealthIndicator.health();
|
||||
while (waitFor(health.getStatus(), health.getDetails())) {
|
||||
TimeUnit.SECONDS.sleep(2);
|
||||
health = kafkaStreamsBinderHealthIndicator.health();
|
||||
}
|
||||
assertThat(health.getStatus()).isEqualTo(expected);
|
||||
}
|
||||
|
||||
private ConfigurableApplicationContext singleStream() {
|
||||
SpringApplication app = new SpringApplication(KStreamApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
return app.run("--server.port=0", "--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=in",
|
||||
"--spring.cloud.stream.bindings.output.destination=out",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId="
|
||||
+ "ApplicationHealthTest-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
}
|
||||
|
||||
private ConfigurableApplicationContext multipleStream() {
|
||||
System.setProperty("logging.level.org.apache.kafka", "OFF");
|
||||
SpringApplication app = new SpringApplication(AnotherKStreamApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
return app.run("--server.port=0", "--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=in",
|
||||
"--spring.cloud.stream.bindings.output.destination=out",
|
||||
"--spring.cloud.stream.bindings.input2.destination=in2",
|
||||
"--spring.cloud.stream.bindings.output2.destination=out2",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId="
|
||||
+ "ApplicationHealthTest-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input2.consumer.applicationId="
|
||||
+ "ApplicationHealthTest2-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
public static class KStreamApplication {
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Object, Product> process(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> {
|
||||
if (product.getId() != 123) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding({ KafkaStreamsProcessor.class, KafkaStreamsProcessorX.class })
|
||||
@EnableAutoConfiguration
|
||||
public static class AnotherKStreamApplication {
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Object, Product> process(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> {
|
||||
if (product.getId() != 123) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
@StreamListener("input2")
|
||||
@SendTo("output2")
|
||||
public KStream<Object, Product> process2(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> {
|
||||
if (product.getId() != 123) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public interface KafkaStreamsProcessorX {
|
||||
|
||||
@Input("input2")
|
||||
KStream<?, ?> input();
|
||||
|
||||
@Output("output2")
|
||||
KStream<?, ?> output();
|
||||
|
||||
}
|
||||
|
||||
public static class Product {
|
||||
|
||||
Integer id;
|
||||
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -37,12 +37,10 @@ import org.junit.Test;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
@@ -55,30 +53,34 @@ import org.springframework.messaging.handler.annotation.SendTo;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* This test case demonstrates a kafk-streams topology which consumes messages from
|
||||
* multiple kafka topics(destinations).
|
||||
*
|
||||
* See
|
||||
* {@link KafkaStreamsBinderMultipleInputTopicsTest#testKstreamWordCountWithStringInputAndPojoOuput}
|
||||
* where the input topic names are specified as comma-separated String values for the
|
||||
* property spring.cloud.stream.bindings.input.destination.
|
||||
*
|
||||
* @author Sarath Shyam
|
||||
*
|
||||
* This test case demonstrates a kafk-streams topology which consumes messages from
|
||||
* multiple kafka topics(destinations).
|
||||
* See {@link KafkaStreamsBinderMultipleInputTopicsTest#testKstreamWordCountWithStringInputAndPojoOuput} where
|
||||
* the input topic names are specified as comma-separated String values for
|
||||
* the property spring.cloud.stream.bindings.input.destination.
|
||||
*
|
||||
*
|
||||
*/
|
||||
public class KafkaStreamsBinderMultipleInputTopicsTest {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
}
|
||||
@@ -90,7 +92,8 @@ public class KafkaStreamsBinderMultipleInputTopicsTest {
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithStringInputAndPojoOuput() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
SpringApplication app = new SpringApplication(
|
||||
WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
@@ -99,68 +102,68 @@ public class KafkaStreamsBinderMultipleInputTopicsTest {
|
||||
"--spring.cloud.stream.bindings.output.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=WordCountProcessorApplication-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=WordCountProcessorApplication-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
try {
|
||||
receiveAndValidate(context);
|
||||
} finally {
|
||||
receiveAndValidate();
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
|
||||
private void receiveAndValidate()
|
||||
throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words1");
|
||||
template.sendDefault("foobar1");
|
||||
template.setDefaultTopic("words2");
|
||||
template.sendDefault("foobar2");
|
||||
|
||||
//Sleep a bit so that both the messages are processed before reading from the output topic.
|
||||
//Else assertions might fail arbitrarily.
|
||||
// Sleep a bit so that both the messages are processed before reading from the
|
||||
// output topic.
|
||||
// Else assertions might fail arbitrarily.
|
||||
Thread.sleep(5000);
|
||||
|
||||
ConsumerRecords<String, String> received = KafkaTestUtils.getRecords(consumer);
|
||||
|
||||
ConsumerRecords<String, String> received = KafkaTestUtils.getRecords(consumer);
|
||||
List<String> wordCounts = new ArrayList<>(2);
|
||||
|
||||
received.records("counts").forEach((consumerRecord) -> {
|
||||
wordCounts.add((consumerRecord.value()));
|
||||
});
|
||||
|
||||
received.records("counts")
|
||||
.forEach((consumerRecord) -> wordCounts.add((consumerRecord.value())));
|
||||
System.out.println(wordCounts);
|
||||
assertThat(wordCounts.contains("{\"word\":\"foobar1\",\"count\":1}")).isTrue();
|
||||
assertThat(wordCounts.contains("{\"word\":\"foobar2\",\"count\":1}")).isTrue();
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
static class WordCountProcessorApplication {
|
||||
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<?, WordCount> process(@Input("input") KStream<Object, String> input) {
|
||||
public KStream<?, WordCount> process(
|
||||
@Input("input") KStream<Object, String> input) {
|
||||
|
||||
input.map((k,v) -> {
|
||||
input.map((k, v) -> {
|
||||
System.out.println(k);
|
||||
System.out.println(v);
|
||||
return new KeyValue<>(k,v);
|
||||
return new KeyValue<>(k, v);
|
||||
});
|
||||
return input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.count(Materialized.as("WordCounts"))
|
||||
.toStream()
|
||||
.count(Materialized.as("WordCounts")).toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key, value)));
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -18,10 +18,10 @@ package org.springframework.cloud.stream.binder.kafka.streams.integration;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.common.serialization.LongDeserializer;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
@@ -57,18 +57,22 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<Integer, String> consumer;
|
||||
private static Consumer<Integer, Long> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
|
||||
//consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Deserializer.class.getName());
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumerProps.put("value.deserializer", LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<Integer, Long> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -87,31 +91,34 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
"--spring.cloud.stream.bindings.input.destination=foos",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId="
|
||||
+ "KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
try {
|
||||
receiveAndValidateFoo(context);
|
||||
} finally {
|
||||
receiveAndValidateFoo();
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
|
||||
private void receiveAndValidateFoo() {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos");
|
||||
template.sendDefault("{\"id\":\"123\"}");
|
||||
ConsumerRecord<Integer, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
|
||||
ConsumerRecord<Integer, Long> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts-id");
|
||||
|
||||
assertThat(cr.key()).isEqualTo(123);
|
||||
ObjectMapper om = new ObjectMapper();
|
||||
Long aLong = om.readValue(cr.value(), Long.class);
|
||||
assertThat(aLong).isEqualTo(1L);
|
||||
assertThat(cr.value()).isEqualTo(1L);
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@@ -121,18 +128,20 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Integer, Long> process(KStream<Object, Product> input) {
|
||||
return input
|
||||
.filter((key, product) -> product.getId() == 123)
|
||||
return input.filter((key, product) -> product.getId() == 123)
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class)))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class),
|
||||
new JsonSerde<>(Product.class)))
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("id-count-store-x"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(key.key().id, value));
|
||||
.count(Materialized.as("id-count-store-x")).toStream()
|
||||
.map((key, value) -> {
|
||||
return new KeyValue<>(key.key().id, value);
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
public static class Product {
|
||||
|
||||
Integer id;
|
||||
|
||||
@@ -143,5 +152,7 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -40,16 +40,13 @@ import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.integration.test.util.TestUtils;
|
||||
@@ -73,19 +70,23 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts", "counts-1");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
|
||||
public static void setUp() {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "counts-1");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
@@ -94,78 +95,90 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithApplicationIdSpecifiedAtDefaultConsumer() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
public void testKstreamWordCountWithApplicationIdSpecifiedAtDefaultConsumer()
|
||||
throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id=basic-word-count",
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id=testKstreamWordCountWithApplicationIdSpecifiedAtDefaultConsumer",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.bindings.output.producer.headerMode=raw",
|
||||
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
receiveAndValidate(context);
|
||||
"--spring.cloud.stream.kafka.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString())) {
|
||||
receiveAndValidate("words", "counts");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithInputBindingLevelApplicationId() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
public void testKstreamWordCountWithInputBindingLevelApplicationId()
|
||||
throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=basic-word-count",
|
||||
"--spring.cloud.stream.bindings.input.destination=words-1",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts-1",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=testKstreamWordCountWithInputBindingLevelApplicationId",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde=org.springframework.kafka.support.serializer.JsonSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.bindings.input.consumer.concurrency=2",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
receiveAndValidate(context);
|
||||
//Assertions on StreamBuilderFactoryBean
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context.getBean("&stream-builder-process", StreamsBuilderFactoryBean.class);
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString())) {
|
||||
receiveAndValidate("words-1", "counts-1");
|
||||
// Assertions on StreamBuilderFactoryBean
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context
|
||||
.getBean("&stream-builder-WordCountProcessorApplication-process", StreamsBuilderFactoryBean.class);
|
||||
KafkaStreams kafkaStreams = streamsBuilderFactoryBean.getKafkaStreams();
|
||||
ReadOnlyWindowStore<Object, Object> store = kafkaStreams.store("foo-WordCounts", QueryableStoreTypes.windowStore());
|
||||
ReadOnlyWindowStore<Object, Object> store = kafkaStreams
|
||||
.store("foo-WordCounts", QueryableStoreTypes.windowStore());
|
||||
assertThat(store).isNotNull();
|
||||
|
||||
Map streamConfigGlobalProperties = context.getBean("streamConfigGlobalProperties", Map.class);
|
||||
Map streamConfigGlobalProperties = context
|
||||
.getBean("streamConfigGlobalProperties", Map.class);
|
||||
|
||||
//Ensure that concurrency settings are mapped to number of stream task threads in Kafka Streams.
|
||||
final Integer concurrency = (Integer)streamConfigGlobalProperties.get(StreamsConfig.NUM_STREAM_THREADS_CONFIG);
|
||||
// Ensure that concurrency settings are mapped to number of stream task
|
||||
// threads in Kafka Streams.
|
||||
final Integer concurrency = (Integer) streamConfigGlobalProperties
|
||||
.get(StreamsConfig.NUM_STREAM_THREADS_CONFIG);
|
||||
assertThat(concurrency).isEqualTo(2);
|
||||
|
||||
sendTombStoneRecordsAndVerifyGracefulHandling();
|
||||
|
||||
CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean, "cleanupConfig",
|
||||
CleanupConfig.class);
|
||||
CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean,
|
||||
"cleanupConfig", CleanupConfig.class);
|
||||
assertThat(cleanup.cleanupOnStart()).isTrue();
|
||||
assertThat(cleanup.cleanupOnStop()).isFalse();
|
||||
}
|
||||
}
|
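The concurrency assertion in the test above relies on the binder translating the common binding-level consumer property into the Kafka Streams thread-count setting. Roughly, a sketch of the two sides of that mapping as exercised here:

spring.cloud.stream.bindings.input.consumer.concurrency=2
num.stream.threads=2   (StreamsConfig.NUM_STREAM_THREADS_CONFIG, read back from streamConfigGlobalProperties)

So a single consumer-side setting controls how many stream task threads the underlying KafkaStreams instance starts.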
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
|
||||
private void receiveAndValidate(String in, String out) {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.setDefaultTopic(in);
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
out);
|
||||
assertThat(cr.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
|
||||
}
|
||||
finally {
|
||||
@@ -175,14 +188,17 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
|
||||
private void sendTombStoneRecordsAndVerifyGracefulHandling() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.setDefaultTopic("words-1");
|
||||
template.sendDefault(null);
|
||||
ConsumerRecords<String, String> received = consumer.poll(Duration.ofMillis(5000));
|
||||
//By asserting that the received record is empty, we are ensuring that the tombstone record
|
||||
//was handled by the binder gracefully.
|
||||
ConsumerRecords<String, String> received = consumer
|
||||
.poll(Duration.ofMillis(5000));
|
||||
// By asserting that the received record is empty, we are ensuring that the
|
||||
// tombstone record
|
||||
// was handled by the binder gracefully.
|
||||
assertThat(received.isEmpty()).isTrue();
|
||||
}
|
||||
finally {
|
||||
@@ -192,24 +208,24 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
static class WordCountProcessorApplication {
|
||||
|
||||
@Autowired
|
||||
private TimeWindows timeWindows;
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<?, WordCount> process(@Input("input") KStream<Object, String> input) {
|
||||
public KStream<?, WordCount> process(
|
||||
@Input("input") KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("foo-WordCounts"))
|
||||
.windowedBy(TimeWindows.of(Duration.ofSeconds(5))).count(Materialized.as("foo-WordCounts"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
|
||||
.map((key, value) -> new KeyValue<>(null,
|
||||
new WordCount(key.key(), value,
|
||||
new Date(key.window().start()),
|
||||
new Date(key.window().end()))));
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -267,6 +283,7 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.integration;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
|
||||
@@ -34,16 +35,12 @@ import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.boot.test.mock.mockito.SpyBean;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.annotation.PropertySource;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
@@ -54,6 +51,7 @@ import org.springframework.messaging.handler.annotation.SendTo;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
import org.springframework.util.StopWatch;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
@@ -69,100 +67,120 @@ import static org.mockito.Mockito.verify;
|
||||
public abstract class KafkaStreamsNativeEncodingDecodingTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"decode-counts", "decode-counts-1");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
@SpyBean
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate;
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
|
||||
public static void setUp() {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
|
||||
embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("server.port", "0");
|
||||
System.setProperty("spring.jmx.enabled", "false");
|
||||
|
||||
System.setProperty("server.port","0");
|
||||
System.setProperty("spring.jmx.enabled","false");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, "decode-counts", "decode-counts-1");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
System.clearProperty("spring.cloud.stream.kafka.streams.binder.brokers");
|
||||
System.clearProperty("server.port");
|
||||
System.clearProperty("spring.jmx.enabled");
|
||||
}
|
||||
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=NativeEncodingDecodingEnabledTests-abc"
|
||||
},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class NativeEncodingDecodingEnabledTests extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
"spring.cloud.stream.bindings.input.destination=decode-words-1",
|
||||
"spring.cloud.stream.bindings.output.destination=decode-counts-1",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=NativeEncodingDecodingEnabledTests-abc" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
public static class NativeEncodingDecodingEnabledTests
|
||||
extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
|
||||
@Test
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.setDefaultTopic("decode-words-1");
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"decode-counts-1");
|
||||
assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
|
||||
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
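The behaviour split between the two test variants hinges on these binding properties (set explicitly to false in the Disabled variant below, and relied on being in effect in the Enabled variant above):

spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true
spring.cloud.stream.bindings.output.producer.useNativeEncoding=true

When native encoding/decoding is in effect, (de)serialization is left to the configured Kafka Serdes and the binder's conversionDelegate is never invoked, which is what the verify(..., never()) calls above assert; the Disabled variant asserts the opposite, namely that serializeOnOutbound and deserializeOnInbound are called.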
||||
|
||||
@SpringBootTest(webEnvironment= SpringBootTest.WebEnvironment.NONE,
|
||||
properties = "spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=NativeEncodingDecodingEnabledTests-xyz")
|
||||
public static class NativeEncodingDecodingDisabledTests extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {
|
||||
"spring.cloud.stream.bindings.input.destination=decode-words",
|
||||
"spring.cloud.stream.bindings.output.destination=decode-counts",
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=false",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input3.consumer.applicationId"
|
||||
+ "=hello-NativeEncodingDecodingEnabledTests-xyz" })
|
||||
public static class NativeEncodingDecodingDisabledTests
|
||||
extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
|
||||
@Test
|
||||
public void test() throws Exception {
|
||||
public void test() {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.setDefaultTopic("decode-words");
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
StopWatch stopWatch = new StopWatch();
|
||||
stopWatch.start();
|
||||
System.out.println("Starting: ");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"decode-counts");
|
||||
stopWatch.stop();
|
||||
System.out.println("Total time: " + stopWatch.getTotalTimeSeconds());
|
||||
assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
|
||||
|
||||
verify(KafkaStreamsMessageConversionDelegate).serializeOnOutbound(any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
verify(conversionDelegate).serializeOnOutbound(any(KStream.class));
|
||||
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@PropertySource("classpath:/org/springframework/cloud/stream/binder/kstream/integTest-1.properties")
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class WordCountProcessorApplication {
|
||||
|
||||
@Autowired
|
||||
private TimeWindows timeWindows;
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<?, String> process(KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, "Count for " + key.key() + " : " + value));
|
||||
.windowedBy(TimeWindows.of(Duration.ofSeconds(5))).count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream().map((key, value) -> new KeyValue<>(null,
|
||||
"Count for " + key.key() + " : " + value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,12 +16,14 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.integration;
|
||||
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.processor.Processor;
|
||||
import org.apache.kafka.streams.processor.ProcessorContext;
|
||||
import org.apache.kafka.streams.state.StoreBuilder;
|
||||
import org.apache.kafka.streams.state.Stores;
|
||||
import org.apache.kafka.streams.state.WindowStore;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
@@ -35,12 +37,14 @@ import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsStateStoreProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static junit.framework.TestCase.fail;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
@@ -50,9 +54,11 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsStateStoreIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
@Test
|
||||
public void testKstreamStateStore() throws Exception {
|
||||
@@ -62,36 +68,121 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=KafkaStreamsStateStoreIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=KafkaStreamsStateStoreIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
try {
|
||||
Thread.sleep(2000);
|
||||
receiveAndValidateFoo(context);
|
||||
} catch (Exception e) {
|
||||
receiveAndValidateFoo(context, ProductCountApplication.class);
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw e;
|
||||
} finally {
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
|
||||
@Test
|
||||
public void testKstreamStateStoreBuilderBeansDefinedInApplication() throws Exception {
|
||||
SpringApplication app = new SpringApplication(StateStoreBeanApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input3.destination=foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input3.consumer.applicationId"
|
||||
+ "=KafkaStreamsStateStoreIntegrationTests-xyzabc-123",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
try {
|
||||
Thread.sleep(2000);
|
||||
receiveAndValidateFoo(context, StateStoreBeanApplication.class);
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSameStateStoreIsCreatedOnlyOnceWhenMultipleInputBindingsArePresent() throws Exception {
|
||||
SpringApplication app = new SpringApplication(ProductCountApplicationWithMultipleInputBindings.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input1.destination=foobar",
|
||||
"--spring.cloud.stream.bindings.input2.destination=hello-foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input1.consumer.applicationId"
|
||||
+ "=KafkaStreamsStateStoreIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString());
|
||||
try {
|
||||
Thread.sleep(2000);
|
||||
// We are not particularly interested in querying the state store here, as that is verified by the other test
|
||||
// in this class. This test verifies that the same store is not attempted to be created by multiple input bindings.
|
||||
// Normally, that will cause an exception to be thrown. However, by not getting any exceptions, we are verifying
|
||||
// that the binder is handling it appropriately.
|
||||
// For more info, see this issue: https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/issues/551
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context, Class<?> clazz)
|
||||
throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foobar");
|
||||
template.sendDefault("{\"id\":\"123\"}");
|
||||
Thread.sleep(1000);
|
||||
|
||||
//assertions
|
||||
ProductCountApplication productCount = context.getBean(ProductCountApplication.class);
|
||||
WindowStore<Object, String> state = productCount.state;
|
||||
assertThat(state != null).isTrue();
|
||||
assertThat(state.name()).isEqualTo("mystate");
|
||||
assertThat(state.persistent()).isTrue();
|
||||
assertThat(productCount.processed).isTrue();
|
||||
// assertions
|
||||
if (clazz.isAssignableFrom(ProductCountApplication.class)) {
|
||||
ProductCountApplication productCount = context
|
||||
.getBean(ProductCountApplication.class);
|
||||
WindowStore<Object, String> state = productCount.state;
|
||||
assertThat(state != null).isTrue();
|
||||
assertThat(state.name()).isEqualTo("mystate");
|
||||
assertThat(state.persistent()).isTrue();
|
||||
assertThat(productCount.processed).isTrue();
|
||||
}
|
||||
else if (clazz.isAssignableFrom(StateStoreBeanApplication.class)) {
|
||||
StateStoreBeanApplication productCount = context
|
||||
.getBean(StateStoreBeanApplication.class);
|
||||
WindowStore<Object, String> state = productCount.state;
|
||||
assertThat(state != null).isTrue();
|
||||
assertThat(state.name()).isEqualTo("mystate");
|
||||
assertThat(state.persistent()).isTrue();
|
||||
assertThat(productCount.processed).isTrue();
|
||||
}
|
||||
else {
|
||||
fail("Expected assertiond did not happen");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessorX.class)
|
||||
@@ -99,33 +190,114 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
public static class ProductCountApplication {
|
||||
|
||||
WindowStore<Object, String> state;
|
||||
|
||||
boolean processed;
|
||||
|
||||
@StreamListener("input")
|
||||
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000)
|
||||
@SuppressWarnings({"deprecation", "unchecked"})
|
||||
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000, retentionMs = 300000)
|
||||
@SuppressWarnings({ "deprecation", "unchecked" })
|
||||
public void process(KStream<Object, Product> input) {
|
||||
|
||||
input
|
||||
.process(() -> new Processor<Object, Product>() {
|
||||
input.process(() -> new Processor<Object, Product>() {
|
||||
|
||||
@Override
|
||||
public void init(ProcessorContext processorContext) {
|
||||
state = (WindowStore) processorContext.getStateStore("mystate");
|
||||
}
|
||||
@Override
|
||||
public void init(ProcessorContext processorContext) {
|
||||
state = (WindowStore) processorContext.getStateStore("mystate");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Object s, Product product) {
|
||||
processed = true;
|
||||
}
|
||||
@Override
|
||||
public void process(Object s, Product product) {
|
||||
processed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (state != null) {
|
||||
state.close();
|
||||
}
|
||||
@Override
|
||||
public void close() {
|
||||
if (state != null) {
|
||||
state.close();
|
||||
}
|
||||
}, "mystate");
|
||||
}
|
||||
}, "mystate");
|
||||
}
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessorZ.class)
|
||||
@EnableAutoConfiguration
|
||||
public static class StateStoreBeanApplication {
|
||||
|
||||
WindowStore<Object, String> state;
|
||||
|
||||
boolean processed;
|
||||
|
||||
@StreamListener("input3")
|
||||
@SuppressWarnings({"unchecked" })
|
||||
public void process(KStream<Object, Product> input) {
|
||||
|
||||
input.process(() -> new Processor<Object, Product>() {
|
||||
|
||||
@Override
|
||||
public void init(ProcessorContext processorContext) {
|
||||
state = (WindowStore) processorContext.getStateStore("mystate");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Object s, Product product) {
|
||||
processed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (state != null) {
|
||||
state.close();
|
||||
}
|
||||
}
|
||||
}, "mystate");
|
||||
}
|
||||
|
||||
@Bean
|
||||
public StoreBuilder mystore() {
|
||||
return Stores.windowStoreBuilder(
|
||||
Stores.persistentWindowStore("mystate",
|
||||
3L, 3, 3L, false), Serdes.String(),
|
||||
Serdes.String());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessorY.class)
|
||||
@EnableAutoConfiguration
|
||||
public static class ProductCountApplicationWithMultipleInputBindings {
|
||||
|
||||
WindowStore<Object, String> state;
|
||||
|
||||
boolean processed;
|
||||
|
||||
@StreamListener
|
||||
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000, retentionMs = 300000)
|
||||
@SuppressWarnings({ "deprecation", "unchecked" })
|
||||
public void process(@Input("input1")KStream<Object, Product> input, @Input("input2")KStream<Object, Product> input2) {
|
||||
|
||||
input.process(() -> new Processor<Object, Product>() {
|
||||
|
||||
@Override
|
||||
public void init(ProcessorContext processorContext) {
|
||||
state = (WindowStore) processorContext.getStateStore("mystate");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Object s, Product product) {
|
||||
processed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (state != null) {
|
||||
state.close();
|
||||
}
|
||||
}
|
||||
}, "mystate");
|
||||
|
||||
// Simple use of input2; we are not using input2 for anything other than triggering some test behavior.
|
||||
input2.foreach((key, value) -> { });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -140,6 +312,7 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorX {
|
||||
@@ -147,4 +320,19 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
@Input("input")
|
||||
KStream<?, ?> input();
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorY {
|
||||
|
||||
@Input("input1")
|
||||
KStream<?, ?> input1();
|
||||
|
||||
@Input("input2")
|
||||
KStream<?, ?> input2();
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorZ {
|
||||
|
||||
@Input("input3")
|
||||
KStream<?, ?> input3();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -60,17 +60,21 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -89,19 +93,20 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
 				"--spring.cloud.stream.bindings.input.destination=foos",
 				"--spring.cloud.stream.bindings.output.destination=counts-id",
 				"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
-				"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
-				"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
-				"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
+				"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
+						+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
+				"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
+						+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
 				"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=ProductCountApplication-xyz",
-				"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
-				"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
+				"--spring.cloud.stream.kafka.streams.binder.brokers="
+						+ embeddedKafka.getBrokersAsString());
 		try {
-			receiveAndValidateFoo(context);
-			//Assertions on StreamBuilderFactoryBean
-			StreamsBuilderFactoryBean streamsBuilderFactoryBean = context.getBean("&stream-builder-process",
-					StreamsBuilderFactoryBean.class);
-			CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean, "cleanupConfig",
-					CleanupConfig.class);
+			receiveAndValidateFoo();
+			// Assertions on StreamBuilderFactoryBean
+			StreamsBuilderFactoryBean streamsBuilderFactoryBean = context
+					.getBean("&stream-builder-ProductCountApplication-process", StreamsBuilderFactoryBean.class);
+			CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean,
+					"cleanupConfig", CleanupConfig.class);
 			assertThat(cleanup.cleanupOnStart()).isFalse();
 			assertThat(cleanup.cleanupOnStop()).isTrue();
 		}
@@ -110,13 +115,15 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
 		}
 	}

-	private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
+	private void receiveAndValidateFoo() {
 		Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
-		DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
+		DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
+				senderProps);
 		KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
 		template.setDefaultTopic("foos");
 		template.sendDefault("{\"id\":\"123\"}");
-		ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
+		ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
+				"counts-id");
 		assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
 	}

@@ -128,15 +135,16 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
 		@SendTo("output")
 		public KStream<Integer, String> process(KStream<Object, Product> input) {

-			return input
-					.filter((key, product) -> product.getId() == 123)
+			return input.filter((key, product) -> product.getId() == 123)
 					.map((key, value) -> new KeyValue<>(value, value))
-					.groupByKey(Serialized.with(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class)))
+					.groupByKey(Serialized.with(new JsonSerde<>(Product.class),
+							new JsonSerde<>(Product.class)))
 					.windowedBy(TimeWindows.of(5000))
-					.count(Materialized.as("id-count-store"))
-					.toStream()
-					.map((key, value) -> new KeyValue<>(key.key().id, "Count for product with ID 123: " + value));
+					.count(Materialized.as("id-count-store")).toStream()
+					.map((key, value) -> new KeyValue<>(key.key().id,
+							"Count for product with ID 123: " + value));
 		}

 	}

 	static class Product {
@@ -150,5 +158,7 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
 		public void setId(Integer id) {
 			this.id = id;
 		}
+
 	}
+
 }

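The updated assertions above fetch the factory bean as "&stream-builder-ProductCountApplication-process" instead of the old "&stream-builder-process": the StreamsBuilderFactoryBean name now apparently carries the name of the class declaring the processor method in addition to the method name. Below is a minimal sketch of retrieving the underlying KafkaStreams instance through that factory bean, assuming a running application context configured like the test above; the helper class and method names are illustrative only.

import org.apache.kafka.streams.KafkaStreams;

import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;

final class StreamsFactoryBeanLookup {

	// Illustrative helper: resolves the factory bean registered for the "process"
	// method of the "ProductCountApplication" processor, matching the bean name
	// asserted in the test above.
	static KafkaStreams kafkaStreamsFor(ConfigurableApplicationContext context) {
		// The "&" prefix asks Spring for the FactoryBean itself rather than the
		// object it creates.
		StreamsBuilderFactoryBean factoryBean = context.getBean(
				"&stream-builder-ProductCountApplication-process",
				StreamsBuilderFactoryBean.class);
		return factoryBean.getKafkaStreams();
	}

}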
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2019-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.stream.binder.kafka.streams.integration;
+
+import org.apache.kafka.streams.kstream.KStream;
+import org.junit.ClassRule;
+import org.junit.Test;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.WebApplicationType;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.cloud.stream.annotation.EnableBinding;
+import org.springframework.cloud.stream.annotation.Input;
+import org.springframework.cloud.stream.annotation.StreamListener;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.kafka.config.StreamsBuilderFactoryBean;
+import org.springframework.kafka.test.EmbeddedKafkaBroker;
+import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
+import org.springframework.stereotype.Component;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class MultiProcessorsWithSameNameTests {
+
+	@ClassRule
+	public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
+			"counts");
+
+	private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
+			.getEmbeddedKafka();
+
+	@Test
+	public void testBinderStartsSuccessfullyWhenTwoProcessorsWithSameNamesArePresent() {
+		SpringApplication app = new SpringApplication(
+				MultiProcessorsWithSameNameTests.WordCountProcessorApplication.class);
+		app.setWebApplicationType(WebApplicationType.NONE);
+
+		try (ConfigurableApplicationContext context = app.run("--server.port=0",
+				"--spring.jmx.enabled=false",
+				"--spring.cloud.stream.bindings.input.destination=words",
+				"--spring.cloud.stream.bindings.input-2.destination=words",
+				"--spring.cloud.stream.bindings.output.destination=counts",
+				"--spring.cloud.stream.bindings.output.contentType=application/json",
+				"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.application-id=basic-word-count",
+				"--spring.cloud.stream.kafka.streams.bindings.input-2.consumer.application-id=basic-word-count-1",
+				"--spring.cloud.stream.kafka.streams.binder.brokers="
+						+ embeddedKafka.getBrokersAsString())) {
+			StreamsBuilderFactoryBean streamsBuilderFactoryBean1 = context
+					.getBean("&stream-builder-Foo-process", StreamsBuilderFactoryBean.class);
+			assertThat(streamsBuilderFactoryBean1).isNotNull();
+			StreamsBuilderFactoryBean streamsBuilderFactoryBean2 = context
+					.getBean("&stream-builder-Bar-process", StreamsBuilderFactoryBean.class);
+			assertThat(streamsBuilderFactoryBean2).isNotNull();
+		}
+	}
+
+	@EnableBinding(KafkaStreamsProcessorX.class)
+	@EnableAutoConfiguration
+	static class WordCountProcessorApplication {
+
+		@Component
+		static class Foo {
+			@StreamListener
+			public void process(@Input("input-1") KStream<Object, String> input) {
+			}
+		}
+
+		//Second class with a stub processor that has the same name as above ("process")
+		@Component
+		static class Bar {
+			@StreamListener
+			public void process(@Input("input-2") KStream<Object, String> input) {
+			}
+		}
+	}
+
+	interface KafkaStreamsProcessorX {
+
+		@Input("input-1")
+		KStream<?, ?> input1();
+
+		@Input("input-2")
+		KStream<?, ?> input2();
+
+	}
+}
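The new test relies on each input binding getting its own application-id so that the two identically named process methods end up as distinct "stream-builder-Foo-process" and "stream-builder-Bar-process" factory beans. As a small sketch, the same per-binding ids could be supplied through SpringApplication default properties instead of command-line arguments; the launcher class below is hypothetical, and the broker list must point at a reachable Kafka cluster.

import java.util.Properties;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.context.ConfigurableApplicationContext;

final class SameNameProcessorsLauncher {

	// Hypothetical launcher mirroring the test configuration above, but feeding the
	// binder settings in as default properties rather than command-line arguments.
	static ConfigurableApplicationContext run(Class<?> configClass, String brokers) {
		SpringApplication app = new SpringApplication(configClass);
		app.setWebApplicationType(WebApplicationType.NONE);

		Properties defaults = new Properties();
		defaults.setProperty("spring.cloud.stream.bindings.input.destination", "words");
		defaults.setProperty("spring.cloud.stream.bindings.input-2.destination", "words");
		// Distinct application ids keep the two "process" methods apart.
		defaults.setProperty(
				"spring.cloud.stream.kafka.streams.bindings.input-1.consumer.application-id",
				"basic-word-count");
		defaults.setProperty(
				"spring.cloud.stream.kafka.streams.bindings.input-2.consumer.application-id",
				"basic-word-count-1");
		defaults.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", brokers);
		app.setDefaultProperties(defaults);

		return app.run();
	}

}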
Some files were not shown because too many files have changed in this diff.