Compare commits
1 Commits
v2.2.0.M1
...
v2.1.0.RC2
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
633f19b294 |
110
.mvn/wrapper/MavenWrapperDownloader.java
vendored
110
.mvn/wrapper/MavenWrapperDownloader.java
vendored
@@ -1,110 +0,0 @@
|
||||
/*
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
https://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
*/
|
||||
|
||||
import java.net.*;
|
||||
import java.io.*;
|
||||
import java.nio.channels.*;
|
||||
import java.util.Properties;
|
||||
|
||||
public class MavenWrapperDownloader {
|
||||
|
||||
/**
|
||||
* Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
|
||||
*/
|
||||
private static final String DEFAULT_DOWNLOAD_URL =
|
||||
"https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar";
|
||||
|
||||
/**
|
||||
* Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
|
||||
* use instead of the default one.
|
||||
*/
|
||||
private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
|
||||
".mvn/wrapper/maven-wrapper.properties";
|
||||
|
||||
/**
|
||||
* Path where the maven-wrapper.jar will be saved to.
|
||||
*/
|
||||
private static final String MAVEN_WRAPPER_JAR_PATH =
|
||||
".mvn/wrapper/maven-wrapper.jar";
|
||||
|
||||
/**
|
||||
* Name of the property which should be used to override the default download url for the wrapper.
|
||||
*/
|
||||
private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
|
||||
|
||||
public static void main(String args[]) {
|
||||
System.out.println("- Downloader started");
|
||||
File baseDirectory = new File(args[0]);
|
||||
System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
|
||||
|
||||
// If the maven-wrapper.properties exists, read it and check if it contains a custom
|
||||
// wrapperUrl parameter.
|
||||
File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
|
||||
String url = DEFAULT_DOWNLOAD_URL;
|
||||
if(mavenWrapperPropertyFile.exists()) {
|
||||
FileInputStream mavenWrapperPropertyFileInputStream = null;
|
||||
try {
|
||||
mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
|
||||
Properties mavenWrapperProperties = new Properties();
|
||||
mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
|
||||
url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
|
||||
} catch (IOException e) {
|
||||
System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
|
||||
} finally {
|
||||
try {
|
||||
if(mavenWrapperPropertyFileInputStream != null) {
|
||||
mavenWrapperPropertyFileInputStream.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// Ignore ...
|
||||
}
|
||||
}
|
||||
}
|
||||
System.out.println("- Downloading from: : " + url);
|
||||
|
||||
File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
|
||||
if(!outputFile.getParentFile().exists()) {
|
||||
if(!outputFile.getParentFile().mkdirs()) {
|
||||
System.out.println(
|
||||
"- ERROR creating output direcrory '" + outputFile.getParentFile().getAbsolutePath() + "'");
|
||||
}
|
||||
}
|
||||
System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
|
||||
try {
|
||||
downloadFileFromURL(url, outputFile);
|
||||
System.out.println("Done");
|
||||
System.exit(0);
|
||||
} catch (Throwable e) {
|
||||
System.out.println("- Error downloading");
|
||||
e.printStackTrace();
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
private static void downloadFileFromURL(String urlString, File destination) throws Exception {
|
||||
URL website = new URL(urlString);
|
||||
ReadableByteChannel rbc;
|
||||
rbc = Channels.newChannel(website.openStream());
|
||||
FileOutputStream fos = new FileOutputStream(destination);
|
||||
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
|
||||
fos.close();
|
||||
rbc.close();
|
||||
}
|
||||
|
||||
}
|
||||
BIN
.mvn/wrapper/maven-wrapper.jar
vendored
Executable file → Normal file
BIN
.mvn/wrapper/maven-wrapper.jar
vendored
Executable file → Normal file
Binary file not shown.
2
.mvn/wrapper/maven-wrapper.properties
vendored
Executable file → Normal file
2
.mvn/wrapper/maven-wrapper.properties
vendored
Executable file → Normal file
@@ -1 +1 @@
|
||||
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
|
||||
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.3.3/apache-maven-3.3.3-bin.zip
|
||||
@@ -21,7 +21,7 @@
|
||||
<repository>
|
||||
<id>spring-snapshots</id>
|
||||
<name>Spring Snapshots</name>
|
||||
<url>https://repo.spring.io/libs-snapshot-local</url>
|
||||
<url>http://repo.spring.io/libs-snapshot-local</url>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
@@ -29,7 +29,7 @@
|
||||
<repository>
|
||||
<id>spring-milestones</id>
|
||||
<name>Spring Milestones</name>
|
||||
<url>https://repo.spring.io/libs-milestone-local</url>
|
||||
<url>http://repo.spring.io/libs-milestone-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -37,7 +37,7 @@
|
||||
<repository>
|
||||
<id>spring-releases</id>
|
||||
<name>Spring Releases</name>
|
||||
<url>https://repo.spring.io/release</url>
|
||||
<url>http://repo.spring.io/release</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -47,7 +47,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-snapshots</id>
|
||||
<name>Spring Snapshots</name>
|
||||
<url>https://repo.spring.io/libs-snapshot-local</url>
|
||||
<url>http://repo.spring.io/libs-snapshot-local</url>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
@@ -55,7 +55,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-milestones</id>
|
||||
<name>Spring Milestones</name>
|
||||
<url>https://repo.spring.io/libs-milestone-local</url>
|
||||
<url>http://repo.spring.io/libs-milestone-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
|
||||
4
LICENSE
4
LICENSE
@@ -1,6 +1,6 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
https://www.apache.org/licenses/
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
@@ -192,7 +192,7 @@
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
https://www.apache.org/licenses/LICENSE-2.0
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
|
||||
91
README.adoc
91
README.adoc
@@ -1,8 +1,4 @@
|
||||
////
|
||||
DO NOT EDIT THIS FILE. IT WAS GENERATED.
|
||||
Manual changes to this file will be lost when it is generated again.
|
||||
Edit the files in the src/main/asciidoc/ directory instead.
|
||||
////
|
||||
// Do not edit this file (e.g. go instead to src/main/asciidoc)
|
||||
|
||||
:jdkversion: 1.8
|
||||
:github-tag: master
|
||||
@@ -166,13 +162,22 @@ The following properties are available for Kafka consumers only and
|
||||
must be prefixed with `spring.cloud.stream.kafka.bindings.<channelName>.consumer.`.
|
||||
|
||||
admin.configuration::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.properties`, and support for it will be removed in a future version.
|
||||
A `Map` of Kafka topic properties used when provisioning topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.admin.configuration.message.format.version=0.9.0.0`
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replicas-assignment::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replicas-assignment`, and support for it will be removed in a future version.
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See the `NewTopic` Javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replication-factor::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replication-factor`, and support for it will be removed in a future version.
|
||||
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
autoRebalanceEnabled::
|
||||
When `true`, topic partitions is automatically rebalanced between the members of a consumer group.
|
||||
@@ -265,21 +270,6 @@ Note, the time taken to detect new topics that match the pattern is controlled b
|
||||
This can be configured using the `configuration` property above.
|
||||
+
|
||||
Default: `false`
|
||||
topic.properties::
|
||||
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.topic.properties.message.format.version=0.9.0.0`
|
||||
+
|
||||
Default: none.
|
||||
topic.replicas-assignment::
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See the `NewTopic` Javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
topic.replication-factor::
|
||||
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
[[kafka-producer-properties]]
|
||||
==== Kafka Producer Properties
|
||||
@@ -288,13 +278,22 @@ The following properties are available for Kafka producers only and
|
||||
must be prefixed with `spring.cloud.stream.kafka.bindings.<channelName>.producer.`.
|
||||
|
||||
admin.configuration::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.properties`, and support for it will be removed in a future version.
|
||||
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.admin.configuration.message.format.version=0.9.0.0`
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replicas-assignment::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replicas-assignment`, and support for it will be removed in a future version.
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See `NewTopic` javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replication-factor::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replication-factor`, and support for it will be removed in a future version.
|
||||
The replication factor to use when provisioning new topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
bufferSize::
|
||||
Upper limit, in bytes, of how much data the Kafka producer attempts to batch before sending.
|
||||
@@ -327,21 +326,6 @@ configuration::
|
||||
Map with a key/value pair containing generic Kafka producer properties.
|
||||
+
|
||||
Default: Empty map.
|
||||
topic.properties::
|
||||
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.output.producer.topic.properties.message.format.version=0.9.0.0`
|
||||
+
|
||||
topic.replicas-assignment::
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See the `NewTopic` Javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
topic.replication-factor::
|
||||
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
|
||||
NOTE: The Kafka binder uses the `partitionCount` setting of the producer as a hint to create a topic with the given partition count (in conjunction with the `minPartitionCount`, the maximum of the two being the value being used).
|
||||
Exercise caution when configuring both `minPartitionCount` for a binder and `partitionCount` for an application, as the larger value is used.
|
||||
@@ -349,13 +333,6 @@ If a topic already exists with a smaller partition count and `autoAddPartitions`
|
||||
If a topic already exists with a smaller partition count and `autoAddPartitions` is enabled, new partitions are added.
|
||||
If a topic already exists with a larger number of partitions than the maximum of (`minPartitionCount` or `partitionCount`), the existing partition count is used.
|
||||
|
||||
compression::
|
||||
Set the `compression.type` producer property.
|
||||
Supported values are `none`, `gzip`, `snappy` and `lz4`.
|
||||
If you override the `kafka-clients` jar to 2.1.0 (or later), as discussed in the https://docs.spring.io/spring-kafka/docs/2.2.x/reference/html/deps-for-21x.html[Spring for Apache Kafka documentation], and wish to use `zstd` compression, use `spring.cloud.stream.kafka.bindings.<binding-name>.producer.configuration.compression.type=zstd`.
|
||||
+
|
||||
Default: `none`.
|
||||
|
||||
==== Usage examples
|
||||
|
||||
In this section, we show the use of the preceding properties for specific scenarios.
|
||||
@@ -391,7 +368,7 @@ public class ManuallyAcknowdledgingConsumer {
|
||||
===== Example: Security Configuration
|
||||
|
||||
Apache Kafka 0.9 supports secure connections between client and brokers.
|
||||
To take advantage of this feature, follow the guidelines in the https://kafka.apache.org/090/documentation.html#security_configclients[Apache Kafka Documentation] as well as the Kafka 0.9 https://docs.confluent.io/2.0.0/kafka/security.html[security guidelines from the Confluent documentation].
|
||||
To take advantage of this feature, follow the guidelines in the http://kafka.apache.org/090/documentation.html#security_configclients[Apache Kafka Documentation] as well as the Kafka 0.9 http://docs.confluent.io/2.0.0/kafka/security.html[security guidelines from the Confluent documentation].
|
||||
Use the `spring.cloud.stream.kafka.binder.configuration` option to set security properties for all clients created by the binder.
|
||||
|
||||
For example, to set `security.protocol` to `SASL_SSL`, set the following property:
|
||||
@@ -403,7 +380,7 @@ spring.cloud.stream.kafka.binder.configuration.security.protocol=SASL_SSL
|
||||
|
||||
All the other security properties can be set in a similar manner.
|
||||
|
||||
When using Kerberos, follow the instructions in the https://kafka.apache.org/090/documentation.html#security_sasl_clientconfig[reference documentation] for creating and referencing the JAAS configuration.
|
||||
When using Kerberos, follow the instructions in the http://kafka.apache.org/090/documentation.html#security_sasl_clientconfig[reference documentation] for creating and referencing the JAAS configuration.
|
||||
|
||||
Spring Cloud Stream supports passing JAAS configuration information to the application by using a JAAS configuration file and using Spring Boot properties.
|
||||
|
||||
@@ -646,7 +623,7 @@ source control.
|
||||
|
||||
The projects that require middleware generally include a
|
||||
`docker-compose.yml`, so consider using
|
||||
https://compose.docker.io/[Docker Compose] to run the middeware servers
|
||||
http://compose.docker.io/[Docker Compose] to run the middeware servers
|
||||
in Docker containers.
|
||||
|
||||
=== Documentation
|
||||
@@ -655,13 +632,13 @@ There is a "full" profile that will generate documentation.
|
||||
|
||||
=== Working with the code
|
||||
If you don't have an IDE preference we would recommend that you use
|
||||
https://www.springsource.com/developer/sts[Spring Tools Suite] or
|
||||
https://eclipse.org[Eclipse] when working with the code. We use the
|
||||
https://eclipse.org/m2e/[m2eclipe] eclipse plugin for maven support. Other IDEs and tools
|
||||
http://www.springsource.com/developer/sts[Spring Tools Suite] or
|
||||
http://eclipse.org[Eclipse] when working with the code. We use the
|
||||
http://eclipse.org/m2e/[m2eclipe] eclipse plugin for maven support. Other IDEs and tools
|
||||
should also work without issue.
|
||||
|
||||
==== Importing into eclipse with m2eclipse
|
||||
We recommend the https://eclipse.org/m2e/[m2eclipe] eclipse plugin when working with
|
||||
We recommend the http://eclipse.org/m2e/[m2eclipe] eclipse plugin when working with
|
||||
eclipse. If you don't already have m2eclipse installed it is available from the "eclipse
|
||||
marketplace".
|
||||
|
||||
@@ -714,7 +691,7 @@ added after the original pull request but before a merge.
|
||||
`eclipse-code-formatter.xml` file from the
|
||||
https://github.com/spring-cloud/build/tree/master/eclipse-coding-conventions.xml[Spring
|
||||
Cloud Build] project. If using IntelliJ, you can use the
|
||||
https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter
|
||||
http://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter
|
||||
Plugin] to import the same file.
|
||||
* Make sure all new `.java` files to have a simple Javadoc class comment with at least an
|
||||
`@author` tag identifying you, and preferably at least a paragraph on what the class is
|
||||
@@ -727,7 +704,7 @@ added after the original pull request but before a merge.
|
||||
* A few unit tests would help a lot as well -- someone has to do it.
|
||||
* If no-one else is using your branch, please rebase it against the current master (or
|
||||
other target branch in the main project).
|
||||
* When writing a commit message please follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions],
|
||||
* When writing a commit message please follow http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions],
|
||||
if you are fixing an existing issue please add `Fixes gh-XXXX` at the end of the commit
|
||||
message (where XXXX is the issue number).
|
||||
|
||||
|
||||
292
docs/pom.xml
292
docs/pom.xml
@@ -1,13 +1,13 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-docs</artifactId>
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.2.0.M1</version>
|
||||
<version>2.1.0.RC2</version>
|
||||
</parent>
|
||||
<packaging>pom</packaging>
|
||||
<name>spring-cloud-stream-binder-kafka-docs</name>
|
||||
@@ -15,11 +15,8 @@
|
||||
<properties>
|
||||
<docs.main>spring-cloud-stream-binder-kafka</docs.main>
|
||||
<main.basedir>${basedir}/..</main.basedir>
|
||||
<spring-doc-resources.version>0.1.1.RELEASE</spring-doc-resources.version>
|
||||
<spring-asciidoctor-extensions.version>0.1.0.RELEASE
|
||||
</spring-asciidoctor-extensions.version>
|
||||
<asciidoctorj-pdf.version>1.5.0-alpha.16</asciidoctorj-pdf.version>
|
||||
</properties>
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>docs</id>
|
||||
@@ -28,294 +25,21 @@
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<version>${maven-dependency-plugin.version}</version>
|
||||
<inherited>false</inherited>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>unpack-docs</id>
|
||||
<phase>generate-resources</phase>
|
||||
<goals>
|
||||
<goal>unpack</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<artifactItems>
|
||||
<artifactItem>
|
||||
<groupId>org.springframework.cloud
|
||||
</groupId>
|
||||
<artifactId>spring-cloud-build-docs
|
||||
</artifactId>
|
||||
<version>${spring-cloud-build.version}
|
||||
</version>
|
||||
<classifier>sources</classifier>
|
||||
<type>jar</type>
|
||||
<overWrite>false</overWrite>
|
||||
<outputDirectory>${docs.resources.dir}
|
||||
</outputDirectory>
|
||||
</artifactItem>
|
||||
</artifactItems>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>unpack-docs-resources</id>
|
||||
<phase>generate-resources</phase>
|
||||
<goals>
|
||||
<goal>unpack</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<artifactItems>
|
||||
<artifactItem>
|
||||
<groupId>io.spring.docresources</groupId>
|
||||
<artifactId>spring-doc-resources</artifactId>
|
||||
<version>${spring-doc-resources.version}</version>
|
||||
<type>zip</type>
|
||||
<overWrite>true</overWrite>
|
||||
<outputDirectory>${project.build.directory}/refdocs/</outputDirectory>
|
||||
</artifactItem>
|
||||
</artifactItems>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-resources-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>copy-asciidoc-resources</id>
|
||||
<phase>generate-resources</phase>
|
||||
<goals>
|
||||
<goal>copy-resources</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>${project.build.directory}/refdocs/</outputDirectory>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>src/main/asciidoc</directory>
|
||||
<filtering>false</filtering>
|
||||
<excludes>
|
||||
<exclude>ghpages.sh</exclude>
|
||||
</excludes>
|
||||
</resource>
|
||||
</resources>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<version>${asciidoctor-maven-plugin.version}</version>
|
||||
<inherited>false</inherited>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>io.spring.asciidoctor</groupId>
|
||||
<artifactId>spring-asciidoctor-extensions</artifactId>
|
||||
<version>${spring-asciidoctor-extensions.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctorj-pdf</artifactId>
|
||||
<version>${asciidoctorj-pdf.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<configuration>
|
||||
<sourceDirectory>${project.build.directory}/refdocs/</sourceDirectory>
|
||||
<sourceDocumentName>${docs.main}.adoc</sourceDocumentName>
|
||||
<attributes>
|
||||
<spring-cloud-stream-version>${project.version}</spring-cloud-stream-version>
|
||||
<!-- <docs-url>https://cloud.spring.io/</docs-url> -->
|
||||
<!-- <docs-version></docs-version> -->
|
||||
<docs-version>${project.version}/</docs-version>
|
||||
<docs-url>https://cloud.spring.io/spring-cloud-static/</docs-url>
|
||||
</attributes>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>generate-html-documentation</id>
|
||||
<phase>prepare-package</phase>
|
||||
<goals>
|
||||
<goal>process-asciidoc</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<backend>html5</backend>
|
||||
<sourceHighlighter>highlight.js</sourceHighlighter>
|
||||
<doctype>book</doctype>
|
||||
<attributes>
|
||||
// these attributes are required to use the doc resources
|
||||
<docinfo>shared</docinfo>
|
||||
<stylesdir>css/</stylesdir>
|
||||
<stylesheet>spring.css</stylesheet>
|
||||
<linkcss>true</linkcss>
|
||||
<icons>font</icons>
|
||||
<highlightjsdir>js/highlight</highlightjsdir>
|
||||
<highlightjs-theme>atom-one-dark-reasonable</highlightjs-theme>
|
||||
<allow-uri-read>true</allow-uri-read>
|
||||
<nofooter />
|
||||
<toc>left</toc>
|
||||
<toc-levels>4</toc-levels>
|
||||
<spring-cloud-version>${project.version}</spring-cloud-version>
|
||||
<sectlinks>true</sectlinks>
|
||||
</attributes>
|
||||
<outputFile>${docs.main}.html</outputFile>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>generate-docbook</id>
|
||||
<phase>none</phase>
|
||||
<goals>
|
||||
<goal>process-asciidoc</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>generate-index</id>
|
||||
<phase>none</phase>
|
||||
<goals>
|
||||
<goal>process-asciidoc</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>com.agilejava.docbkx</groupId>
|
||||
<artifactId>docbkx-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-antrun-plugin</artifactId>
|
||||
<version>${maven-antrun-plugin.version}</version>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>ant-contrib</groupId>
|
||||
<artifactId>ant-contrib</artifactId>
|
||||
<version>1.0b3</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>ant</groupId>
|
||||
<artifactId>ant</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ant</groupId>
|
||||
<artifactId>ant-nodeps</artifactId>
|
||||
<version>1.8.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.tigris.antelope</groupId>
|
||||
<artifactId>antelopetasks</artifactId>
|
||||
<version>3.2.10</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jruby</groupId>
|
||||
<artifactId>jruby-complete</artifactId>
|
||||
<version>1.7.17</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctorj</artifactId>
|
||||
<version>1.5.8</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>readme</id>
|
||||
<phase>process-resources</phase>
|
||||
<goals>
|
||||
<goal>run</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<target>
|
||||
<java classname="org.jruby.Main" failonerror="yes">
|
||||
<arg
|
||||
value="${docs.resources.dir}/ruby/generate_readme.sh" />
|
||||
<arg value="-o" />
|
||||
<arg value="${main.basedir}/README.adoc" />
|
||||
</java>
|
||||
</target>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>assert-no-unresolved-links</id>
|
||||
<phase>prepare-package</phase>
|
||||
<goals>
|
||||
<goal>run</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<target>
|
||||
<fileset id="unresolved.file"
|
||||
dir="${basedir}/target/generated-docs/" includes="**/*.html">
|
||||
<contains text="Unresolved" />
|
||||
</fileset>
|
||||
<fail message="[Unresolved] Found...failing">
|
||||
<condition>
|
||||
<resourcecount when="greater" count="0"
|
||||
refid="unresolved.file" />
|
||||
</condition>
|
||||
</fail>
|
||||
</target>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>setup-maven-properties</id>
|
||||
<phase>validate</phase>
|
||||
<goals>
|
||||
<goal>run</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<exportAntProperties>true</exportAntProperties>
|
||||
<target>
|
||||
<taskdef
|
||||
resource="net/sf/antcontrib/antcontrib.properties" />
|
||||
<taskdef name="stringutil"
|
||||
classname="ise.antelope.tasks.StringUtilTask" />
|
||||
<var name="version-type" value="${project.version}" />
|
||||
<propertyregex property="version-type"
|
||||
override="true" input="${version-type}" regexp=".*\.(.*)"
|
||||
replace="\1" />
|
||||
<propertyregex property="version-type"
|
||||
override="true" input="${version-type}" regexp="(M)\d+"
|
||||
replace="MILESTONE" />
|
||||
<propertyregex property="version-type"
|
||||
override="true" input="${version-type}" regexp="(RC)\d+"
|
||||
replace="MILESTONE" />
|
||||
<propertyregex property="version-type"
|
||||
override="true" input="${version-type}" regexp="BUILD-(.*)"
|
||||
replace="SNAPSHOT" />
|
||||
<stringutil string="${version-type}"
|
||||
property="spring-cloud-repo">
|
||||
<lowercase />
|
||||
</stringutil>
|
||||
<var name="github-tag" value="v${project.version}" />
|
||||
<propertyregex property="github-tag"
|
||||
override="true" input="${github-tag}" regexp=".*SNAPSHOT"
|
||||
replace="master" />
|
||||
</target>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>copy-css</id>
|
||||
<phase>none</phase>
|
||||
<goals>
|
||||
<goal>run</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>generate-documentation-index</id>
|
||||
<phase>none</phase>
|
||||
<goals>
|
||||
<goal>run</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>copy-generated-html</id>
|
||||
<phase>none</phase>
|
||||
<goals>
|
||||
<goal>run</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
|
||||
</executions>
|
||||
<inherited>false</inherited>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>build-helper-maven-plugin</artifactId>
|
||||
|
||||
@@ -34,7 +34,7 @@ source control.
|
||||
|
||||
The projects that require middleware generally include a
|
||||
`docker-compose.yml`, so consider using
|
||||
https://compose.docker.io/[Docker Compose] to run the middeware servers
|
||||
http://compose.docker.io/[Docker Compose] to run the middeware servers
|
||||
in Docker containers.
|
||||
|
||||
=== Documentation
|
||||
@@ -43,13 +43,13 @@ There is a "full" profile that will generate documentation.
|
||||
|
||||
=== Working with the code
|
||||
If you don't have an IDE preference we would recommend that you use
|
||||
https://www.springsource.com/developer/sts[Spring Tools Suite] or
|
||||
https://eclipse.org[Eclipse] when working with the code. We use the
|
||||
https://eclipse.org/m2e/[m2eclipe] eclipse plugin for maven support. Other IDEs and tools
|
||||
http://www.springsource.com/developer/sts[Spring Tools Suite] or
|
||||
http://eclipse.org[Eclipse] when working with the code. We use the
|
||||
http://eclipse.org/m2e/[m2eclipe] eclipse plugin for maven support. Other IDEs and tools
|
||||
should also work without issue.
|
||||
|
||||
==== Importing into eclipse with m2eclipse
|
||||
We recommend the https://eclipse.org/m2e/[m2eclipe] eclipse plugin when working with
|
||||
We recommend the http://eclipse.org/m2e/[m2eclipe] eclipse plugin when working with
|
||||
eclipse. If you don't already have m2eclipse installed it is available from the "eclipse
|
||||
marketplace".
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ added after the original pull request but before a merge.
|
||||
`eclipse-code-formatter.xml` file from the
|
||||
https://github.com/spring-cloud/build/tree/master/eclipse-coding-conventions.xml[Spring
|
||||
Cloud Build] project. If using IntelliJ, you can use the
|
||||
https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter
|
||||
http://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter
|
||||
Plugin] to import the same file.
|
||||
* Make sure all new `.java` files to have a simple Javadoc class comment with at least an
|
||||
`@author` tag identifying you, and preferably at least a paragraph on what the class is
|
||||
@@ -37,6 +37,6 @@ added after the original pull request but before a merge.
|
||||
* A few unit tests would help a lot as well -- someone has to do it.
|
||||
* If no-one else is using your branch, please rebase it against the current master (or
|
||||
other target branch in the main project).
|
||||
* When writing a commit message please follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions],
|
||||
* When writing a commit message please follow http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions],
|
||||
if you are fixing an existing issue please add `Fixes gh-XXXX` at the end of the commit
|
||||
message (where XXXX is the issue number).
|
||||
@@ -25,8 +25,10 @@ spring.cloud.stream.bindings.input.group=so8400replay
|
||||
spring.cloud.stream.bindings.input.destination=error.so8400out.so8400
|
||||
|
||||
spring.cloud.stream.bindings.output.destination=so8400out
|
||||
spring.cloud.stream.bindings.output.producer.partitioned=true
|
||||
|
||||
spring.cloud.stream.bindings.parkingLot.destination=so8400in.parkingLot
|
||||
spring.cloud.stream.bindings.parkingLot.producer.partitioned=true
|
||||
|
||||
spring.cloud.stream.kafka.binder.configuration.auto.offset.reset=earliest
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@ function check_if_anything_to_sync() {
|
||||
|
||||
function retrieve_current_branch() {
|
||||
# Code getting the name of the current branch. For master we want to publish as we did until now
|
||||
# https://stackoverflow.com/questions/1593051/how-to-programmatically-determine-the-current-checked-out-git-branch
|
||||
# http://stackoverflow.com/questions/1593051/how-to-programmatically-determine-the-current-checked-out-git-branch
|
||||
# If there is a branch already passed will reuse it - otherwise will try to find it
|
||||
CURRENT_BRANCH=${BRANCH}
|
||||
if [[ -z "${CURRENT_BRANCH}" ]] ; then
|
||||
@@ -147,7 +147,7 @@ function copy_docs_for_current_version() {
|
||||
COMMIT_CHANGES="yes"
|
||||
else
|
||||
echo -e "Current branch is [${CURRENT_BRANCH}]"
|
||||
# https://stackoverflow.com/questions/29300806/a-bash-script-to-check-if-a-string-is-present-in-a-comma-separated-list-of-strin
|
||||
# http://stackoverflow.com/questions/29300806/a-bash-script-to-check-if-a-string-is-present-in-a-comma-separated-list-of-strin
|
||||
if [[ ",${WHITELISTED_BRANCHES_VALUE}," = *",${CURRENT_BRANCH},"* ]] ; then
|
||||
mkdir -p ${ROOT_FOLDER}/${CURRENT_BRANCH}
|
||||
echo -e "Branch [${CURRENT_BRANCH}] is whitelisted! Will copy the current docs to the [${CURRENT_BRANCH}] folder"
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
== Kafka Streams Binder
|
||||
|
||||
|
||||
=== Usage
|
||||
|
||||
For using the Kafka Streams binder, you just need to add it to your Spring Cloud Stream application, using the following
|
||||
@@ -21,7 +20,7 @@ Spring Cloud Stream's Apache Kafka support also includes a binder implementation
|
||||
Streams binding. With this native integration, a Spring Cloud Stream "processor" application can directly use the
|
||||
https://kafka.apache.org/documentation/streams/developer-guide[Apache Kafka Streams] APIs in the core business logic.
|
||||
|
||||
Kafka Streams binder implementation builds on the foundation provided by the https://docs.spring.io/spring-kafka/reference/html/_reference.html#kafka-streams[Kafka Streams in Spring Kafka]
|
||||
Kafka Streams binder implementation builds on the foundation provided by the http://docs.spring.io/spring-kafka/reference/html/_reference.html#kafka-streams[Kafka Streams in Spring Kafka]
|
||||
project.
|
||||
|
||||
Kafka Streams binder provides binding capabilities for the three major types in Kafka Streams - KStream, KTable and GlobalKTable.
|
||||
@@ -694,31 +693,3 @@ StreamsBuilderFactoryBean streamsBuilderFactoryBean = context.getBean("&stream-b
|
||||
By default, the `Kafkastreams.cleanup()` method is called when the binding is stopped.
|
||||
See https://docs.spring.io/spring-kafka/reference/html/_reference.html#_configuration[the Spring Kafka documentation].
|
||||
To modify this behavior simply add a single `CleanupConfig` `@Bean` (configured to clean up on start, stop, or neither) to the application context; the bean will be detected and wired into the factory bean.
|
||||
|
||||
=== Health Indicator
|
||||
|
||||
The health indicator requires the dependency `spring-boot-starter-actuator`. For maven use:
|
||||
[source,xml]
|
||||
----
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-actuator</artifactId>
|
||||
</dependency>
|
||||
----
|
||||
|
||||
Spring Cloud Stream Binder Kafka Streams provides a health indicator to check the state of the underlying Kafka threads.
|
||||
Spring Cloud Stream defines a property `management.health.binders.enabled` to enable the health indicator. See the
|
||||
https://docs.spring.io/spring-cloud-stream/docs/current/reference/htmlsingle/#_health_indicator[Spring Cloud Stream documentation].
|
||||
|
||||
The health indicator provides the following details for each Kafka threads:
|
||||
|
||||
* Thread name
|
||||
* Thread state: `CREATED`, `RUNNING`, `PARTITIONS_REVOKED`, `PARTITIONS_ASSIGNED`, `PENDING_SHUTDOWN` or `DEAD`
|
||||
* Active tasks: task ID and partitions
|
||||
* Standby tasks: task ID and partitions
|
||||
|
||||
By default, only the global status is visible (`UP` or `DOWN`). To show the details, the property `management.endpoint.health.show-details` must be set to `ALWAYS` or `WHEN_AUTHORIZED`.
|
||||
For more details about the health information, see the
|
||||
https://docs.spring.io/spring-boot/docs/current/reference/html/production-ready-endpoints.html#production-ready-health[Spring Boot Actuator documentation].
|
||||
|
||||
NOTE: The status of the health indicator is `UP` if all the Kafka threads registered are in the `RUNNING` state.
|
||||
|
||||
@@ -146,13 +146,22 @@ The following properties are available for Kafka consumers only and
|
||||
must be prefixed with `spring.cloud.stream.kafka.bindings.<channelName>.consumer.`.
|
||||
|
||||
admin.configuration::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.properties`, and support for it will be removed in a future version.
|
||||
A `Map` of Kafka topic properties used when provisioning topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.admin.configuration.message.format.version=0.9.0.0`
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replicas-assignment::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replicas-assignment`, and support for it will be removed in a future version.
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See the `NewTopic` Javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replication-factor::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replication-factor`, and support for it will be removed in a future version.
|
||||
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
autoRebalanceEnabled::
|
||||
When `true`, topic partitions is automatically rebalanced between the members of a consumer group.
|
||||
@@ -245,21 +254,6 @@ Note, the time taken to detect new topics that match the pattern is controlled b
|
||||
This can be configured using the `configuration` property above.
|
||||
+
|
||||
Default: `false`
|
||||
topic.properties::
|
||||
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.topic.properties.message.format.version=0.9.0.0`
|
||||
+
|
||||
Default: none.
|
||||
topic.replicas-assignment::
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See the `NewTopic` Javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
topic.replication-factor::
|
||||
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
[[kafka-producer-properties]]
|
||||
==== Kafka Producer Properties
|
||||
@@ -268,13 +262,22 @@ The following properties are available for Kafka producers only and
|
||||
must be prefixed with `spring.cloud.stream.kafka.bindings.<channelName>.producer.`.
|
||||
|
||||
admin.configuration::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.properties`, and support for it will be removed in a future version.
|
||||
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.input.consumer.admin.configuration.message.format.version=0.9.0.0`
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replicas-assignment::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replicas-assignment`, and support for it will be removed in a future version.
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See `NewTopic` javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
|
||||
admin.replication-factor::
|
||||
Since version 2.1.1, this property is deprecated in favor of `topic.replication-factor`, and support for it will be removed in a future version.
|
||||
The replication factor to use when provisioning new topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
bufferSize::
|
||||
Upper limit, in bytes, of how much data the Kafka producer attempts to batch before sending.
|
||||
@@ -307,21 +310,6 @@ configuration::
|
||||
Map with a key/value pair containing generic Kafka producer properties.
|
||||
+
|
||||
Default: Empty map.
|
||||
topic.properties::
|
||||
A `Map` of Kafka topic properties used when provisioning new topics -- for example, `spring.cloud.stream.kafka.bindings.output.producer.topic.properties.message.format.version=0.9.0.0`
|
||||
+
|
||||
topic.replicas-assignment::
|
||||
A Map<Integer, List<Integer>> of replica assignments, with the key being the partition and the value being the assignments.
|
||||
Used when provisioning new topics.
|
||||
See the `NewTopic` Javadocs in the `kafka-clients` jar.
|
||||
+
|
||||
Default: none.
|
||||
topic.replication-factor::
|
||||
The replication factor to use when provisioning topics. Overrides the binder-wide setting.
|
||||
Ignored if `replicas-assignments` is present.
|
||||
+
|
||||
Default: none (the binder-wide default of 1 is used).
|
||||
|
||||
|
||||
NOTE: The Kafka binder uses the `partitionCount` setting of the producer as a hint to create a topic with the given partition count (in conjunction with the `minPartitionCount`, the maximum of the two being the value being used).
|
||||
Exercise caution when configuring both `minPartitionCount` for a binder and `partitionCount` for an application, as the larger value is used.
|
||||
@@ -329,13 +317,6 @@ If a topic already exists with a smaller partition count and `autoAddPartitions`
|
||||
If a topic already exists with a smaller partition count and `autoAddPartitions` is enabled, new partitions are added.
|
||||
If a topic already exists with a larger number of partitions than the maximum of (`minPartitionCount` or `partitionCount`), the existing partition count is used.
|
||||
|
||||
compression::
|
||||
Set the `compression.type` producer property.
|
||||
Supported values are `none`, `gzip`, `snappy` and `lz4`.
|
||||
If you override the `kafka-clients` jar to 2.1.0 (or later), as discussed in the https://docs.spring.io/spring-kafka/docs/2.2.x/reference/html/deps-for-21x.html[Spring for Apache Kafka documentation], and wish to use `zstd` compression, use `spring.cloud.stream.kafka.bindings.<binding-name>.producer.configuration.compression.type=zstd`.
|
||||
+
|
||||
Default: `none`.
|
||||
|
||||
==== Usage examples
|
||||
|
||||
In this section, we show the use of the preceding properties for specific scenarios.
|
||||
@@ -371,7 +352,7 @@ public class ManuallyAcknowdledgingConsumer {
|
||||
===== Example: Security Configuration
|
||||
|
||||
Apache Kafka 0.9 supports secure connections between client and brokers.
|
||||
To take advantage of this feature, follow the guidelines in the https://kafka.apache.org/090/documentation.html#security_configclients[Apache Kafka Documentation] as well as the Kafka 0.9 https://docs.confluent.io/2.0.0/kafka/security.html[security guidelines from the Confluent documentation].
|
||||
To take advantage of this feature, follow the guidelines in the http://kafka.apache.org/090/documentation.html#security_configclients[Apache Kafka Documentation] as well as the Kafka 0.9 http://docs.confluent.io/2.0.0/kafka/security.html[security guidelines from the Confluent documentation].
|
||||
Use the `spring.cloud.stream.kafka.binder.configuration` option to set security properties for all clients created by the binder.
|
||||
|
||||
For example, to set `security.protocol` to `SASL_SSL`, set the following property:
|
||||
@@ -383,7 +364,7 @@ spring.cloud.stream.kafka.binder.configuration.security.protocol=SASL_SSL
|
||||
|
||||
All the other security properties can be set in a similar manner.
|
||||
|
||||
When using Kerberos, follow the instructions in the https://kafka.apache.org/090/documentation.html#security_sasl_clientconfig[reference documentation] for creating and referencing the JAAS configuration.
|
||||
When using Kerberos, follow the instructions in the http://kafka.apache.org/090/documentation.html#security_sasl_clientconfig[reference documentation] for creating and referencing the JAAS configuration.
|
||||
|
||||
Spring Cloud Stream supports passing JAAS configuration information to the application by using a JAAS configuration file and using Spring Boot properties.
|
||||
|
||||
|
||||
@@ -49,6 +49,7 @@ spring:
|
||||
output:
|
||||
destination: partitioned.topic
|
||||
producer:
|
||||
partitioned: true
|
||||
partition-key-expression: headers['partitionKey']
|
||||
partition-count: 12
|
||||
----
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
|
||||
[[spring-cloud-stream-binder-kafka-reference]]
|
||||
= Spring Cloud Stream Kafka Binder Reference Guide
|
||||
Sabby Anandan, Marius Bogoevici, Eric Bottard, Mark Fisher, Ilayaperumal Gopinathan, Gunnar Hillert, Mark Pollack, Patrick Peralta, Glenn Renfro, Thomas Risberg, Dave Syer, David Turanski, Janne Valkealahti, Benjamin Klein, Henryk Konsek, Gary Russell, Arnaud Jardiné, Soby Chacko
|
||||
Sabby Anandan, Marius Bogoevici, Eric Bottard, Mark Fisher, Ilayaperumal Gopinathan, Gunnar Hillert, Mark Pollack, Patrick Peralta, Glenn Renfro, Thomas Risberg, Dave Syer, David Turanski, Janne Valkealahti, Benjamin Klein, Henryk Konsek, Gary Russell
|
||||
:doctype: book
|
||||
:toc:
|
||||
:toclevels: 4
|
||||
@@ -21,22 +21,16 @@ Sabby Anandan, Marius Bogoevici, Eric Bottard, Mark Fisher, Ilayaperumal Gopinat
|
||||
:spring-cloud-stream-binder-kafka-repo: snapshot
|
||||
:github-tag: master
|
||||
:spring-cloud-stream-binder-kafka-docs-version: current
|
||||
:spring-cloud-stream-binder-kafka-docs: https://docs.spring.io/spring-cloud-stream-binder-kafka/docs/{spring-cloud-stream-binder-kafka-docs-version}/reference
|
||||
:spring-cloud-stream-binder-kafka-docs-current: https://docs.spring.io/spring-cloud-stream-binder-kafka/docs/current-SNAPSHOT/reference/html/
|
||||
:spring-cloud-stream-binder-kafka-docs: http://docs.spring.io/spring-cloud-stream-binder-kafka/docs/{spring-cloud-stream-binder-kafka-docs-version}/reference
|
||||
:spring-cloud-stream-binder-kafka-docs-current: http://docs.spring.io/spring-cloud-stream-binder-kafka/docs/current-SNAPSHOT/reference/html/
|
||||
:github-repo: spring-cloud/spring-cloud-stream-binder-kafka
|
||||
:github-raw: https://raw.github.com/{github-repo}/{github-tag}
|
||||
:github-code: https://github.com/{github-repo}/tree/{github-tag}
|
||||
:github-wiki: https://github.com/{github-repo}/wiki
|
||||
:github-master-code: https://github.com/{github-repo}/tree/master
|
||||
:github-raw: http://raw.github.com/{github-repo}/{github-tag}
|
||||
:github-code: http://github.com/{github-repo}/tree/{github-tag}
|
||||
:github-wiki: http://github.com/{github-repo}/wiki
|
||||
:github-master-code: http://github.com/{github-repo}/tree/master
|
||||
:sc-ext: java
|
||||
// ======================================================================================
|
||||
|
||||
|
||||
*{spring-cloud-stream-version}*
|
||||
|
||||
[#index-link]
|
||||
{docs-url}spring-cloud-stream/{docs-version}home.html
|
||||
|
||||
= Reference Guide
|
||||
include::overview.adoc[]
|
||||
|
||||
|
||||
171
mvnw
vendored
171
mvnw
vendored
@@ -8,7 +8,7 @@
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
@@ -54,16 +54,38 @@ case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
#
|
||||
# Look for the Apple JDKs first to preserve the existing behaviour, and then look
|
||||
# for the new JDKs provided by Oracle.
|
||||
#
|
||||
if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then
|
||||
#
|
||||
# Apple JDKs
|
||||
#
|
||||
export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then
|
||||
#
|
||||
# Apple JDKs
|
||||
#
|
||||
export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then
|
||||
#
|
||||
# Oracle JDKs
|
||||
#
|
||||
export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then
|
||||
#
|
||||
# Apple JDKs
|
||||
#
|
||||
export JAVA_HOME=`/usr/libexec/java_home`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
@@ -108,7 +130,7 @@ if $cygwin ; then
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For Mingw, ensure paths are in UNIX format before anything is touched
|
||||
# For Migwn, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
@@ -162,28 +184,27 @@ fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
local basedir=$(pwd)
|
||||
local wdir=$(pwd)
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
wdir=$(cd "$wdir/.."; pwd)
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
@@ -195,92 +216,30 @@ concat_lines() {
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
##########################################################################################
|
||||
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
|
||||
# This allows using the maven wrapper in projects that prohibit checking in binary data.
|
||||
##########################################################################################
|
||||
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found .mvn/wrapper/maven-wrapper.jar"
|
||||
fi
|
||||
else
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
|
||||
fi
|
||||
jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
|
||||
while IFS="=" read key value; do
|
||||
case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
|
||||
esac
|
||||
done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Downloading from: $jarUrl"
|
||||
fi
|
||||
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
|
||||
|
||||
if command -v wget > /dev/null; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found wget ... using wget"
|
||||
fi
|
||||
wget "$jarUrl" -O "$wrapperJarPath"
|
||||
elif command -v curl > /dev/null; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found curl ... using curl"
|
||||
fi
|
||||
curl -o "$wrapperJarPath" "$jarUrl"
|
||||
else
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Falling back to using Java to download"
|
||||
fi
|
||||
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
|
||||
if [ -e "$javaClass" ]; then
|
||||
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo " - Compiling MavenWrapperDownloader.java ..."
|
||||
fi
|
||||
# Compiling the Java class
|
||||
("$JAVA_HOME/bin/javac" "$javaClass")
|
||||
fi
|
||||
if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
|
||||
# Running the downloader
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo " - Running MavenWrapperDownloader.java ..."
|
||||
fi
|
||||
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
##########################################################################################
|
||||
# End of extension
|
||||
##########################################################################################
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
fi
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)}
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
# Provide a "standardized" way to retrieve the CLI args that will
|
||||
# work with both Windows and non-Windows executions.
|
||||
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
|
||||
export MAVEN_CMD_LINE_ARGS
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
echo "Running version check"
|
||||
VERSION=$( sed '\!<parent!,\!</parent!d' `dirname $0`/pom.xml | grep '<version' | head -1 | sed -e 's/.*<version>//' -e 's!</version>.*$!!' )
|
||||
echo "The found version is [${VERSION}]"
|
||||
|
||||
if echo $VERSION | egrep -q 'M|RC'; then
|
||||
echo Activating \"milestone\" profile for version=\"$VERSION\"
|
||||
echo $MAVEN_ARGS | grep -q milestone || MAVEN_ARGS="$MAVEN_ARGS -Pmilestone"
|
||||
else
|
||||
echo Deactivating \"milestone\" profile for version=\"$VERSION\"
|
||||
echo $MAVEN_ARGS | grep -q milestone && MAVEN_ARGS=$(echo $MAVEN_ARGS | sed -e 's/-Pmilestone//')
|
||||
fi
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
${WRAPPER_LAUNCHER} ${MAVEN_ARGS} "$@"
|
||||
|
||||
306
mvnw.cmd
vendored
Executable file → Normal file
306
mvnw.cmd
vendored
Executable file → Normal file
@@ -1,161 +1,145 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM https://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM set title of command window
|
||||
title %0
|
||||
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
|
||||
FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
|
||||
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
|
||||
)
|
||||
|
||||
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
|
||||
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
|
||||
if exist %WRAPPER_JAR% (
|
||||
echo Found %WRAPPER_JAR%
|
||||
) else (
|
||||
echo Couldn't find %WRAPPER_JAR%, downloading it ...
|
||||
echo Downloading from: %DOWNLOAD_URL%
|
||||
powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
|
||||
echo Finished downloading %WRAPPER_JAR%
|
||||
)
|
||||
@REM End of extension
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM http://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
set MAVEN_CMD_LINE_ARGS=%*
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
|
||||
set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar""
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS%
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
|
||||
58
pom.xml
58
pom.xml
@@ -1,24 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.2.0.M1</version>
|
||||
<version>2.1.0.RC2</version>
|
||||
<packaging>pom</packaging>
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-build</artifactId>
|
||||
<version>2.1.4.RELEASE</version>
|
||||
<version>2.1.0.RC2</version>
|
||||
<relativePath />
|
||||
</parent>
|
||||
<properties>
|
||||
<java.version>1.8</java.version>
|
||||
<spring-kafka.version>2.2.2.RELEASE</spring-kafka.version>
|
||||
<spring-kafka.version>2.2.0.RELEASE</spring-kafka.version>
|
||||
<spring-integration-kafka.version>3.1.0.RELEASE</spring-integration-kafka.version>
|
||||
<kafka.version>2.0.0</kafka.version>
|
||||
<spring-cloud-stream.version>2.2.0.M1</spring-cloud-stream.version>
|
||||
<maven-checkstyle-plugin.failsOnError>true</maven-checkstyle-plugin.failsOnError>
|
||||
<maven-checkstyle-plugin.failsOnViolation>true</maven-checkstyle-plugin.failsOnViolation>
|
||||
<maven-checkstyle-plugin.includeTestSourceDirectory>true</maven-checkstyle-plugin.includeTestSourceDirectory>
|
||||
<spring-cloud-stream.version>2.1.0.RC2</spring-cloud-stream.version>
|
||||
</properties>
|
||||
<modules>
|
||||
<module>spring-cloud-stream-binder-kafka</module>
|
||||
@@ -148,6 +145,31 @@
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-tools</artifactId>
|
||||
<version>${spring-cloud-stream.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>checkstyle-validation</id>
|
||||
<phase>validate</phase>
|
||||
<configuration>
|
||||
<configLocation>checkstyle.xml</configLocation>
|
||||
<headerLocation>checkstyle-header.txt</headerLocation>
|
||||
<suppressionsLocation>checkstyle-suppressions.xml</suppressionsLocation>
|
||||
<encoding>UTF-8</encoding>
|
||||
<consoleOutput>true</consoleOutput>
|
||||
<failsOnError>true</failsOnError>
|
||||
<includeTestSourceDirectory>true</includeTestSourceDirectory>
|
||||
</configuration>
|
||||
<goals>
|
||||
<goal>check</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
@@ -159,7 +181,7 @@
|
||||
<repository>
|
||||
<id>spring-snapshots</id>
|
||||
<name>Spring Snapshots</name>
|
||||
<url>https://repo.spring.io/libs-snapshot-local</url>
|
||||
<url>http://repo.spring.io/libs-snapshot-local</url>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
@@ -170,7 +192,7 @@
|
||||
<repository>
|
||||
<id>spring-milestones</id>
|
||||
<name>Spring Milestones</name>
|
||||
<url>https://repo.spring.io/libs-milestone-local</url>
|
||||
<url>http://repo.spring.io/libs-milestone-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -178,7 +200,7 @@
|
||||
<repository>
|
||||
<id>spring-releases</id>
|
||||
<name>Spring Releases</name>
|
||||
<url>https://repo.spring.io/release</url>
|
||||
<url>http://repo.spring.io/release</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -188,7 +210,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-snapshots</id>
|
||||
<name>Spring Snapshots</name>
|
||||
<url>https://repo.spring.io/libs-snapshot-local</url>
|
||||
<url>http://repo.spring.io/libs-snapshot-local</url>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
@@ -199,7 +221,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-milestones</id>
|
||||
<name>Spring Milestones</name>
|
||||
<url>https://repo.spring.io/libs-milestone-local</url>
|
||||
<url>http://repo.spring.io/libs-milestone-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -207,7 +229,7 @@
|
||||
<pluginRepository>
|
||||
<id>spring-releases</id>
|
||||
<name>Spring Releases</name>
|
||||
<url>https://repo.spring.io/libs-release-local</url>
|
||||
<url>http://repo.spring.io/libs-release-local</url>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
@@ -215,12 +237,4 @@
|
||||
</pluginRepositories>
|
||||
</profile>
|
||||
</profiles>
|
||||
<reporting>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</reporting>
|
||||
</project>
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.2.0.M1</version>
|
||||
<version>2.1.0.RC2</version>
|
||||
</parent>
|
||||
<artifactId>spring-cloud-starter-stream-kafka</artifactId>
|
||||
<description>Spring Cloud Starter Stream Kafka</description>
|
||||
<url>https://projects.spring.io/spring-cloud</url>
|
||||
<url>http://projects.spring.io/spring-cloud</url>
|
||||
<organization>
|
||||
<name>Pivotal Software, Inc.</name>
|
||||
<url>https://www.spring.io</url>
|
||||
<url>http://www.spring.io</url>
|
||||
</organization>
|
||||
<properties>
|
||||
<main.basedir>${basedir}/../..</main.basedir>
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.2.0.M1</version>
|
||||
<version>2.1.0.RC2</version>
|
||||
</parent>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
|
||||
<description>Spring Cloud Stream Kafka Binder Core</description>
|
||||
<url>https://projects.spring.io/spring-cloud</url>
|
||||
<url>http://projects.spring.io/spring-cloud</url>
|
||||
<organization>
|
||||
<name>Pivotal Software, Inc.</name>
|
||||
<url>https://www.spring.io</url>
|
||||
<url>http://www.spring.io</url>
|
||||
</organization>
|
||||
|
||||
<dependencies>
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -54,8 +54,7 @@ public class JaasLoginModuleConfiguration {
|
||||
|
||||
public void setControlFlag(String controlFlag) {
|
||||
Assert.notNull(controlFlag, "cannot be null");
|
||||
this.controlFlag = KafkaJaasLoginModuleInitializer.ControlFlag
|
||||
.valueOf(controlFlag.toUpperCase());
|
||||
this.controlFlag = KafkaJaasLoginModuleInitializer.ControlFlag.valueOf(controlFlag.toUpperCase());
|
||||
}
|
||||
|
||||
public Map<String, String> getOptions() {
|
||||
@@ -65,5 +64,4 @@ public class JaasLoginModuleConfiguration {
|
||||
public void setOptions(Map<String, String> options) {
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,6 +16,8 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.properties;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
@@ -23,17 +25,38 @@ import java.util.Map;
|
||||
*
|
||||
* @author Gary Russell
|
||||
* @since 2.0
|
||||
* @deprecated in favor of {@link KafkaTopicProperties}
|
||||
*
|
||||
*/
|
||||
@Deprecated
|
||||
public class KafkaAdminProperties extends KafkaTopicProperties {
|
||||
public class KafkaAdminProperties {
|
||||
|
||||
private Short replicationFactor;
|
||||
|
||||
private Map<Integer, List<Integer>> replicasAssignments = new HashMap<>();
|
||||
|
||||
private Map<String, String> configuration = new HashMap<>();
|
||||
|
||||
public Short getReplicationFactor() {
|
||||
return this.replicationFactor;
|
||||
}
|
||||
|
||||
public void setReplicationFactor(Short replicationFactor) {
|
||||
this.replicationFactor = replicationFactor;
|
||||
}
|
||||
|
||||
public Map<Integer, List<Integer>> getReplicasAssignments() {
|
||||
return this.replicasAssignments;
|
||||
}
|
||||
|
||||
public void setReplicasAssignments(Map<Integer, List<Integer>> replicasAssignments) {
|
||||
this.replicasAssignments = replicasAssignments;
|
||||
}
|
||||
|
||||
public Map<String, String> getConfiguration() {
|
||||
return getProperties();
|
||||
return this.configuration;
|
||||
}
|
||||
|
||||
public void setConfiguration(Map<String, String> configuration) {
|
||||
setProperties(configuration);
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -40,8 +40,8 @@ import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Configuration properties for the Kafka binder. The properties in this class are
|
||||
* prefixed with <b>spring.cloud.stream.kafka.binder</b>.
|
||||
* Configuration properties for the Kafka binder.
|
||||
* The properties in this class are prefixed with <b>spring.cloud.stream.kafka.binder</b>.
|
||||
*
|
||||
* @author David Turanski
|
||||
* @author Ilayaperumal Gopinathan
|
||||
@@ -49,7 +49,6 @@ import org.springframework.util.StringUtils;
|
||||
* @author Soby Chacko
|
||||
* @author Gary Russell
|
||||
* @author Rafal Zukowski
|
||||
* @author Aldo Sinanaj
|
||||
*/
|
||||
@ConfigurationProperties(prefix = "spring.cloud.stream.kafka.binder")
|
||||
public class KafkaBinderConfigurationProperties {
|
||||
@@ -127,11 +126,11 @@ public class KafkaBinderConfigurationProperties {
|
||||
private JaasLoginModuleConfiguration jaas;
|
||||
|
||||
/**
|
||||
* The bean name of a custom header mapper to use instead of a
|
||||
* {@link org.springframework.kafka.support.DefaultKafkaHeaderMapper}.
|
||||
* The bean name of a custom header mapper to use instead of a {@link org.springframework.kafka.support.DefaultKafkaHeaderMapper}.
|
||||
*/
|
||||
private String headerMapperBeanName;
|
||||
|
||||
|
||||
public KafkaBinderConfigurationProperties(KafkaProperties kafkaProperties) {
|
||||
Assert.notNull(kafkaProperties, "'kafkaProperties' cannot be null");
|
||||
this.kafkaProperties = kafkaProperties;
|
||||
@@ -328,8 +327,9 @@ public class KafkaBinderConfigurationProperties {
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an array of host values to a comma-separated String. It will append the
|
||||
* default port value, if not already specified.
|
||||
* Converts an array of host values to a comma-separated String.
|
||||
* It will append the default port value, if not already specified.
|
||||
*
|
||||
* @param hosts host string
|
||||
* @param defaultPort port
|
||||
* @return formatted connection string
|
||||
@@ -373,6 +373,10 @@ public class KafkaBinderConfigurationProperties {
|
||||
return this.requiredAcks;
|
||||
}
|
||||
|
||||
public void setRequiredAcks(int requiredAcks) {
|
||||
this.requiredAcks = String.valueOf(requiredAcks);
|
||||
}
|
||||
|
||||
public void setRequiredAcks(String requiredAcks) {
|
||||
this.requiredAcks = requiredAcks;
|
||||
}
|
||||
@@ -521,18 +525,15 @@ public class KafkaBinderConfigurationProperties {
|
||||
Map<String, Object> consumerConfiguration = new HashMap<>();
|
||||
consumerConfiguration.putAll(this.kafkaProperties.buildConsumerProperties());
|
||||
// Copy configured binder properties that apply to consumers
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration
|
||||
.entrySet()) {
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) {
|
||||
if (ConsumerConfig.configNames().contains(configurationEntry.getKey())) {
|
||||
consumerConfiguration.put(configurationEntry.getKey(),
|
||||
configurationEntry.getValue());
|
||||
consumerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
|
||||
}
|
||||
}
|
||||
consumerConfiguration.putAll(this.consumerProperties);
|
||||
// Override Spring Boot bootstrap server setting if left to default with the value
|
||||
// configured in the binder
|
||||
return getConfigurationWithBootstrapServer(consumerConfiguration,
|
||||
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
return getConfigurationWithBootstrapServer(consumerConfiguration, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -545,22 +546,18 @@ public class KafkaBinderConfigurationProperties {
|
||||
Map<String, Object> producerConfiguration = new HashMap<>();
|
||||
producerConfiguration.putAll(this.kafkaProperties.buildProducerProperties());
|
||||
// Copy configured binder properties that apply to producers
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration
|
||||
.entrySet()) {
|
||||
for (Map.Entry<String, String> configurationEntry : this.configuration.entrySet()) {
|
||||
if (ProducerConfig.configNames().contains(configurationEntry.getKey())) {
|
||||
producerConfiguration.put(configurationEntry.getKey(),
|
||||
configurationEntry.getValue());
|
||||
producerConfiguration.put(configurationEntry.getKey(), configurationEntry.getValue());
|
||||
}
|
||||
}
|
||||
producerConfiguration.putAll(this.producerProperties);
|
||||
// Override Spring Boot bootstrap server setting if left to default with the value
|
||||
// configured in the binder
|
||||
return getConfigurationWithBootstrapServer(producerConfiguration,
|
||||
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
return getConfigurationWithBootstrapServer(producerConfiguration, ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
}
|
||||
|
||||
private Map<String, Object> getConfigurationWithBootstrapServer(
|
||||
Map<String, Object> configuration, String bootstrapServersConfig) {
|
||||
private Map<String, Object> getConfigurationWithBootstrapServer(Map<String, Object> configuration, String bootstrapServersConfig) {
|
||||
if (ObjectUtils.isEmpty(configuration.get(bootstrapServersConfig))) {
|
||||
configuration.put(bootstrapServersConfig, getKafkaConnectionString());
|
||||
}
|
||||
@@ -570,8 +567,7 @@ public class KafkaBinderConfigurationProperties {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<String> bootStrapServers = (List<String>) configuration
|
||||
.get(bootstrapServersConfig);
|
||||
if (bootStrapServers.size() == 1
|
||||
&& bootStrapServers.get(0).equals("localhost:9092")) {
|
||||
if (bootStrapServers.size() == 1 && bootStrapServers.get(0).equals("localhost:9092")) {
|
||||
configuration.put(bootstrapServersConfig, getKafkaConnectionString());
|
||||
}
|
||||
}
|
||||
@@ -619,10 +615,9 @@ public class KafkaBinderConfigurationProperties {
|
||||
}
|
||||
|
||||
/**
|
||||
* An combination of {@link ProducerProperties} and {@link KafkaProducerProperties} so
|
||||
* that common and kafka-specific properties can be set for the transactional
|
||||
* An combination of {@link ProducerProperties} and {@link KafkaProducerProperties}
|
||||
* so that common and kafka-specific properties can be set for the transactional
|
||||
* producer.
|
||||
*
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class CombinedProducerProperties {
|
||||
@@ -647,10 +642,8 @@ public class KafkaBinderConfigurationProperties {
|
||||
return this.producerProperties.getPartitionSelectorExpression();
|
||||
}
|
||||
|
||||
public void setPartitionSelectorExpression(
|
||||
Expression partitionSelectorExpression) {
|
||||
this.producerProperties
|
||||
.setPartitionSelectorExpression(partitionSelectorExpression);
|
||||
public void setPartitionSelectorExpression(Expression partitionSelectorExpression) {
|
||||
this.producerProperties.setPartitionSelectorExpression(partitionSelectorExpression);
|
||||
}
|
||||
|
||||
public @Min(value = 1, message = "Partition count should be greater than zero.") int getPartitionCount() {
|
||||
@@ -669,13 +662,11 @@ public class KafkaBinderConfigurationProperties {
|
||||
this.producerProperties.setRequiredGroups(requiredGroups);
|
||||
}
|
||||
|
||||
public @AssertTrue(message = "Partition key expression and partition key extractor class properties "
|
||||
+ "are mutually exclusive.") boolean isValidPartitionKeyProperty() {
|
||||
public @AssertTrue(message = "Partition key expression and partition key extractor class properties are mutually exclusive.") boolean isValidPartitionKeyProperty() {
|
||||
return this.producerProperties.isValidPartitionKeyProperty();
|
||||
}
|
||||
|
||||
public @AssertTrue(message = "Partition selector class and partition selector expression "
|
||||
+ "properties are mutually exclusive.") boolean isValidPartitionSelectorProperty() {
|
||||
public @AssertTrue(message = "Partition selector class and partition selector expression properties are mutually exclusive.") boolean isValidPartitionSelectorProperty() {
|
||||
return this.producerProperties.isValidPartitionSelectorProperty();
|
||||
}
|
||||
|
||||
@@ -708,8 +699,7 @@ public class KafkaBinderConfigurationProperties {
|
||||
}
|
||||
|
||||
public void setPartitionKeyExtractorName(String partitionKeyExtractorName) {
|
||||
this.producerProperties
|
||||
.setPartitionKeyExtractorName(partitionKeyExtractorName);
|
||||
this.producerProperties.setPartitionKeyExtractorName(partitionKeyExtractorName);
|
||||
}
|
||||
|
||||
public String getPartitionSelectorName() {
|
||||
@@ -776,28 +766,17 @@ public class KafkaBinderConfigurationProperties {
|
||||
this.kafkaProducerProperties.setConfiguration(configuration);
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public KafkaAdminProperties getAdmin() {
|
||||
return this.kafkaProducerProperties.getAdmin();
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public void setAdmin(KafkaAdminProperties admin) {
|
||||
this.kafkaProducerProperties.setAdmin(admin);
|
||||
}
|
||||
|
||||
public KafkaTopicProperties getTopic() {
|
||||
return this.kafkaProducerProperties.getTopic();
|
||||
}
|
||||
|
||||
public void setTopic(KafkaTopicProperties topic) {
|
||||
this.kafkaProducerProperties.setTopic(topic);
|
||||
}
|
||||
|
||||
public KafkaProducerProperties getExtension() {
|
||||
return this.kafkaProducerProperties;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -45,5 +45,4 @@ public class KafkaBindingProperties implements BinderSpecificPropertiesProvider
|
||||
public void setProducer(KafkaProducerProperties producer) {
|
||||
this.producer = producer;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,7 +19,6 @@ package org.springframework.cloud.stream.binder.kafka.properties;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.boot.context.properties.DeprecatedConfigurationProperty;
|
||||
|
||||
/**
|
||||
* Extended consumer properties for Kafka binder.
|
||||
@@ -28,7 +27,6 @@ import org.springframework.boot.context.properties.DeprecatedConfigurationProper
|
||||
* @author Ilayaperumal Gopinathan
|
||||
* @author Soby Chacko
|
||||
* @author Gary Russell
|
||||
* @author Aldo Sinanaj
|
||||
*
|
||||
* <p>
|
||||
* Thanks to Laszlo Szabo for providing the initial patch for generic property support.
|
||||
@@ -40,7 +38,6 @@ public class KafkaConsumerProperties {
|
||||
* Enumeration for starting consumer offset.
|
||||
*/
|
||||
public enum StartOffset {
|
||||
|
||||
/**
|
||||
* Starting from earliest offset.
|
||||
*/
|
||||
@@ -59,14 +56,12 @@ public class KafkaConsumerProperties {
|
||||
public long getReferencePoint() {
|
||||
return this.referencePoint;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard headers for the message.
|
||||
*/
|
||||
public enum StandardHeaders {
|
||||
|
||||
/**
|
||||
* No headers.
|
||||
*/
|
||||
@@ -83,7 +78,6 @@ public class KafkaConsumerProperties {
|
||||
* Indicating both ID and timestamp headers.
|
||||
*/
|
||||
both
|
||||
|
||||
}
|
||||
|
||||
private boolean ackEachRecord;
|
||||
@@ -118,7 +112,7 @@ public class KafkaConsumerProperties {
|
||||
|
||||
private Map<String, String> configuration = new HashMap<>();
|
||||
|
||||
private KafkaTopicProperties topic = new KafkaTopicProperties();
|
||||
private KafkaAdminProperties admin = new KafkaAdminProperties();
|
||||
|
||||
public boolean isAckEachRecord() {
|
||||
return this.ackEachRecord;
|
||||
@@ -227,7 +221,6 @@ public class KafkaConsumerProperties {
|
||||
public void setDlqProducerProperties(KafkaProducerProperties dlqProducerProperties) {
|
||||
this.dlqProducerProperties = dlqProducerProperties;
|
||||
}
|
||||
|
||||
public StandardHeaders getStandardHeaders() {
|
||||
return this.standardHeaders;
|
||||
}
|
||||
@@ -260,35 +253,12 @@ public class KafkaConsumerProperties {
|
||||
this.destinationIsPattern = destinationIsPattern;
|
||||
}
|
||||
|
||||
/**
|
||||
* No longer used; get properties such as this via {@link #getTopic()}.
|
||||
* @return Kafka admin properties
|
||||
* @deprecated No longer used
|
||||
*/
|
||||
@Deprecated
|
||||
@DeprecatedConfigurationProperty(reason = "Not used since 2.1.1, set properties such as this via 'topic'")
|
||||
@SuppressWarnings("deprecation")
|
||||
public KafkaAdminProperties getAdmin() {
|
||||
// Temporary workaround to copy the topic properties to the admin one.
|
||||
final KafkaAdminProperties kafkaAdminProperties = new KafkaAdminProperties();
|
||||
kafkaAdminProperties.setReplicationFactor(this.topic.getReplicationFactor());
|
||||
kafkaAdminProperties.setReplicasAssignments(this.topic.getReplicasAssignments());
|
||||
kafkaAdminProperties.setConfiguration(this.topic.getProperties());
|
||||
return kafkaAdminProperties;
|
||||
return this.admin;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
@SuppressWarnings("deprecation")
|
||||
public void setAdmin(KafkaAdminProperties admin) {
|
||||
this.topic = admin;
|
||||
}
|
||||
|
||||
public KafkaTopicProperties getTopic() {
|
||||
return this.topic;
|
||||
}
|
||||
|
||||
public void setTopic(KafkaTopicProperties topic) {
|
||||
this.topic = topic;
|
||||
this.admin = admin;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,15 +16,12 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.properties;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.AbstractExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
|
||||
/**
|
||||
* Kafka specific extended binding properties class that extends from
|
||||
* {@link AbstractExtendedBindingProperties}.
|
||||
* Kafka specific extended binding properties class that extends from {@link AbstractExtendedBindingProperties}.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
* @author Gary Russell
|
||||
@@ -32,8 +29,8 @@ import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
* @author Oleg Zhurakousky
|
||||
*/
|
||||
@ConfigurationProperties("spring.cloud.stream.kafka")
|
||||
public class KafkaExtendedBindingProperties extends
|
||||
AbstractExtendedBindingProperties<KafkaConsumerProperties, KafkaProducerProperties, KafkaBindingProperties> {
|
||||
public class KafkaExtendedBindingProperties
|
||||
extends AbstractExtendedBindingProperties<KafkaConsumerProperties, KafkaProducerProperties, KafkaBindingProperties> {
|
||||
|
||||
private static final String DEFAULTS_PREFIX = "spring.cloud.stream.kafka.default";
|
||||
|
||||
@@ -42,14 +39,8 @@ public class KafkaExtendedBindingProperties extends
|
||||
return DEFAULTS_PREFIX;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, KafkaBindingProperties> getBindings() {
|
||||
return this.doGetBindings();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return KafkaBindingProperties.class;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -21,7 +21,6 @@ import java.util.Map;
|
||||
|
||||
import javax.validation.constraints.NotNull;
|
||||
|
||||
import org.springframework.boot.context.properties.DeprecatedConfigurationProperty;
|
||||
import org.springframework.expression.Expression;
|
||||
|
||||
/**
|
||||
@@ -30,7 +29,6 @@ import org.springframework.expression.Expression;
|
||||
* @author Marius Bogoevici
|
||||
* @author Henryk Konsek
|
||||
* @author Gary Russell
|
||||
* @author Aldo Sinanaj
|
||||
*/
|
||||
public class KafkaProducerProperties {
|
||||
|
||||
@@ -48,7 +46,7 @@ public class KafkaProducerProperties {
|
||||
|
||||
private Map<String, String> configuration = new HashMap<>();
|
||||
|
||||
private KafkaTopicProperties topic = new KafkaTopicProperties();
|
||||
private KafkaAdminProperties admin = new KafkaAdminProperties();
|
||||
|
||||
public int getBufferSize() {
|
||||
return this.bufferSize;
|
||||
@@ -107,68 +105,29 @@ public class KafkaProducerProperties {
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
/**
|
||||
* No longer used; get properties such as this via {@link #getTopic()}.
|
||||
* @return Kafka admin properties
|
||||
* @deprecated No longer used
|
||||
*/
|
||||
@Deprecated
|
||||
@DeprecatedConfigurationProperty(reason = "Not used since 2.1.1, set properties such as this via 'topic'")
|
||||
@SuppressWarnings("deprecation")
|
||||
public KafkaAdminProperties getAdmin() {
|
||||
// Temporary workaround to copy the topic properties to the admin one.
|
||||
final KafkaAdminProperties kafkaAdminProperties = new KafkaAdminProperties();
|
||||
kafkaAdminProperties.setReplicationFactor(this.topic.getReplicationFactor());
|
||||
kafkaAdminProperties.setReplicasAssignments(this.topic.getReplicasAssignments());
|
||||
kafkaAdminProperties.setConfiguration(this.topic.getProperties());
|
||||
return kafkaAdminProperties;
|
||||
return this.admin;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
@SuppressWarnings("deprecation")
|
||||
public void setAdmin(KafkaAdminProperties admin) {
|
||||
this.topic = admin;
|
||||
}
|
||||
|
||||
public KafkaTopicProperties getTopic() {
|
||||
return this.topic;
|
||||
}
|
||||
|
||||
public void setTopic(KafkaTopicProperties topic) {
|
||||
this.topic = topic;
|
||||
this.admin = admin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enumeration for compression types.
|
||||
*/
|
||||
public enum CompressionType {
|
||||
|
||||
/**
|
||||
* No compression.
|
||||
*/
|
||||
none,
|
||||
|
||||
/**
|
||||
* gzip based compression.
|
||||
*/
|
||||
gzip,
|
||||
|
||||
/**
|
||||
* snappy based compression.
|
||||
*/
|
||||
snappy,
|
||||
|
||||
/**
|
||||
* lz4 compression.
|
||||
*/
|
||||
lz4,
|
||||
|
||||
// /** // TODO: uncomment and fix docs when kafka-clients 2.1.0 or newer is the
|
||||
// default
|
||||
// * zstd compression
|
||||
// */
|
||||
// zstd
|
||||
|
||||
snappy
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,62 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.properties;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Properties for configuring topics.
|
||||
*
|
||||
* @author Aldo Sinanaj
|
||||
* @since 2.2
|
||||
*
|
||||
*/
|
||||
public class KafkaTopicProperties {
|
||||
|
||||
private Short replicationFactor;
|
||||
|
||||
private Map<Integer, List<Integer>> replicasAssignments = new HashMap<>();
|
||||
|
||||
private Map<String, String> properties = new HashMap<>();
|
||||
|
||||
public Short getReplicationFactor() {
|
||||
return replicationFactor;
|
||||
}
|
||||
|
||||
public void setReplicationFactor(Short replicationFactor) {
|
||||
this.replicationFactor = replicationFactor;
|
||||
}
|
||||
|
||||
public Map<Integer, List<Integer>> getReplicasAssignments() {
|
||||
return replicasAssignments;
|
||||
}
|
||||
|
||||
public void setReplicasAssignments(Map<Integer, List<Integer>> replicasAssignments) {
|
||||
this.replicasAssignments = replicasAssignments;
|
||||
}
|
||||
|
||||
public Map<String, String> getProperties() {
|
||||
return properties;
|
||||
}
|
||||
|
||||
public void setProperties(Map<String, String> properties) {
|
||||
this.properties = properties;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -40,17 +40,16 @@ import org.apache.kafka.clients.admin.TopicDescription;
|
||||
import org.apache.kafka.common.KafkaFuture;
|
||||
import org.apache.kafka.common.PartitionInfo;
|
||||
import org.apache.kafka.common.errors.TopicExistsException;
|
||||
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
|
||||
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.binder.BinderException;
|
||||
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.ExtendedProducerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaAdminProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaTopicProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.utils.KafkaTopicUtils;
|
||||
import org.springframework.cloud.stream.provisioning.ConsumerDestination;
|
||||
import org.springframework.cloud.stream.provisioning.ProducerDestination;
|
||||
@@ -61,7 +60,6 @@ import org.springframework.retry.backoff.ExponentialBackOffPolicy;
|
||||
import org.springframework.retry.policy.SimpleRetryPolicy;
|
||||
import org.springframework.retry.support.RetryTemplate;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -73,13 +71,9 @@ import org.springframework.util.StringUtils;
|
||||
* @author Ilayaperumal Gopinathan
|
||||
* @author Simon Flandergan
|
||||
* @author Oleg Zhurakousky
|
||||
* @author Aldo Sinanaj
|
||||
*/
|
||||
public class KafkaTopicProvisioner implements
|
||||
// @checkstyle:off
|
||||
ProvisioningProvider<ExtendedConsumerProperties<KafkaConsumerProperties>, ExtendedProducerProperties<KafkaProducerProperties>>,
|
||||
// @checkstyle:on
|
||||
InitializingBean {
|
||||
public class KafkaTopicProvisioner implements ProvisioningProvider<ExtendedConsumerProperties<KafkaConsumerProperties>,
|
||||
ExtendedProducerProperties<KafkaProducerProperties>>, InitializingBean {
|
||||
|
||||
private static final int DEFAULT_OPERATION_TIMEOUT = 30;
|
||||
|
||||
@@ -93,18 +87,17 @@ public class KafkaTopicProvisioner implements
|
||||
|
||||
private RetryOperations metadataRetryOperations;
|
||||
|
||||
public KafkaTopicProvisioner(
|
||||
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
public KafkaTopicProvisioner(KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
Assert.isTrue(kafkaProperties != null, "KafkaProperties cannot be null");
|
||||
this.adminClientProperties = kafkaProperties.buildAdminProperties();
|
||||
this.configurationProperties = kafkaBinderConfigurationProperties;
|
||||
normalalizeBootPropsWithBinder(this.adminClientProperties, kafkaProperties,
|
||||
kafkaBinderConfigurationProperties);
|
||||
normalalizeBootPropsWithBinder(this.adminClientProperties, kafkaProperties, kafkaBinderConfigurationProperties);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutator for metadata retry operations.
|
||||
*
|
||||
* @param metadataRetryOperations the retry configuration
|
||||
*/
|
||||
public void setMetadataRetryOperations(RetryOperations metadataRetryOperations) {
|
||||
@@ -138,22 +131,18 @@ public class KafkaTopicProvisioner implements
|
||||
}
|
||||
KafkaTopicUtils.validateTopicName(name);
|
||||
try (AdminClient adminClient = AdminClient.create(this.adminClientProperties)) {
|
||||
createTopic(adminClient, name, properties.getPartitionCount(), false,
|
||||
properties.getExtension().getTopic());
|
||||
createTopic(adminClient, name, properties.getPartitionCount(), false, properties.getExtension().getAdmin());
|
||||
int partitions = 0;
|
||||
if (this.configurationProperties.isAutoCreateTopics()) {
|
||||
DescribeTopicsResult describeTopicsResult = adminClient
|
||||
.describeTopics(Collections.singletonList(name));
|
||||
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult
|
||||
.all();
|
||||
DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singletonList(name));
|
||||
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult.all();
|
||||
|
||||
Map<String, TopicDescription> topicDescriptions = null;
|
||||
try {
|
||||
topicDescriptions = all.get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new ProvisioningException(
|
||||
"Problems encountered with partitions finding", ex);
|
||||
throw new ProvisioningException("Problems encountered with partitions finding", ex);
|
||||
}
|
||||
TopicDescription topicDescription = topicDescriptions.get(name);
|
||||
partitions = topicDescription.partitions().size();
|
||||
@@ -163,8 +152,7 @@ public class KafkaTopicProvisioner implements
|
||||
}
|
||||
|
||||
@Override
|
||||
public ConsumerDestination provisionConsumerDestination(final String name,
|
||||
final String group,
|
||||
public ConsumerDestination provisionConsumerDestination(final String name, final String group,
|
||||
ExtendedConsumerProperties<KafkaConsumerProperties> properties) {
|
||||
if (!properties.isMultiplex()) {
|
||||
return doProvisionConsumerDestination(name, group, properties);
|
||||
@@ -178,8 +166,7 @@ public class KafkaTopicProvisioner implements
|
||||
}
|
||||
}
|
||||
|
||||
private ConsumerDestination doProvisionConsumerDestination(final String name,
|
||||
final String group,
|
||||
private ConsumerDestination doProvisionConsumerDestination(final String name, final String group,
|
||||
ExtendedConsumerProperties<KafkaConsumerProperties> properties) {
|
||||
|
||||
if (properties.getExtension().isDestinationIsPattern()) {
|
||||
@@ -201,24 +188,18 @@ public class KafkaTopicProvisioner implements
|
||||
int partitionCount = properties.getInstanceCount() * properties.getConcurrency();
|
||||
ConsumerDestination consumerDestination = new KafkaConsumerDestination(name);
|
||||
try (AdminClient adminClient = createAdminClient()) {
|
||||
createTopic(adminClient, name, partitionCount,
|
||||
properties.getExtension().isAutoRebalanceEnabled(),
|
||||
properties.getExtension().getTopic());
|
||||
createTopic(adminClient, name, partitionCount, properties.getExtension().isAutoRebalanceEnabled(),
|
||||
properties.getExtension().getAdmin());
|
||||
if (this.configurationProperties.isAutoCreateTopics()) {
|
||||
DescribeTopicsResult describeTopicsResult = adminClient
|
||||
.describeTopics(Collections.singletonList(name));
|
||||
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult
|
||||
.all();
|
||||
DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singletonList(name));
|
||||
KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult.all();
|
||||
try {
|
||||
Map<String, TopicDescription> topicDescriptions = all
|
||||
.get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
Map<String, TopicDescription> topicDescriptions = all.get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
TopicDescription topicDescription = topicDescriptions.get(name);
|
||||
int partitions = topicDescription.partitions().size();
|
||||
consumerDestination = createDlqIfNeedBe(adminClient, name, group,
|
||||
properties, anonymous, partitions);
|
||||
consumerDestination = createDlqIfNeedBe(adminClient, name, group, properties, anonymous, partitions);
|
||||
if (consumerDestination == null) {
|
||||
consumerDestination = new KafkaConsumerDestination(name,
|
||||
partitions);
|
||||
consumerDestination = new KafkaConsumerDestination(name, partitions);
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
@@ -234,24 +215,22 @@ public class KafkaTopicProvisioner implements
|
||||
}
|
||||
|
||||
/**
|
||||
* In general, binder properties supersede boot kafka properties. The one exception is
|
||||
* the bootstrap servers. In that case, we should only override the boot properties if
|
||||
* (there is a binder property AND it is a non-default value) OR (if there is no boot
|
||||
* property); this is needed because the binder property never returns a null value.
|
||||
* In general, binder properties supersede boot kafka properties.
|
||||
* The one exception is the bootstrap servers. In that case, we should only override
|
||||
* the boot properties if (there is a binder property AND it is a non-default value)
|
||||
* OR (if there is no boot property); this is needed because the binder property
|
||||
* never returns a null value.
|
||||
* @param adminProps the admin properties to normalize.
|
||||
* @param bootProps the boot kafka properties.
|
||||
* @param binderProps the binder kafka properties.
|
||||
*/
|
||||
private void normalalizeBootPropsWithBinder(Map<String, Object> adminProps,
|
||||
KafkaProperties bootProps, KafkaBinderConfigurationProperties binderProps) {
|
||||
private void normalalizeBootPropsWithBinder(Map<String, Object> adminProps, KafkaProperties bootProps,
|
||||
KafkaBinderConfigurationProperties binderProps) {
|
||||
// First deal with the outlier
|
||||
String kafkaConnectionString = binderProps.getKafkaConnectionString();
|
||||
if (ObjectUtils
|
||||
.isEmpty(adminProps.get(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG))
|
||||
|| !kafkaConnectionString
|
||||
.equals(binderProps.getDefaultKafkaConnectionString())) {
|
||||
adminProps.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
kafkaConnectionString);
|
||||
if (ObjectUtils.isEmpty(adminProps.get(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG))
|
||||
|| !kafkaConnectionString.equals(binderProps.getDefaultKafkaConnectionString())) {
|
||||
adminProps.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConnectionString);
|
||||
}
|
||||
// Now override any boot values with binder values
|
||||
Map<String, String> binderProperties = binderProps.getConfiguration();
|
||||
@@ -264,24 +243,21 @@ public class KafkaTopicProvisioner implements
|
||||
if (adminConfigNames.contains(key)) {
|
||||
Object replaced = adminProps.put(key, value);
|
||||
if (replaced != null && this.logger.isDebugEnabled()) {
|
||||
this.logger.debug("Overrode boot property: [" + key + "], from: ["
|
||||
+ replaced + "] to: [" + value + "]");
|
||||
this.logger.debug("Overrode boot property: [" + key + "], from: [" + replaced + "] to: [" + value + "]");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private ConsumerDestination createDlqIfNeedBe(AdminClient adminClient, String name,
|
||||
String group, ExtendedConsumerProperties<KafkaConsumerProperties> properties,
|
||||
boolean anonymous, int partitions) {
|
||||
private ConsumerDestination createDlqIfNeedBe(AdminClient adminClient, String name, String group,
|
||||
ExtendedConsumerProperties<KafkaConsumerProperties> properties,
|
||||
boolean anonymous, int partitions) {
|
||||
if (properties.getExtension().isEnableDlq() && !anonymous) {
|
||||
String dlqTopic = StringUtils.hasText(properties.getExtension().getDlqName())
|
||||
? properties.getExtension().getDlqName()
|
||||
: "error." + name + "." + group;
|
||||
String dlqTopic = StringUtils.hasText(properties.getExtension().getDlqName()) ?
|
||||
properties.getExtension().getDlqName() : "error." + name + "." + group;
|
||||
try {
|
||||
createTopicAndPartitions(adminClient, dlqTopic, partitions,
|
||||
properties.getExtension().isAutoRebalanceEnabled(),
|
||||
properties.getExtension().getTopic());
|
||||
properties.getExtension().isAutoRebalanceEnabled(), properties.getExtension().getAdmin());
|
||||
}
|
||||
catch (Throwable throwable) {
|
||||
if (throwable instanceof Error) {
|
||||
@@ -296,34 +272,27 @@ public class KafkaTopicProvisioner implements
|
||||
return null;
|
||||
}
|
||||
|
||||
private void createTopic(AdminClient adminClient, String name, int partitionCount,
|
||||
boolean tolerateLowerPartitionsOnBroker, KafkaTopicProperties properties) {
|
||||
private void createTopic(AdminClient adminClient, String name, int partitionCount, boolean tolerateLowerPartitionsOnBroker,
|
||||
KafkaAdminProperties properties) {
|
||||
try {
|
||||
createTopicIfNecessary(adminClient, name, partitionCount,
|
||||
tolerateLowerPartitionsOnBroker, properties);
|
||||
createTopicIfNecessary(adminClient, name, partitionCount, tolerateLowerPartitionsOnBroker, properties);
|
||||
}
|
||||
// TODO: Remove catching Throwable. See this thread:
|
||||
// TODO:
|
||||
// https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/pull/514#discussion_r241075940
|
||||
catch (Throwable throwable) {
|
||||
if (throwable instanceof Error) {
|
||||
throw (Error) throwable;
|
||||
}
|
||||
else {
|
||||
// TODO:
|
||||
// https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/pull/514#discussion_r241075940
|
||||
throw new ProvisioningException("Provisioning exception", throwable);
|
||||
throw new ProvisioningException("provisioning exception", throwable);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void createTopicIfNecessary(AdminClient adminClient, final String topicName,
|
||||
final int partitionCount, boolean tolerateLowerPartitionsOnBroker,
|
||||
KafkaTopicProperties properties) throws Throwable {
|
||||
private void createTopicIfNecessary(AdminClient adminClient, final String topicName, final int partitionCount,
|
||||
boolean tolerateLowerPartitionsOnBroker, KafkaAdminProperties properties) throws Throwable {
|
||||
|
||||
if (this.configurationProperties.isAutoCreateTopics()) {
|
||||
createTopicAndPartitions(adminClient, topicName, partitionCount,
|
||||
tolerateLowerPartitionsOnBroker, properties);
|
||||
createTopicAndPartitions(adminClient, topicName, partitionCount, tolerateLowerPartitionsOnBroker,
|
||||
properties);
|
||||
}
|
||||
else {
|
||||
this.logger.info("Auto creation of topics is disabled.");
|
||||
@@ -336,14 +305,12 @@ public class KafkaTopicProvisioner implements
|
||||
* @param adminClient kafka admin client
|
||||
* @param topicName topic name
|
||||
* @param partitionCount partition count
|
||||
* @param tolerateLowerPartitionsOnBroker whether lower partitions count on broker is
|
||||
* tolerated ot not
|
||||
* @param topicProperties kafka topic properties
|
||||
* @param tolerateLowerPartitionsOnBroker whether lower partitions count on broker is tolerated ot not
|
||||
* @param adminProperties kafka admin properties
|
||||
* @throws Throwable from topic creation
|
||||
*/
|
||||
private void createTopicAndPartitions(AdminClient adminClient, final String topicName,
|
||||
final int partitionCount, boolean tolerateLowerPartitionsOnBroker,
|
||||
KafkaTopicProperties topicProperties) throws Throwable {
|
||||
private void createTopicAndPartitions(AdminClient adminClient, final String topicName, final int partitionCount,
|
||||
boolean tolerateLowerPartitionsOnBroker, KafkaAdminProperties adminProperties) throws Throwable {
|
||||
|
||||
ListTopicsResult listTopicsResult = adminClient.listTopics();
|
||||
KafkaFuture<Set<String>> namesFutures = listTopicsResult.names();
|
||||
@@ -351,71 +318,54 @@ public class KafkaTopicProvisioner implements
|
||||
Set<String> names = namesFutures.get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
if (names.contains(topicName)) {
|
||||
// only consider minPartitionCount for resizing if autoAddPartitions is true
|
||||
int effectivePartitionCount = this.configurationProperties
|
||||
.isAutoAddPartitions()
|
||||
? Math.max(
|
||||
this.configurationProperties.getMinPartitionCount(),
|
||||
partitionCount)
|
||||
: partitionCount;
|
||||
DescribeTopicsResult describeTopicsResult = adminClient
|
||||
.describeTopics(Collections.singletonList(topicName));
|
||||
KafkaFuture<Map<String, TopicDescription>> topicDescriptionsFuture = describeTopicsResult
|
||||
.all();
|
||||
Map<String, TopicDescription> topicDescriptions = topicDescriptionsFuture
|
||||
.get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
int effectivePartitionCount = this.configurationProperties.isAutoAddPartitions()
|
||||
? Math.max(this.configurationProperties.getMinPartitionCount(), partitionCount)
|
||||
: partitionCount;
|
||||
DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singletonList(topicName));
|
||||
KafkaFuture<Map<String, TopicDescription>> topicDescriptionsFuture = describeTopicsResult.all();
|
||||
Map<String, TopicDescription> topicDescriptions = topicDescriptionsFuture.get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
TopicDescription topicDescription = topicDescriptions.get(topicName);
|
||||
int partitionSize = topicDescription.partitions().size();
|
||||
if (partitionSize < effectivePartitionCount) {
|
||||
if (this.configurationProperties.isAutoAddPartitions()) {
|
||||
CreatePartitionsResult partitions = adminClient
|
||||
.createPartitions(Collections.singletonMap(topicName,
|
||||
NewPartitions.increaseTo(effectivePartitionCount)));
|
||||
CreatePartitionsResult partitions = adminClient.createPartitions(
|
||||
Collections.singletonMap(topicName, NewPartitions.increaseTo(effectivePartitionCount)));
|
||||
partitions.all().get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
}
|
||||
else if (tolerateLowerPartitionsOnBroker) {
|
||||
this.logger.warn("The number of expected partitions was: "
|
||||
+ partitionCount + ", but " + partitionSize
|
||||
+ (partitionSize > 1 ? " have " : " has ")
|
||||
+ "been found instead." + "There will be "
|
||||
+ (effectivePartitionCount - partitionSize)
|
||||
+ " idle consumers");
|
||||
this.logger.warn("The number of expected partitions was: " + partitionCount + ", but "
|
||||
+ partitionSize + (partitionSize > 1 ? " have " : " has ") + "been found instead."
|
||||
+ "There will be " + (effectivePartitionCount - partitionSize) + " idle consumers");
|
||||
}
|
||||
else {
|
||||
throw new ProvisioningException(
|
||||
"The number of expected partitions was: " + partitionCount
|
||||
+ ", but " + partitionSize
|
||||
+ (partitionSize > 1 ? " have " : " has ")
|
||||
+ "been found instead."
|
||||
+ "Consider either increasing the partition count of the topic or enabling "
|
||||
+ "`autoAddPartitions`");
|
||||
throw new ProvisioningException("The number of expected partitions was: " + partitionCount + ", but "
|
||||
+ partitionSize + (partitionSize > 1 ? " have " : " has ") + "been found instead."
|
||||
+ "Consider either increasing the partition count of the topic or enabling " +
|
||||
"`autoAddPartitions`");
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
// always consider minPartitionCount for topic creation
|
||||
final int effectivePartitionCount = Math.max(
|
||||
this.configurationProperties.getMinPartitionCount(), partitionCount);
|
||||
final int effectivePartitionCount = Math.max(this.configurationProperties.getMinPartitionCount(),
|
||||
partitionCount);
|
||||
this.metadataRetryOperations.execute((context) -> {
|
||||
|
||||
NewTopic newTopic;
|
||||
Map<Integer, List<Integer>> replicasAssignments = topicProperties
|
||||
.getReplicasAssignments();
|
||||
if (replicasAssignments != null && replicasAssignments.size() > 0) {
|
||||
newTopic = new NewTopic(topicName,
|
||||
topicProperties.getReplicasAssignments());
|
||||
Map<Integer, List<Integer>> replicasAssignments = adminProperties.getReplicasAssignments();
|
||||
if (replicasAssignments != null && replicasAssignments.size() > 0) {
|
||||
newTopic = new NewTopic(topicName, adminProperties.getReplicasAssignments());
|
||||
}
|
||||
else {
|
||||
newTopic = new NewTopic(topicName, effectivePartitionCount,
|
||||
topicProperties.getReplicationFactor() != null
|
||||
? topicProperties.getReplicationFactor()
|
||||
: this.configurationProperties
|
||||
.getReplicationFactor());
|
||||
adminProperties.getReplicationFactor() != null
|
||||
? adminProperties.getReplicationFactor()
|
||||
: this.configurationProperties.getReplicationFactor());
|
||||
}
|
||||
if (topicProperties.getProperties().size() > 0) {
|
||||
newTopic.configs(topicProperties.getProperties());
|
||||
if (adminProperties.getConfiguration().size() > 0) {
|
||||
newTopic.configs(adminProperties.getConfiguration());
|
||||
}
|
||||
CreateTopicsResult createTopicsResult = adminClient
|
||||
.createTopics(Collections.singletonList(newTopic));
|
||||
CreateTopicsResult createTopicsResult = adminClient.createTopics(Collections.singletonList(newTopic));
|
||||
try {
|
||||
createTopicsResult.all().get(this.operationTimeout, TimeUnit.SECONDS);
|
||||
}
|
||||
@@ -423,8 +373,7 @@ public class KafkaTopicProvisioner implements
|
||||
if (ex instanceof ExecutionException) {
|
||||
if (ex.getCause() instanceof TopicExistsException) {
|
||||
if (this.logger.isWarnEnabled()) {
|
||||
this.logger.warn("Attempt to create topic: " + topicName
|
||||
+ ". Topic already exists.");
|
||||
this.logger.warn("Attempt to create topic: " + topicName + ". Topic already exists.");
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -443,67 +392,28 @@ public class KafkaTopicProvisioner implements
|
||||
}
|
||||
|
||||
public Collection<PartitionInfo> getPartitionsForTopic(final int partitionCount,
|
||||
final boolean tolerateLowerPartitionsOnBroker,
|
||||
final Callable<Collection<PartitionInfo>> callable, final String topicName) {
|
||||
final boolean tolerateLowerPartitionsOnBroker,
|
||||
final Callable<Collection<PartitionInfo>> callable) {
|
||||
try {
|
||||
return this.metadataRetryOperations.execute((context) -> {
|
||||
Collection<PartitionInfo> partitions = Collections.emptyList();
|
||||
|
||||
try {
|
||||
// This call may return null or throw an exception.
|
||||
partitions = callable.call();
|
||||
}
|
||||
catch (Exception ex) {
|
||||
// The above call can potentially throw exceptions such as timeout. If
|
||||
// we can determine
|
||||
// that the exception was due to an unknown topic on the broker, just
|
||||
// simply rethrow that.
|
||||
if (ex instanceof UnknownTopicOrPartitionException) {
|
||||
throw ex;
|
||||
}
|
||||
this.logger.error("Failed to obtain partition information", ex);
|
||||
}
|
||||
// In some cases, the above partition query may not throw an UnknownTopic..Exception for various reasons.
|
||||
// For that, we are forcing another query to ensure that the topic is present on the server.
|
||||
if (CollectionUtils.isEmpty(partitions)) {
|
||||
final AdminClient adminClient = AdminClient
|
||||
.create(this.adminClientProperties);
|
||||
final DescribeTopicsResult describeTopicsResult = adminClient
|
||||
.describeTopics(Collections.singletonList(topicName));
|
||||
try {
|
||||
describeTopicsResult.all().get();
|
||||
}
|
||||
catch (ExecutionException ex) {
|
||||
if (ex.getCause() instanceof UnknownTopicOrPartitionException) {
|
||||
throw (UnknownTopicOrPartitionException) ex.getCause();
|
||||
return this.metadataRetryOperations
|
||||
.execute((context) -> {
|
||||
Collection<PartitionInfo> partitions = callable.call();
|
||||
// do a sanity check on the partition set
|
||||
int partitionSize = partitions.size();
|
||||
if (partitionSize < partitionCount) {
|
||||
if (tolerateLowerPartitionsOnBroker) {
|
||||
this.logger.warn("The number of expected partitions was: " + partitionCount + ", but "
|
||||
+ partitionSize + (partitionSize > 1 ? " have " : " has ") + "been found instead."
|
||||
+ "There will be " + (partitionCount - partitionSize) + " idle consumers");
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException("The number of expected partitions was: "
|
||||
+ partitionCount + ", but " + partitionSize
|
||||
+ (partitionSize > 1 ? " have " : " has ") + "been found instead");
|
||||
}
|
||||
}
|
||||
else {
|
||||
logger.warn("No partitions have been retrieved for the topic "
|
||||
+ "(" + topicName
|
||||
+ "). This will affect the health check.");
|
||||
}
|
||||
}
|
||||
}
|
||||
// do a sanity check on the partition set
|
||||
int partitionSize = CollectionUtils.isEmpty(partitions) ? 0 : partitions.size();
|
||||
if (partitionSize < partitionCount) {
|
||||
if (tolerateLowerPartitionsOnBroker) {
|
||||
this.logger.warn("The number of expected partitions was: "
|
||||
+ partitionCount + ", but " + partitionSize
|
||||
+ (partitionSize > 1 ? " have " : " has ")
|
||||
+ "been found instead." + "There will be "
|
||||
+ (partitionCount - partitionSize) + " idle consumers");
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException(
|
||||
"The number of expected partitions was: " + partitionCount
|
||||
+ ", but " + partitionSize
|
||||
+ (partitionSize > 1 ? " have " : " has ")
|
||||
+ "been found instead");
|
||||
}
|
||||
}
|
||||
return partitions;
|
||||
});
|
||||
return partitions;
|
||||
});
|
||||
}
|
||||
catch (Exception ex) {
|
||||
this.logger.error("Cannot initialize Binder", ex);
|
||||
@@ -534,10 +444,11 @@ public class KafkaTopicProvisioner implements
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "KafkaProducerDestination{" + "producerDestinationName='"
|
||||
+ producerDestinationName + '\'' + ", partitions=" + partitions + '}';
|
||||
return "KafkaProducerDestination{" +
|
||||
"producerDestinationName='" + producerDestinationName + '\'' +
|
||||
", partitions=" + partitions +
|
||||
'}';
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final class KafkaConsumerDestination implements ConsumerDestination {
|
||||
@@ -556,8 +467,7 @@ public class KafkaTopicProvisioner implements
|
||||
this(consumerDestinationName, partitions, null);
|
||||
}
|
||||
|
||||
KafkaConsumerDestination(String consumerDestinationName, Integer partitions,
|
||||
String dlqName) {
|
||||
KafkaConsumerDestination(String consumerDestinationName, Integer partitions, String dlqName) {
|
||||
this.consumerDestinationName = consumerDestinationName;
|
||||
this.partitions = partitions;
|
||||
this.dlqName = dlqName;
|
||||
@@ -570,11 +480,11 @@ public class KafkaTopicProvisioner implements
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "KafkaConsumerDestination{" + "consumerDestinationName='"
|
||||
+ consumerDestinationName + '\'' + ", partitions=" + partitions
|
||||
+ ", dlqName='" + dlqName + '\'' + '}';
|
||||
return "KafkaConsumerDestination{" +
|
||||
"consumerDestinationName='" + consumerDestinationName + '\'' +
|
||||
", partitions=" + partitions +
|
||||
", dlqName='" + dlqName + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2016-2019 the original author or authors.
|
||||
* Copyright 2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -30,19 +30,20 @@ public final class KafkaTopicUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate topic name. Allowed chars are ASCII alphanumerics, '.', '_' and '-'.
|
||||
* Validate topic name.
|
||||
* Allowed chars are ASCII alphanumerics, '.', '_' and '-'.
|
||||
*
|
||||
* @param topicName name of the topic
|
||||
*/
|
||||
public static void validateTopicName(String topicName) {
|
||||
try {
|
||||
byte[] utf8 = topicName.getBytes("UTF-8");
|
||||
for (byte b : utf8) {
|
||||
if (!((b >= 'a') && (b <= 'z') || (b >= 'A') && (b <= 'Z')
|
||||
|| (b >= '0') && (b <= '9') || (b == '.') || (b == '-')
|
||||
|| (b == '_'))) {
|
||||
if (!((b >= 'a') && (b <= 'z') || (b >= 'A') && (b <= 'Z') || (b >= '0') && (b <= '9') || (b == '.')
|
||||
|| (b == '-') || (b == '_'))) {
|
||||
throw new IllegalArgumentException(
|
||||
"Topic name can only have ASCII alphanumerics, '.', '_' and '-', but was: '"
|
||||
+ topicName + "'");
|
||||
"Topic name can only have ASCII alphanumerics, '.', '_' and '-', but was: '" + topicName
|
||||
+ "'");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -50,5 +51,4 @@ public final class KafkaTopicUtils {
|
||||
throw new AssertionError(ex); // Can't happen
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -35,6 +35,7 @@ import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.fail;
|
||||
|
||||
|
||||
/**
|
||||
* @author Gary Russell
|
||||
* @since 2.0
|
||||
@@ -46,27 +47,18 @@ public class KafkaTopicProvisionerTests {
|
||||
@Test
|
||||
public void bootPropertiesOverriddenExceptServers() throws Exception {
|
||||
KafkaProperties bootConfig = new KafkaProperties();
|
||||
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
|
||||
"PLAINTEXT");
|
||||
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
|
||||
bootConfig.setBootstrapServers(Collections.singletonList("localhost:1234"));
|
||||
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(
|
||||
bootConfig);
|
||||
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
|
||||
"SSL");
|
||||
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
|
||||
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
|
||||
ClassPathResource ts = new ClassPathResource("test.truststore.ks");
|
||||
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
|
||||
ts.getFile().getAbsolutePath());
|
||||
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ts.getFile().getAbsolutePath());
|
||||
binderConfig.setBrokers("localhost:9092");
|
||||
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig,
|
||||
bootConfig);
|
||||
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig, bootConfig);
|
||||
AdminClient adminClient = provisioner.createAdminClient();
|
||||
assertThat(KafkaTestUtils.getPropertyValue(adminClient,
|
||||
"client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
|
||||
Map configs = KafkaTestUtils.getPropertyValue(adminClient,
|
||||
"client.selector.channelBuilder.configs", Map.class);
|
||||
assertThat(
|
||||
((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0))
|
||||
.isEqualTo("localhost:1234");
|
||||
assertThat(KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
|
||||
Map configs = KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder.configs", Map.class);
|
||||
assertThat(((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0)).isEqualTo("localhost:1234");
|
||||
adminClient.close();
|
||||
}
|
||||
|
||||
@@ -74,44 +66,33 @@ public class KafkaTopicProvisionerTests {
|
||||
@Test
|
||||
public void bootPropertiesOverriddenIncludingServers() throws Exception {
|
||||
KafkaProperties bootConfig = new KafkaProperties();
|
||||
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
|
||||
"PLAINTEXT");
|
||||
bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
|
||||
bootConfig.setBootstrapServers(Collections.singletonList("localhost:9092"));
|
||||
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(
|
||||
bootConfig);
|
||||
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
|
||||
"SSL");
|
||||
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
|
||||
binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
|
||||
ClassPathResource ts = new ClassPathResource("test.truststore.ks");
|
||||
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
|
||||
ts.getFile().getAbsolutePath());
|
||||
binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ts.getFile().getAbsolutePath());
|
||||
binderConfig.setBrokers("localhost:1234");
|
||||
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig,
|
||||
bootConfig);
|
||||
KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig, bootConfig);
|
||||
AdminClient adminClient = provisioner.createAdminClient();
|
||||
assertThat(KafkaTestUtils.getPropertyValue(adminClient,
|
||||
"client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
|
||||
Map configs = KafkaTestUtils.getPropertyValue(adminClient,
|
||||
"client.selector.channelBuilder.configs", Map.class);
|
||||
assertThat(
|
||||
((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0))
|
||||
.isEqualTo("localhost:1234");
|
||||
assertThat(KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
|
||||
Map configs = KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder.configs", Map.class);
|
||||
assertThat(((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0)).isEqualTo("localhost:1234");
|
||||
adminClient.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void brokersInvalid() throws Exception {
|
||||
KafkaProperties bootConfig = new KafkaProperties();
|
||||
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(
|
||||
bootConfig);
|
||||
binderConfig.getConfiguration().put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG,
|
||||
"localhost:1234");
|
||||
KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
|
||||
binderConfig.getConfiguration().put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:1234");
|
||||
try {
|
||||
new KafkaTopicProvisioner(binderConfig, bootConfig);
|
||||
fail("Expected illegal state");
|
||||
}
|
||||
catch (IllegalStateException e) {
|
||||
assertThat(e.getMessage()).isEqualTo(
|
||||
"Set binder bootstrap servers via the 'brokers' property, not 'configuration'");
|
||||
assertThat(e.getMessage())
|
||||
.isEqualTo("Set binder bootstrap servers via the 'brokers' property, not 'configuration'");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
|
||||
@@ -10,7 +10,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.2.0.M1</version>
|
||||
<version>2.1.0.RC2</version>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
@@ -22,11 +22,6 @@
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-actuator</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-configuration-processor</artifactId>
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -36,35 +36,27 @@ import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* An {@link AbstractBinder} implementation for {@link GlobalKTable}.
|
||||
* <p>
|
||||
* Provides only consumer binding for the bound {@link GlobalKTable}. Output bindings are
|
||||
* not allowed on this binder.
|
||||
*
|
||||
* Provides only consumer binding for the bound {@link GlobalKTable}.
|
||||
* Output bindings are not allowed on this binder.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1.0
|
||||
*/
|
||||
public class GlobalKTableBinder extends
|
||||
// @checkstyle:off
|
||||
AbstractBinder<GlobalKTable<Object, Object>, ExtendedConsumerProperties<KafkaStreamsConsumerProperties>, ExtendedProducerProperties<KafkaStreamsProducerProperties>>
|
||||
implements
|
||||
ExtendedPropertiesBinder<GlobalKTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
implements ExtendedPropertiesBinder<GlobalKTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
private final KafkaTopicProvisioner kafkaTopicProvisioner;
|
||||
|
||||
private final Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers;
|
||||
|
||||
// @checkstyle:off
|
||||
private KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties = new KafkaStreamsExtendedBindingProperties();
|
||||
|
||||
// @checkstyle:on
|
||||
|
||||
public GlobalKTableBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
public GlobalKTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties, KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
this.kafkaTopicProvisioner = kafkaTopicProvisioner;
|
||||
this.kafkaStreamsDlqDispatchers = kafkaStreamsDlqDispatchers;
|
||||
@@ -72,39 +64,31 @@ public class GlobalKTableBinder extends
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindConsumer(String name,
|
||||
String group, GlobalKTable<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindConsumer(String name, String group, GlobalKTable<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
if (!StringUtils.hasText(group)) {
|
||||
group = this.binderConfigurationProperties.getApplicationId();
|
||||
}
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group,
|
||||
getApplicationContext(), this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties,
|
||||
this.kafkaStreamsDlqDispatchers);
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group, getApplicationContext(),
|
||||
this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties, this.kafkaStreamsDlqDispatchers);
|
||||
return new DefaultBinding<>(name, group, inputTarget, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindProducer(String name,
|
||||
GlobalKTable<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
throw new UnsupportedOperationException(
|
||||
"No producer level binding is allowed for GlobalKTable");
|
||||
protected Binding<GlobalKTable<Object, Object>> doBindProducer(String name, GlobalKTable<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
throw new UnsupportedOperationException("No producer level binding is allowed for GlobalKTable");
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(channelName);
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(
|
||||
String channelName) {
|
||||
throw new UnsupportedOperationException(
|
||||
"No producer binding is allowed and therefore no properties");
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(String channelName) {
|
||||
throw new UnsupportedOperationException("No producer binding is allowed and therefore no properties");
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -114,8 +98,7 @@ public class GlobalKTableBinder extends
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedPropertiesEntryClass();
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedPropertiesEntryClass();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,15 +19,13 @@ package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
|
||||
/**
|
||||
* Configuration for GlobalKTable binder.
|
||||
@@ -36,41 +34,19 @@ import org.springframework.context.annotation.Configuration;
|
||||
* @since 2.1.0
|
||||
*/
|
||||
@Configuration
|
||||
@Import(KafkaStreamsBinderUtils.KafkaStreamsMissingBeansRegistrar.class)
|
||||
public class GlobalKTableBinderConfiguration {
|
||||
|
||||
@Bean
|
||||
@ConditionalOnBean(name = "outerContext")
|
||||
public static BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
return (beanFactory) -> {
|
||||
// It is safe to call getBean("outerContext") here, because this bean is
|
||||
// registered as first
|
||||
// and as independent from the parent context.
|
||||
ApplicationContext outerContext = (ApplicationContext) beanFactory
|
||||
.getBean("outerContext");
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBinderConfigurationProperties.class.getSimpleName(),
|
||||
outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBindingInformationCatalogue.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsBindingInformationCatalogue.class));
|
||||
};
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaTopicProvisioner provisioningProvider(
|
||||
KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
public KafkaTopicProvisioner provisioningProvider(KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(binderConfigurationProperties, kafkaProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public GlobalKTableBinder GlobalKTableBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
return new GlobalKTableBinder(binderConfigurationProperties,
|
||||
kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
public GlobalKTableBinder GlobalKTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
return new GlobalKTableBinder(binderConfigurationProperties, kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -27,16 +27,14 @@ import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for
|
||||
* {@link GlobalKTable}
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for {@link GlobalKTable}
|
||||
*
|
||||
* Input bindings are only created as output bindings on GlobalKTable are not allowed.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1.0
|
||||
*/
|
||||
public class GlobalKTableBoundElementFactory
|
||||
extends AbstractBindingTargetFactory<GlobalKTable> {
|
||||
public class GlobalKTableBoundElementFactory extends AbstractBindingTargetFactory<GlobalKTable> {
|
||||
|
||||
private final BindingServiceProperties bindingServiceProperties;
|
||||
|
||||
@@ -47,17 +45,12 @@ public class GlobalKTableBoundElementFactory
|
||||
|
||||
@Override
|
||||
public GlobalKTable createInput(String name) {
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties
|
||||
.getConsumerProperties(name);
|
||||
// Always set multiplex to true in the kafka streams binder
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
//Always set multiplex to true in the kafka streams binder
|
||||
consumerProperties.setMultiplex(true);
|
||||
|
||||
// @checkstyle:off
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapperHandler wrapper = new GlobalKTableBoundElementFactory.GlobalKTableWrapperHandler();
|
||||
// @checkstyle:on
|
||||
ProxyFactory proxyFactory = new ProxyFactory(
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapper.class,
|
||||
GlobalKTable.class);
|
||||
ProxyFactory proxyFactory = new ProxyFactory(GlobalKTableBoundElementFactory.GlobalKTableWrapper.class, GlobalKTable.class);
|
||||
proxyFactory.addAdvice(wrapper);
|
||||
|
||||
return (GlobalKTable) proxyFactory.getProxy();
|
||||
@@ -65,21 +58,17 @@ public class GlobalKTableBoundElementFactory
|
||||
|
||||
@Override
|
||||
public GlobalKTable createOutput(String name) {
|
||||
throw new UnsupportedOperationException(
|
||||
"Outbound operations are not allowed on target type GlobalKTable");
|
||||
throw new UnsupportedOperationException("Outbound operations are not allowed on target type GlobalKTable");
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper for GlobalKTable proxy.
|
||||
*/
|
||||
public interface GlobalKTableWrapper {
|
||||
|
||||
void wrap(GlobalKTable<Object, Object> delegate);
|
||||
|
||||
}
|
||||
|
||||
private static class GlobalKTableWrapperHandler implements
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapper, MethodInterceptor {
|
||||
private static class GlobalKTableWrapperHandler implements GlobalKTableBoundElementFactory.GlobalKTableWrapper, MethodInterceptor {
|
||||
|
||||
private GlobalKTable<Object, Object> delegate;
|
||||
|
||||
@@ -91,25 +80,17 @@ public class GlobalKTableBoundElementFactory
|
||||
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(GlobalKTable.class)) {
|
||||
Assert.notNull(this.delegate,
|
||||
"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
|
||||
+ " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate,
|
||||
methodInvocation.getArguments());
|
||||
if (methodInvocation.getMethod().getDeclaringClass().equals(GlobalKTable.class)) {
|
||||
Assert.notNull(this.delegate, "Trying to prepareConsumerBinding " + methodInvocation
|
||||
.getMethod() + " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate, methodInvocation.getArguments());
|
||||
}
|
||||
else if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(GlobalKTableBoundElementFactory.GlobalKTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this,
|
||||
methodInvocation.getArguments());
|
||||
else if (methodInvocation.getMethod().getDeclaringClass().equals(GlobalKTableBoundElementFactory.GlobalKTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this, methodInvocation.getArguments());
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException(
|
||||
"Only GlobalKTable method invocations are permitted");
|
||||
throw new IllegalStateException("Only GlobalKTable method invocations are permitted");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -30,10 +30,10 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Services pertinent to the interactive query capabilities of Kafka Streams. This class
|
||||
* provides services such as querying for a particular store, which instance is hosting a
|
||||
* particular store etc. This is part of the public API of the kafka streams binder and
|
||||
* the users can inject this service in their applications to make use of it.
|
||||
* Services pertinent to the interactive query capabilities of Kafka Streams. This class provides
|
||||
* services such as querying for a particular store, which instance is hosting a particular store etc.
|
||||
* This is part of the public API of the kafka streams binder and the users can inject this service in their
|
||||
* applications to make use of it.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Renwei Han
|
||||
@@ -42,22 +42,23 @@ import org.springframework.util.StringUtils;
|
||||
public class InteractiveQueryService {
|
||||
|
||||
private final KafkaStreamsRegistry kafkaStreamsRegistry;
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
/**
|
||||
* Constructor for InteractiveQueryService.
|
||||
*
|
||||
* @param kafkaStreamsRegistry holding {@link KafkaStreamsRegistry}
|
||||
* @param binderConfigurationProperties kafka Streams binder configuration properties
|
||||
*/
|
||||
public InteractiveQueryService(KafkaStreamsRegistry kafkaStreamsRegistry,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
this.kafkaStreamsRegistry = kafkaStreamsRegistry;
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve and return a queryable store by name created in the application.
|
||||
*
|
||||
* @param storeName name of the queryable store
|
||||
* @param storeType type of the queryable store
|
||||
* @param <T> generic queryable store
|
||||
@@ -72,24 +73,22 @@ public class InteractiveQueryService {
|
||||
}
|
||||
}
|
||||
catch (InvalidStateStoreException ignored) {
|
||||
// pass through
|
||||
//pass through
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the current {@link HostInfo} that the calling kafka streams application is
|
||||
* running on.
|
||||
* Gets the current {@link HostInfo} that the calling kafka streams application is running on.
|
||||
*
|
||||
* Note that the end user applications must provide `applicaiton.server` as a configuration property
|
||||
* when calling this method. If this is not available, then null is returned.
|
||||
*
|
||||
* Note that the end user applications must provide `applicaiton.server` as a
|
||||
* configuration property when calling this method. If this is not available, then
|
||||
* null is returned.
|
||||
* @return the current {@link HostInfo}
|
||||
*/
|
||||
public HostInfo getCurrentHostInfo() {
|
||||
Map<String, String> configuration = this.binderConfigurationProperties
|
||||
.getConfiguration();
|
||||
Map<String, String> configuration = this.binderConfigurationProperties.getConfiguration();
|
||||
if (configuration.containsKey("application.server")) {
|
||||
|
||||
String applicationServer = configuration.get("application.server");
|
||||
@@ -101,27 +100,27 @@ public class InteractiveQueryService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the {@link HostInfo} where the provided store and key are hosted on. This may
|
||||
* not be the current host that is running the application. Kafka Streams will look
|
||||
* through all the consumer instances under the same application id and retrieves the
|
||||
* proper host.
|
||||
* Gets the {@link HostInfo} where the provided store and key are hosted on. This may not be the
|
||||
* current host that is running the application. Kafka Streams will look through all the consumer instances
|
||||
* under the same application id and retrieves the proper host.
|
||||
*
|
||||
* Note that the end user applications must provide `applicaiton.server` as a configuration property
|
||||
* for all the application instances when calling this method. If this is not available, then null maybe returned.
|
||||
*
|
||||
* Note that the end user applications must provide `applicaiton.server` as a
|
||||
* configuration property for all the application instances when calling this method.
|
||||
* If this is not available, then null maybe returned.
|
||||
* @param <K> generic type for key
|
||||
* @param store store name
|
||||
* @param key key to look for
|
||||
* @param serializer {@link Serializer} for the key
|
||||
* @return the {@link HostInfo} where the key for the provided store is hosted
|
||||
* currently
|
||||
* @return the {@link HostInfo} where the key for the provided store is hosted currently
|
||||
*/
|
||||
public <K> HostInfo getHostInfo(String store, K key, Serializer<K> serializer) {
|
||||
StreamsMetadata streamsMetadata = this.kafkaStreamsRegistry.getKafkaStreams()
|
||||
.stream()
|
||||
.map((k) -> Optional.ofNullable(k.metadataForKey(store, key, serializer)))
|
||||
.filter(Optional::isPresent).map(Optional::get).findFirst().orElse(null);
|
||||
.filter(Optional::isPresent)
|
||||
.map(Optional::get)
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
return streamsMetadata != null ? streamsMetadata.hostInfo() : null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -40,8 +40,8 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binder.Binder} implementation for
|
||||
* {@link KStream}. This implemenation extends from the {@link AbstractBinder} directly.
|
||||
* {@link org.springframework.cloud.stream.binder.Binder} implementation for {@link KStream}.
|
||||
* This implemenation extends from the {@link AbstractBinder} directly.
|
||||
* <p>
|
||||
* Provides both producer and consumer bindings for the bound KStream.
|
||||
*
|
||||
@@ -49,22 +49,15 @@ import org.springframework.util.StringUtils;
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
class KStreamBinder extends
|
||||
// @checkstyle:off
|
||||
AbstractBinder<KStream<Object, Object>, ExtendedConsumerProperties<KafkaStreamsConsumerProperties>, ExtendedProducerProperties<KafkaStreamsProducerProperties>>
|
||||
implements
|
||||
ExtendedPropertiesBinder<KStream<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
implements ExtendedPropertiesBinder<KStream<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KStreamBinder.class);
|
||||
|
||||
private final KafkaTopicProvisioner kafkaTopicProvisioner;
|
||||
|
||||
// @checkstyle:off
|
||||
private KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties = new KafkaStreamsExtendedBindingProperties();
|
||||
|
||||
// @checkstyle:on
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
private final KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate;
|
||||
@@ -76,11 +69,11 @@ class KStreamBinder extends
|
||||
private final Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers;
|
||||
|
||||
KStreamBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
this.kafkaTopicProvisioner = kafkaTopicProvisioner;
|
||||
this.kafkaStreamsMessageConversionDelegate = kafkaStreamsMessageConversionDelegate;
|
||||
@@ -91,77 +84,57 @@ class KStreamBinder extends
|
||||
|
||||
@Override
|
||||
protected Binding<KStream<Object, Object>> doBindConsumer(String name, String group,
|
||||
KStream<Object, Object> inputTarget,
|
||||
// @checkstyle:off
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
// @checkstyle:on
|
||||
this.kafkaStreamsBindingInformationCatalogue
|
||||
.registerConsumerProperties(inputTarget, properties.getExtension());
|
||||
KStream<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerConsumerProperties(inputTarget, properties.getExtension());
|
||||
if (!StringUtils.hasText(group)) {
|
||||
group = this.binderConfigurationProperties.getApplicationId();
|
||||
}
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group,
|
||||
getApplicationContext(), this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties,
|
||||
this.kafkaStreamsDlqDispatchers);
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group, getApplicationContext(),
|
||||
this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties, this.kafkaStreamsDlqDispatchers);
|
||||
|
||||
return new DefaultBinding<>(name, group, inputTarget, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Binding<KStream<Object, Object>> doBindProducer(String name,
|
||||
KStream<Object, Object> outboundBindTarget,
|
||||
// @checkstyle:off
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
// @checkstyle:on
|
||||
protected Binding<KStream<Object, Object>> doBindProducer(String name, KStream<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
ExtendedProducerProperties<KafkaProducerProperties> extendedProducerProperties = new ExtendedProducerProperties<>(
|
||||
new KafkaProducerProperties());
|
||||
this.kafkaTopicProvisioner.provisionProducerDestination(name,
|
||||
extendedProducerProperties);
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver
|
||||
.getOuboundKeySerde(properties.getExtension());
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getOutboundValueSerde(properties,
|
||||
properties.getExtension());
|
||||
to(properties.isUseNativeEncoding(), name, outboundBindTarget,
|
||||
(Serde<Object>) keySerde, (Serde<Object>) valueSerde);
|
||||
this.kafkaTopicProvisioner.provisionProducerDestination(name, extendedProducerProperties);
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver.getOuboundKeySerde(properties.getExtension());
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getOutboundValueSerde(properties, properties.getExtension());
|
||||
to(properties.isUseNativeEncoding(), name, outboundBindTarget, (Serde<Object>) keySerde, (Serde<Object>) valueSerde);
|
||||
return new DefaultBinding<>(name, null, outboundBindTarget, null);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void to(boolean isNativeEncoding, String name,
|
||||
KStream<Object, Object> outboundBindTarget, Serde<Object> keySerde,
|
||||
Serde<Object> valueSerde) {
|
||||
private void to(boolean isNativeEncoding, String name, KStream<Object, Object> outboundBindTarget,
|
||||
Serde<Object> keySerde, Serde<Object> valueSerde) {
|
||||
if (!isNativeEncoding) {
|
||||
LOG.info("Native encoding is disabled for " + name
|
||||
+ ". Outbound message conversion done by Spring Cloud Stream.");
|
||||
this.kafkaStreamsMessageConversionDelegate
|
||||
.serializeOnOutbound(outboundBindTarget)
|
||||
LOG.info("Native encoding is disabled for " + name + ". Outbound message conversion done by Spring Cloud Stream.");
|
||||
this.kafkaStreamsMessageConversionDelegate.serializeOnOutbound(outboundBindTarget)
|
||||
.to(name, Produced.with(keySerde, valueSerde));
|
||||
}
|
||||
else {
|
||||
LOG.info("Native encoding is enabled for " + name
|
||||
+ ". Outbound serialization done at the broker.");
|
||||
LOG.info("Native encoding is enabled for " + name + ". Outbound serialization done at the broker.");
|
||||
outboundBindTarget.to(name, Produced.with(keySerde, valueSerde));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(channelName);
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedProducerProperties(channelName);
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedProducerProperties(channelName);
|
||||
}
|
||||
|
||||
public void setKafkaStreamsExtendedBindingProperties(
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties) {
|
||||
public void setKafkaStreamsExtendedBindingProperties(KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties) {
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
}
|
||||
|
||||
@@ -172,8 +145,6 @@ class KStreamBinder extends
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedPropertiesEntryClass();
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedPropertiesEntryClass();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,17 +19,20 @@ package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.beans.factory.config.MethodInvokingFactoryBean;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
|
||||
/**
|
||||
* Configuration for KStream binder.
|
||||
@@ -39,62 +42,69 @@ import org.springframework.context.annotation.Import;
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@Configuration
|
||||
@Import({ KafkaAutoConfiguration.class,
|
||||
KafkaStreamsBinderHealthIndicatorConfiguration.class })
|
||||
@Import({KafkaAutoConfiguration.class, KStreamBinderConfiguration.KStreamMissingBeansRegistrar.class})
|
||||
public class KStreamBinderConfiguration {
|
||||
|
||||
@Bean
|
||||
public KafkaTopicProvisioner provisioningProvider(
|
||||
KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(kafkaStreamsBinderConfigurationProperties,
|
||||
kafkaProperties);
|
||||
public KafkaTopicProvisioner provisioningProvider(KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(binderConfigurationProperties, kafkaProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KStreamBinder kStreamBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KStreamBinder kStreamBinder = new KStreamBinder(binderConfigurationProperties,
|
||||
kafkaTopicProvisioner, KafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue, keyValueSerdeResolver,
|
||||
kafkaStreamsDlqDispatchers);
|
||||
kStreamBinder.setKafkaStreamsExtendedBindingProperties(
|
||||
kafkaStreamsExtendedBindingProperties);
|
||||
public KStreamBinder kStreamBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KStreamBinder kStreamBinder = new KStreamBinder(binderConfigurationProperties, kafkaTopicProvisioner,
|
||||
KafkaStreamsMessageConversionDelegate, KafkaStreamsBindingInformationCatalogue,
|
||||
keyValueSerdeResolver, kafkaStreamsDlqDispatchers);
|
||||
kStreamBinder.setKafkaStreamsExtendedBindingProperties(kafkaStreamsExtendedBindingProperties);
|
||||
return kStreamBinder;
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ConditionalOnBean(name = "outerContext")
|
||||
public static BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
return beanFactory -> {
|
||||
/**
|
||||
* Registrar for missing beans when there are multiple binders in the application.
|
||||
*/
|
||||
static class KStreamMissingBeansRegistrar extends KafkaStreamsBinderUtils.KafkaStreamsMissingBeansRegistrar {
|
||||
|
||||
// It is safe to call getBean("outerContext") here, because this bean is
|
||||
// registered as first
|
||||
// and as independent from the parent context.
|
||||
ApplicationContext outerContext = (ApplicationContext) beanFactory
|
||||
.getBean("outerContext");
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBinderConfigurationProperties.class.getSimpleName(),
|
||||
outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsMessageConversionDelegate.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsMessageConversionDelegate.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBindingInformationCatalogue.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsBindingInformationCatalogue.class));
|
||||
beanFactory.registerSingleton(KeyValueSerdeResolver.class.getSimpleName(),
|
||||
outerContext.getBean(KeyValueSerdeResolver.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsExtendedBindingProperties.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsExtendedBindingProperties.class));
|
||||
};
|
||||
private static final String BEAN_NAME = "outerContext";
|
||||
|
||||
@Override
|
||||
public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
|
||||
BeanDefinitionRegistry registry) {
|
||||
super.registerBeanDefinitions(importingClassMetadata, registry);
|
||||
|
||||
if (registry.containsBeanDefinition(BEAN_NAME)) {
|
||||
|
||||
AbstractBeanDefinition converstionDelegateBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KafkaStreamsMessageConversionDelegate.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KafkaStreamsMessageConversionDelegate.class.getSimpleName(), converstionDelegateBean);
|
||||
|
||||
AbstractBeanDefinition keyValueSerdeResolverBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KeyValueSerdeResolver.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KeyValueSerdeResolver.class.getSimpleName(), keyValueSerdeResolverBean);
|
||||
|
||||
AbstractBeanDefinition kafkaStreamsExtendedBindingPropertiesBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KafkaStreamsExtendedBindingProperties.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KafkaStreamsExtendedBindingProperties.class.getSimpleName(), kafkaStreamsExtendedBindingPropertiesBean);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -28,11 +28,10 @@ import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory}
|
||||
* for{@link KStream}.
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for{@link KStream}.
|
||||
*
|
||||
* The implementation creates proxies for both input and output binding. The actual target
|
||||
* will be created downstream through further binding process.
|
||||
* The implementation creates proxies for both input and output binding.
|
||||
* The actual target will be created downstream through further binding process.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
* @author Soby Chacko
|
||||
@@ -44,7 +43,7 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
|
||||
|
||||
KStreamBoundElementFactory(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
super(KStream.class);
|
||||
this.bindingServiceProperties = bindingServiceProperties;
|
||||
this.kafkaStreamsBindingInformationCatalogue = KafkaStreamsBindingInformationCatalogue;
|
||||
@@ -52,9 +51,8 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
|
||||
@Override
|
||||
public KStream createInput(String name) {
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties
|
||||
.getConsumerProperties(name);
|
||||
// Always set multiplex to true in the kafka streams binder
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
//Always set multiplex to true in the kafka streams binder
|
||||
consumerProperties.setMultiplex(true);
|
||||
return createProxyForKStream(name);
|
||||
}
|
||||
@@ -72,12 +70,9 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
|
||||
KStream proxy = (KStream) proxyFactory.getProxy();
|
||||
|
||||
// Add the binding properties to the catalogue for later retrieval during further
|
||||
// binding steps downstream.
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties
|
||||
.getBindingProperties(name);
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerBindingProperties(proxy,
|
||||
bindingProperties);
|
||||
//Add the binding properties to the catalogue for later retrieval during further binding steps downstream.
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
|
||||
this.kafkaStreamsBindingInformationCatalogue.registerBindingProperties(proxy, bindingProperties);
|
||||
return proxy;
|
||||
}
|
||||
|
||||
@@ -90,8 +85,7 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
|
||||
}
|
||||
|
||||
private static class KStreamWrapperHandler
|
||||
implements KStreamWrapper, MethodInterceptor {
|
||||
private static class KStreamWrapperHandler implements KStreamWrapper, MethodInterceptor {
|
||||
|
||||
private KStream<Object, Object> delegate;
|
||||
|
||||
@@ -104,23 +98,16 @@ class KStreamBoundElementFactory extends AbstractBindingTargetFactory<KStream> {
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
if (methodInvocation.getMethod().getDeclaringClass().equals(KStream.class)) {
|
||||
Assert.notNull(this.delegate,
|
||||
"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
|
||||
+ " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate,
|
||||
methodInvocation.getArguments());
|
||||
Assert.notNull(this.delegate, "Trying to prepareConsumerBinding " + methodInvocation
|
||||
.getMethod() + " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate, methodInvocation.getArguments());
|
||||
}
|
||||
else if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(KStreamWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this,
|
||||
methodInvocation.getArguments());
|
||||
else if (methodInvocation.getMethod().getDeclaringClass().equals(KStreamWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this, methodInvocation.getArguments());
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException(
|
||||
"Only KStream method invocations are permitted");
|
||||
throw new IllegalStateException("Only KStream method invocations are permitted");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -28,16 +28,13 @@ import org.springframework.core.ResolvableType;
|
||||
* @author Marius Bogoevici
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
class KStreamStreamListenerParameterAdapter
|
||||
implements StreamListenerParameterAdapter<KStream<?, ?>, KStream<?, ?>> {
|
||||
class KStreamStreamListenerParameterAdapter implements StreamListenerParameterAdapter<KStream<?, ?>, KStream<?, ?>> {
|
||||
|
||||
private final KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate;
|
||||
|
||||
private final KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue;
|
||||
|
||||
KStreamStreamListenerParameterAdapter(
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
KStreamStreamListenerParameterAdapter(KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
this.kafkaStreamsMessageConversionDelegate = kafkaStreamsMessageConversionDelegate;
|
||||
this.KafkaStreamsBindingInformationCatalogue = KafkaStreamsBindingInformationCatalogue;
|
||||
}
|
||||
@@ -54,14 +51,11 @@ class KStreamStreamListenerParameterAdapter
|
||||
ResolvableType resolvableType = ResolvableType.forMethodParameter(parameter);
|
||||
final Class<?> valueClass = (resolvableType.getGeneric(1).getRawClass() != null)
|
||||
? (resolvableType.getGeneric(1).getRawClass()) : Object.class;
|
||||
if (this.KafkaStreamsBindingInformationCatalogue
|
||||
.isUseNativeDecoding(bindingTarget)) {
|
||||
if (this.KafkaStreamsBindingInformationCatalogue.isUseNativeDecoding(bindingTarget)) {
|
||||
return bindingTarget;
|
||||
}
|
||||
else {
|
||||
return this.kafkaStreamsMessageConversionDelegate
|
||||
.deserializeOnInbound(valueClass, bindingTarget);
|
||||
return this.kafkaStreamsMessageConversionDelegate.deserializeOnInbound(valueClass, bindingTarget);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2018 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -29,19 +29,16 @@ import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
|
||||
* @author Marius Bogoevici
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
class KStreamStreamListenerResultAdapter implements
|
||||
StreamListenerResultAdapter<KStream, KStreamBoundElementFactory.KStreamWrapper> {
|
||||
class KStreamStreamListenerResultAdapter implements StreamListenerResultAdapter<KStream, KStreamBoundElementFactory.KStreamWrapper> {
|
||||
|
||||
@Override
|
||||
public boolean supports(Class<?> resultType, Class<?> boundElement) {
|
||||
return KStream.class.isAssignableFrom(resultType)
|
||||
&& KStream.class.isAssignableFrom(boundElement);
|
||||
return KStream.class.isAssignableFrom(resultType) && KStream.class.isAssignableFrom(boundElement);
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public Closeable adapt(KStream streamListenerResult,
|
||||
KStreamBoundElementFactory.KStreamWrapper boundElement) {
|
||||
public Closeable adapt(KStream streamListenerResult, KStreamBoundElementFactory.KStreamWrapper boundElement) {
|
||||
boundElement.wrap(streamListenerResult);
|
||||
return new NoOpCloseable();
|
||||
}
|
||||
@@ -54,5 +51,4 @@ class KStreamStreamListenerResultAdapter implements
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -35,21 +35,16 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binder.Binder} implementation for
|
||||
* {@link KTable}. This implemenation extends from the {@link AbstractBinder} directly.
|
||||
* {@link org.springframework.cloud.stream.binder.Binder} implementation for {@link KTable}.
|
||||
* This implemenation extends from the {@link AbstractBinder} directly.
|
||||
*
|
||||
* Provides only consumer binding for the bound KTable as output bindings are not allowed
|
||||
* on it.
|
||||
* Provides only consumer binding for the bound KTable as output bindings are not allowed on it.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
class KTableBinder extends
|
||||
// @checkstyle:off
|
||||
AbstractBinder<KTable<Object, Object>, ExtendedConsumerProperties<KafkaStreamsConsumerProperties>, ExtendedProducerProperties<KafkaStreamsProducerProperties>>
|
||||
implements
|
||||
ExtendedPropertiesBinder<KTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
implements ExtendedPropertiesBinder<KTable<Object, Object>, KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties> {
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
@@ -57,14 +52,10 @@ class KTableBinder extends
|
||||
|
||||
private Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers;
|
||||
|
||||
// @checkstyle:off
|
||||
private KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties = new KafkaStreamsExtendedBindingProperties();
|
||||
|
||||
// @checkstyle:on
|
||||
|
||||
KTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties, KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
this.kafkaTopicProvisioner = kafkaTopicProvisioner;
|
||||
this.kafkaStreamsDlqDispatchers = kafkaStreamsDlqDispatchers;
|
||||
@@ -72,43 +63,31 @@ class KTableBinder extends
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Binding<KTable<Object, Object>> doBindConsumer(String name, String group,
|
||||
KTable<Object, Object> inputTarget,
|
||||
// @checkstyle:off
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
// @checkstyle:on
|
||||
protected Binding<KTable<Object, Object>> doBindConsumer(String name, String group, KTable<Object, Object> inputTarget,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
|
||||
if (!StringUtils.hasText(group)) {
|
||||
group = this.binderConfigurationProperties.getApplicationId();
|
||||
}
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group,
|
||||
getApplicationContext(), this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties,
|
||||
this.kafkaStreamsDlqDispatchers);
|
||||
KafkaStreamsBinderUtils.prepareConsumerBinding(name, group, getApplicationContext(),
|
||||
this.kafkaTopicProvisioner,
|
||||
this.binderConfigurationProperties, properties, this.kafkaStreamsDlqDispatchers);
|
||||
return new DefaultBinding<>(name, group, inputTarget, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Binding<KTable<Object, Object>> doBindProducer(String name,
|
||||
KTable<Object, Object> outboundBindTarget,
|
||||
// @checkstyle:off
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
// @checkstyle:on
|
||||
throw new UnsupportedOperationException(
|
||||
"No producer level binding is allowed for KTable");
|
||||
protected Binding<KTable<Object, Object>> doBindProducer(String name, KTable<Object, Object> outboundBindTarget,
|
||||
ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
|
||||
throw new UnsupportedOperationException("No producer level binding is allowed for KTable");
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(channelName);
|
||||
public KafkaStreamsConsumerProperties getExtendedConsumerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(
|
||||
String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedProducerProperties(channelName);
|
||||
public KafkaStreamsProducerProperties getExtendedProducerProperties(String channelName) {
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedProducerProperties(channelName);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -118,8 +97,6 @@ class KTableBinder extends
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedPropertiesEntryClass();
|
||||
return this.kafkaStreamsExtendedBindingProperties.getExtendedPropertiesEntryClass();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -28,6 +28,7 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
|
||||
/**
|
||||
* Configuration for KTable binder.
|
||||
@@ -36,42 +37,32 @@ import org.springframework.context.annotation.Configuration;
|
||||
*/
|
||||
@SuppressWarnings("ALL")
|
||||
@Configuration
|
||||
@Import(KafkaStreamsBinderUtils.KafkaStreamsMissingBeansRegistrar.class)
|
||||
public class KTableBinderConfiguration {
|
||||
|
||||
@Bean
|
||||
@ConditionalOnBean(name = "outerContext")
|
||||
public static BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
public BeanFactoryPostProcessor outerContextBeanFactoryPostProcessor() {
|
||||
return (beanFactory) -> {
|
||||
// It is safe to call getBean("outerContext") here, because this bean is
|
||||
// registered as first
|
||||
// and as independent from the parent context.
|
||||
ApplicationContext outerContext = (ApplicationContext) beanFactory
|
||||
.getBean("outerContext");
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBinderConfigurationProperties.class.getSimpleName(),
|
||||
outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(
|
||||
KafkaStreamsBindingInformationCatalogue.class.getSimpleName(),
|
||||
outerContext.getBean(KafkaStreamsBindingInformationCatalogue.class));
|
||||
ApplicationContext outerContext = (ApplicationContext) beanFactory.getBean("outerContext");
|
||||
beanFactory.registerSingleton(KafkaStreamsBinderConfigurationProperties.class.getSimpleName(), outerContext
|
||||
.getBean(KafkaStreamsBinderConfigurationProperties.class));
|
||||
beanFactory.registerSingleton(KafkaStreamsBindingInformationCatalogue.class.getSimpleName(), outerContext
|
||||
.getBean(KafkaStreamsBindingInformationCatalogue.class));
|
||||
};
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaTopicProvisioner provisioningProvider(
|
||||
KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
public KafkaTopicProvisioner provisioningProvider(KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaProperties kafkaProperties) {
|
||||
return new KafkaTopicProvisioner(binderConfigurationProperties, kafkaProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KTableBinder kTableBinder(
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KTableBinder kStreamBinder = new KTableBinder(binderConfigurationProperties,
|
||||
kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
public KTableBinder kTableBinder(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
@Qualifier("kafkaStreamsDlqDispatchers") Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
KTableBinder kStreamBinder = new KTableBinder(binderConfigurationProperties, kafkaTopicProvisioner, kafkaStreamsDlqDispatchers);
|
||||
return kStreamBinder;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -27,8 +27,7 @@ import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for
|
||||
* {@link KTable}
|
||||
* {@link org.springframework.cloud.stream.binding.BindingTargetFactory} for {@link KTable}
|
||||
*
|
||||
* Input bindings are only created as output bindings on KTable are not allowed.
|
||||
*
|
||||
@@ -45,14 +44,12 @@ class KTableBoundElementFactory extends AbstractBindingTargetFactory<KTable> {
|
||||
|
||||
@Override
|
||||
public KTable createInput(String name) {
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties
|
||||
.getConsumerProperties(name);
|
||||
// Always set multiplex to true in the kafka streams binder
|
||||
ConsumerProperties consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
|
||||
//Always set multiplex to true in the kafka streams binder
|
||||
consumerProperties.setMultiplex(true);
|
||||
|
||||
KTableBoundElementFactory.KTableWrapperHandler wrapper = new KTableBoundElementFactory.KTableWrapperHandler();
|
||||
ProxyFactory proxyFactory = new ProxyFactory(
|
||||
KTableBoundElementFactory.KTableWrapper.class, KTable.class);
|
||||
ProxyFactory proxyFactory = new ProxyFactory(KTableBoundElementFactory.KTableWrapper.class, KTable.class);
|
||||
proxyFactory.addAdvice(wrapper);
|
||||
|
||||
return (KTable) proxyFactory.getProxy();
|
||||
@@ -61,21 +58,17 @@ class KTableBoundElementFactory extends AbstractBindingTargetFactory<KTable> {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public KTable createOutput(final String name) {
|
||||
throw new UnsupportedOperationException(
|
||||
"Outbound operations are not allowed on target type KTable");
|
||||
throw new UnsupportedOperationException("Outbound operations are not allowed on target type KTable");
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper for KTable proxy.
|
||||
*/
|
||||
public interface KTableWrapper {
|
||||
|
||||
void wrap(KTable<Object, Object> delegate);
|
||||
|
||||
}
|
||||
|
||||
private static class KTableWrapperHandler
|
||||
implements KTableBoundElementFactory.KTableWrapper, MethodInterceptor {
|
||||
private static class KTableWrapperHandler implements KTableBoundElementFactory.KTableWrapper, MethodInterceptor {
|
||||
|
||||
private KTable<Object, Object> delegate;
|
||||
|
||||
@@ -88,23 +81,16 @@ class KTableBoundElementFactory extends AbstractBindingTargetFactory<KTable> {
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
if (methodInvocation.getMethod().getDeclaringClass().equals(KTable.class)) {
|
||||
Assert.notNull(this.delegate,
|
||||
"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
|
||||
+ " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate,
|
||||
methodInvocation.getArguments());
|
||||
Assert.notNull(this.delegate, "Trying to prepareConsumerBinding " + methodInvocation
|
||||
.getMethod() + " but no delegate has been set.");
|
||||
return methodInvocation.getMethod().invoke(this.delegate, methodInvocation.getArguments());
|
||||
}
|
||||
else if (methodInvocation.getMethod().getDeclaringClass()
|
||||
.equals(KTableBoundElementFactory.KTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this,
|
||||
methodInvocation.getArguments());
|
||||
else if (methodInvocation.getMethod().getDeclaringClass().equals(KTableBoundElementFactory.KTableWrapper.class)) {
|
||||
return methodInvocation.getMethod().invoke(this, methodInvocation.getArguments());
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException(
|
||||
"Only KTable method invocations are permitted");
|
||||
throw new IllegalStateException("Only KTable method invocations are permitted");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -35,12 +35,9 @@ public class KafkaStreamsApplicationSupportAutoConfiguration {
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty("spring.cloud.stream.kafka.streams.timeWindow.length")
|
||||
public TimeWindows configuredTimeWindow(
|
||||
KafkaStreamsApplicationSupportProperties processorProperties) {
|
||||
public TimeWindows configuredTimeWindow(KafkaStreamsApplicationSupportProperties processorProperties) {
|
||||
return processorProperties.getTimeWindow().getAdvanceBy() > 0
|
||||
? TimeWindows.of(processorProperties.getTimeWindow().getLength())
|
||||
.advanceBy(processorProperties.getTimeWindow().getAdvanceBy())
|
||||
? TimeWindows.of(processorProperties.getTimeWindow().getLength()).advanceBy(processorProperties.getTimeWindow().getAdvanceBy())
|
||||
: TimeWindows.of(processorProperties.getTimeWindow().getLength());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.kafka.streams.KafkaStreams;
|
||||
import org.apache.kafka.streams.processor.TaskMetadata;
|
||||
import org.apache.kafka.streams.processor.ThreadMetadata;
|
||||
|
||||
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
|
||||
import org.springframework.boot.actuate.health.Health;
|
||||
import org.springframework.boot.actuate.health.Status;
|
||||
|
||||
/**
|
||||
* Health indicator for Kafka Streams.
|
||||
*
|
||||
* @author Arnaud Jardiné
|
||||
*/
|
||||
class KafkaStreamsBinderHealthIndicator extends AbstractHealthIndicator {
|
||||
|
||||
private final KafkaStreamsRegistry kafkaStreamsRegistry;
|
||||
|
||||
KafkaStreamsBinderHealthIndicator(KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
super("Kafka-streams health check failed");
|
||||
this.kafkaStreamsRegistry = kafkaStreamsRegistry;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doHealthCheck(Health.Builder builder) throws Exception {
|
||||
boolean up = true;
|
||||
for (KafkaStreams kStream : kafkaStreamsRegistry.getKafkaStreams()) {
|
||||
up &= kStream.state().isRunning();
|
||||
builder.withDetails(buildDetails(kStream));
|
||||
}
|
||||
builder.status(up ? Status.UP : Status.DOWN);
|
||||
}
|
||||
|
||||
private static Map<String, Object> buildDetails(KafkaStreams kStreams) {
|
||||
final Map<String, Object> details = new HashMap<>();
|
||||
if (kStreams.state().isRunning()) {
|
||||
for (ThreadMetadata metadata : kStreams.localThreadsMetadata()) {
|
||||
details.put("threadName", metadata.threadName());
|
||||
details.put("threadState", metadata.threadState());
|
||||
details.put("activeTasks", taskDetails(metadata.activeTasks()));
|
||||
details.put("standbyTasks", taskDetails(metadata.standbyTasks()));
|
||||
}
|
||||
}
|
||||
return details;
|
||||
}
|
||||
|
||||
private static Map<String, Object> taskDetails(Set<TaskMetadata> taskMetadata) {
|
||||
final Map<String, Object> details = new HashMap<>();
|
||||
for (TaskMetadata metadata : taskMetadata) {
|
||||
details.put("taskId", metadata.taskId());
|
||||
details.put("partitions",
|
||||
metadata.topicPartitions().stream().map(
|
||||
p -> "partition=" + p.partition() + ", topic=" + p.topic())
|
||||
.collect(Collectors.toList()));
|
||||
}
|
||||
return details;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import org.springframework.boot.actuate.autoconfigure.health.ConditionalOnEnabledHealthIndicator;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
/**
|
||||
* Configuration class for Kafka-streams binder health indicator beans.
|
||||
*
|
||||
* @author Arnaud Jardiné
|
||||
*/
|
||||
@Configuration
|
||||
@ConditionalOnClass(name = "org.springframework.boot.actuate.health.HealthIndicator")
|
||||
@ConditionalOnEnabledHealthIndicator("binders")
|
||||
class KafkaStreamsBinderHealthIndicatorConfiguration {
|
||||
|
||||
@Bean
|
||||
@ConditionalOnBean(KafkaStreamsRegistry.class)
|
||||
KafkaStreamsBinderHealthIndicator kafkaStreamsBinderHealthIndicator(
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
return new KafkaStreamsBinderHealthIndicator(kafkaStreamsRegistry);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -31,26 +31,19 @@ import org.springframework.beans.factory.ObjectProvider;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.function.context.FunctionCatalog;
|
||||
import org.springframework.cloud.stream.binder.BinderConfiguration;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.serde.CompositeNonNativeSerde;
|
||||
import org.springframework.cloud.stream.binding.BindableProxyFactory;
|
||||
import org.springframework.cloud.stream.binding.BindingService;
|
||||
import org.springframework.cloud.stream.binding.StreamListenerResultAdapter;
|
||||
import org.springframework.cloud.stream.config.BinderProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceConfiguration;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.cloud.stream.converter.CompositeMessageConverterFactory;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.core.env.ConfigurableEnvironment;
|
||||
import org.springframework.core.env.Environment;
|
||||
import org.springframework.core.env.MapPropertySource;
|
||||
import org.springframework.kafka.config.KafkaStreamsConfiguration;
|
||||
import org.springframework.kafka.core.CleanupConfig;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -68,142 +61,71 @@ import org.springframework.util.StringUtils;
|
||||
@AutoConfigureAfter(BindingServiceConfiguration.class)
|
||||
public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
|
||||
private static final String KSTREAM_BINDER_TYPE = "kstream";
|
||||
|
||||
private static final String KTABLE_BINDER_TYPE = "ktable";
|
||||
|
||||
private static final String GLOBALKTABLE_BINDER_TYPE = "globalktable";
|
||||
|
||||
@Bean
|
||||
@ConfigurationProperties(prefix = "spring.cloud.stream.kafka.streams.binder")
|
||||
public KafkaStreamsBinderConfigurationProperties binderConfigurationProperties(
|
||||
KafkaProperties kafkaProperties, ConfigurableEnvironment environment,
|
||||
BindingServiceProperties properties) {
|
||||
final Map<String, BinderConfiguration> binderConfigurations = getBinderConfigurations(
|
||||
properties);
|
||||
for (Map.Entry<String, BinderConfiguration> entry : binderConfigurations
|
||||
.entrySet()) {
|
||||
final BinderConfiguration binderConfiguration = entry.getValue();
|
||||
final String binderType = binderConfiguration.getBinderType();
|
||||
if (binderType != null && (binderType.equals(KSTREAM_BINDER_TYPE)
|
||||
|| binderType.equals(KTABLE_BINDER_TYPE)
|
||||
|| binderType.equals(GLOBALKTABLE_BINDER_TYPE))) {
|
||||
Map<String, Object> binderProperties = new HashMap<>();
|
||||
this.flatten(null, binderConfiguration.getProperties(), binderProperties);
|
||||
environment.getPropertySources().addFirst(
|
||||
new MapPropertySource("kafkaStreamsBinderEnv", binderProperties));
|
||||
}
|
||||
}
|
||||
public KafkaStreamsBinderConfigurationProperties binderConfigurationProperties(KafkaProperties kafkaProperties) {
|
||||
return new KafkaStreamsBinderConfigurationProperties(kafkaProperties);
|
||||
}
|
||||
|
||||
// TODO: Lifted from core - good candidate for exposing as a utility method in core.
|
||||
private static Map<String, BinderConfiguration> getBinderConfigurations(
|
||||
BindingServiceProperties properties) {
|
||||
|
||||
Map<String, BinderConfiguration> binderConfigurations = new HashMap<>();
|
||||
Map<String, BinderProperties> declaredBinders = properties.getBinders();
|
||||
|
||||
for (Map.Entry<String, BinderProperties> binderEntry : declaredBinders
|
||||
.entrySet()) {
|
||||
BinderProperties binderProperties = binderEntry.getValue();
|
||||
binderConfigurations.put(binderEntry.getKey(),
|
||||
new BinderConfiguration(binderProperties.getType(),
|
||||
binderProperties.getEnvironment(),
|
||||
binderProperties.isInheritEnvironment(),
|
||||
binderProperties.isDefaultCandidate()));
|
||||
}
|
||||
return binderConfigurations;
|
||||
}
|
||||
|
||||
// TODO: Lifted from core - good candidate for exposing as a utility method in core.
|
||||
@SuppressWarnings("unchecked")
|
||||
private void flatten(String propertyName, Object value,
|
||||
Map<String, Object> flattenedProperties) {
|
||||
if (value instanceof Map) {
|
||||
((Map<Object, Object>) value).forEach((k, v) -> flatten(
|
||||
(propertyName != null ? propertyName + "." : "") + k, v,
|
||||
flattenedProperties));
|
||||
}
|
||||
else {
|
||||
flattenedProperties.put(propertyName, value.toString());
|
||||
}
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaStreamsConfiguration kafkaStreamsConfiguration(
|
||||
KafkaStreamsBinderConfigurationProperties properties,
|
||||
Environment environment) {
|
||||
KafkaProperties kafkaProperties = properties.getKafkaProperties();
|
||||
public KafkaStreamsConfiguration kafkaStreamsConfiguration(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
Environment environment) {
|
||||
KafkaProperties kafkaProperties = binderConfigurationProperties.getKafkaProperties();
|
||||
Map<String, Object> streamsProperties = kafkaProperties.buildStreamsProperties();
|
||||
if (kafkaProperties.getStreams().getApplicationId() == null) {
|
||||
String applicationName = environment.getProperty("spring.application.name");
|
||||
if (applicationName != null) {
|
||||
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG,
|
||||
applicationName);
|
||||
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationName);
|
||||
}
|
||||
}
|
||||
return new KafkaStreamsConfiguration(streamsProperties);
|
||||
}
|
||||
|
||||
@Bean("streamConfigGlobalProperties")
|
||||
public Map<String, Object> streamConfigGlobalProperties(
|
||||
KafkaStreamsBinderConfigurationProperties configProperties,
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration) {
|
||||
public Map<String, Object> streamConfigGlobalProperties(KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration) {
|
||||
|
||||
Properties properties = kafkaStreamsConfiguration.asProperties();
|
||||
// Override Spring Boot bootstrap server setting if left to default with the value
|
||||
// configured in the binder
|
||||
if (ObjectUtils.isEmpty(properties.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG))) {
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
configProperties.getKafkaConnectionString());
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, binderConfigurationProperties.getKafkaConnectionString());
|
||||
}
|
||||
else {
|
||||
Object bootstrapServerConfig = properties
|
||||
.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
Object bootstrapServerConfig = properties.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
if (bootstrapServerConfig instanceof String) {
|
||||
@SuppressWarnings("unchecked")
|
||||
String bootStrapServers = (String) properties
|
||||
.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
|
||||
if (bootStrapServers.equals("localhost:9092")) {
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
configProperties.getKafkaConnectionString());
|
||||
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, binderConfigurationProperties.getKafkaConnectionString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String binderProvidedApplicationId = configProperties.getApplicationId();
|
||||
String binderProvidedApplicationId = binderConfigurationProperties.getApplicationId();
|
||||
if (StringUtils.hasText(binderProvidedApplicationId)) {
|
||||
properties.put(StreamsConfig.APPLICATION_ID_CONFIG,
|
||||
binderProvidedApplicationId);
|
||||
properties.put(StreamsConfig.APPLICATION_ID_CONFIG, binderProvidedApplicationId);
|
||||
}
|
||||
|
||||
properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG,
|
||||
Serdes.ByteArraySerde.class.getName());
|
||||
properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG,
|
||||
Serdes.ByteArraySerde.class.getName());
|
||||
properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
|
||||
properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
|
||||
|
||||
if (configProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
properties.put(
|
||||
StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
LogAndContinueExceptionHandler.class.getName());
|
||||
}
|
||||
else if (configProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
properties.put(
|
||||
StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
else if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
LogAndFailExceptionHandler.class.getName());
|
||||
}
|
||||
else if (configProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
|
||||
properties.put(
|
||||
StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
else if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
|
||||
properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
|
||||
SendToDlqAndContinue.class.getName());
|
||||
}
|
||||
|
||||
if (!ObjectUtils.isEmpty(configProperties.getConfiguration())) {
|
||||
properties.putAll(configProperties.getConfiguration());
|
||||
if (!ObjectUtils.isEmpty(binderConfigurationProperties.getConfiguration())) {
|
||||
properties.putAll(binderConfigurationProperties.getConfiguration());
|
||||
}
|
||||
return properties.entrySet().stream().collect(
|
||||
Collectors.toMap((e) -> String.valueOf(e.getKey()), Map.Entry::getValue));
|
||||
@@ -216,11 +138,8 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
|
||||
@Bean
|
||||
public KStreamStreamListenerParameterAdapter kstreamStreamListenerParameterAdapter(
|
||||
KafkaStreamsMessageConversionDelegate kstreamBoundMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
return new KStreamStreamListenerParameterAdapter(
|
||||
kstreamBoundMessageConversionDelegate,
|
||||
KafkaStreamsBindingInformationCatalogue);
|
||||
KafkaStreamsMessageConversionDelegate kstreamBoundMessageConversionDelegate, KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
return new KStreamStreamListenerParameterAdapter(kstreamBoundMessageConversionDelegate, KafkaStreamsBindingInformationCatalogue);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -232,31 +151,14 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
KStreamStreamListenerParameterAdapter kafkaStreamListenerParameterAdapter,
|
||||
Collection<StreamListenerResultAdapter> streamListenerResultAdapters,
|
||||
ObjectProvider<CleanupConfig> cleanupConfig) {
|
||||
return new KafkaStreamsStreamListenerSetupMethodOrchestrator(
|
||||
bindingServiceProperties, kafkaStreamsExtendedBindingProperties,
|
||||
keyValueSerdeResolver, kafkaStreamsBindingInformationCatalogue,
|
||||
return new KafkaStreamsStreamListenerSetupMethodOrchestrator(bindingServiceProperties,
|
||||
kafkaStreamsExtendedBindingProperties, keyValueSerdeResolver, kafkaStreamsBindingInformationCatalogue,
|
||||
kafkaStreamListenerParameterAdapter, streamListenerResultAdapters,
|
||||
cleanupConfig.getIfUnique());
|
||||
}
|
||||
|
||||
@Bean
|
||||
// @ConditionalOnProperty("spring.cloud.stream.kafka.streams.function.definition")
|
||||
@ConditionalOnProperty("spring.cloud.stream.function.definition")
|
||||
public KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
ObjectProvider<CleanupConfig> cleanupConfig,
|
||||
FunctionCatalog functionCatalog, BindableProxyFactory bindableProxyFactory) {
|
||||
return new KafkaStreamsFunctionProcessor(bindingServiceProperties, kafkaStreamsExtendedBindingProperties,
|
||||
keyValueSerdeResolver, kafkaStreamsBindingInformationCatalogue, kafkaStreamsMessageConversionDelegate,
|
||||
cleanupConfig.getIfUnique(), functionCatalog, bindableProxyFactory);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaStreamsMessageConversionDelegate messageConversionDelegate(
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory,
|
||||
public KafkaStreamsMessageConversionDelegate messageConversionDelegate(CompositeMessageConverterFactory compositeMessageConverterFactory,
|
||||
SendToDlqAndContinue sendToDlqAndContinue,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
@@ -265,29 +167,25 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
}
|
||||
|
||||
@Bean
|
||||
public CompositeNonNativeSerde compositeNonNativeSerde(
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
public CompositeNonNativeSerde compositeNonNativeSerde(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
return new CompositeNonNativeSerde(compositeMessageConverterFactory);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KStreamBoundElementFactory kStreamBoundElementFactory(
|
||||
BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
public KStreamBoundElementFactory kStreamBoundElementFactory(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue KafkaStreamsBindingInformationCatalogue) {
|
||||
return new KStreamBoundElementFactory(bindingServiceProperties,
|
||||
KafkaStreamsBindingInformationCatalogue);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KTableBoundElementFactory kTableBoundElementFactory(
|
||||
BindingServiceProperties bindingServiceProperties) {
|
||||
public KTableBoundElementFactory kTableBoundElementFactory(BindingServiceProperties bindingServiceProperties) {
|
||||
return new KTableBoundElementFactory(bindingServiceProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public GlobalKTableBoundElementFactory globalKTableBoundElementFactory(
|
||||
BindingServiceProperties properties) {
|
||||
return new GlobalKTableBoundElementFactory(properties);
|
||||
public GlobalKTableBoundElementFactory globalKTableBoundElementFactory(BindingServiceProperties bindingServiceProperties) {
|
||||
return new GlobalKTableBoundElementFactory(bindingServiceProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -302,24 +200,20 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
|
||||
@Bean
|
||||
@SuppressWarnings("unchecked")
|
||||
public KeyValueSerdeResolver keyValueSerdeResolver(
|
||||
@Qualifier("streamConfigGlobalProperties") Object streamConfigGlobalProperties,
|
||||
KafkaStreamsBinderConfigurationProperties properties) {
|
||||
return new KeyValueSerdeResolver(
|
||||
(Map<String, Object>) streamConfigGlobalProperties, properties);
|
||||
public KeyValueSerdeResolver keyValueSerdeResolver(@Qualifier("streamConfigGlobalProperties") Object streamConfigGlobalProperties,
|
||||
KafkaStreamsBinderConfigurationProperties kafkaStreamsBinderConfigurationProperties) {
|
||||
return new KeyValueSerdeResolver((Map<String, Object>) streamConfigGlobalProperties, kafkaStreamsBinderConfigurationProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public QueryableStoreRegistry queryableStoreTypeRegistry(
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
public QueryableStoreRegistry queryableStoreTypeRegistry(KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
return new QueryableStoreRegistry(kafkaStreamsRegistry);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public InteractiveQueryService interactiveQueryServices(
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry,
|
||||
KafkaStreamsBinderConfigurationProperties properties) {
|
||||
return new InteractiveQueryService(kafkaStreamsRegistry, properties);
|
||||
public InteractiveQueryService interactiveQueryServices(KafkaStreamsRegistry kafkaStreamsRegistry,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
return new InteractiveQueryService(kafkaStreamsRegistry, binderConfigurationProperties);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -328,10 +222,9 @@ public class KafkaStreamsBinderSupportAutoConfiguration {
|
||||
}
|
||||
|
||||
@Bean
|
||||
public StreamsBuilderFactoryManager streamsBuilderFactoryManager(
|
||||
KafkaStreamsBindingInformationCatalogue catalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
return new StreamsBuilderFactoryManager(catalogue, kafkaStreamsRegistry);
|
||||
public StreamsBuilderFactoryManager streamsBuilderFactoryManager(KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
return new StreamsBuilderFactoryManager(kafkaStreamsBindingInformationCatalogue, kafkaStreamsRegistry);
|
||||
}
|
||||
|
||||
@Bean("kafkaStreamsDlqDispatchers")
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -18,12 +18,18 @@ package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.beans.factory.config.MethodInvokingFactoryBean;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -37,50 +43,70 @@ final class KafkaStreamsBinderUtils {
|
||||
|
||||
}
|
||||
|
||||
static void prepareConsumerBinding(String name, String group,
|
||||
ApplicationContext context, KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
static void prepareConsumerBinding(String name, String group, ApplicationContext context,
|
||||
KafkaTopicProvisioner kafkaTopicProvisioner,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
|
||||
ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties,
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers) {
|
||||
ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties = new ExtendedConsumerProperties<>(
|
||||
properties.getExtension());
|
||||
if (binderConfigurationProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
|
||||
if (binderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.sendToDlq) {
|
||||
extendedConsumerProperties.getExtension().setEnableDlq(true);
|
||||
}
|
||||
|
||||
String[] inputTopics = StringUtils.commaDelimitedListToStringArray(name);
|
||||
for (String inputTopic : inputTopics) {
|
||||
kafkaTopicProvisioner.provisionConsumerDestination(inputTopic, group,
|
||||
extendedConsumerProperties);
|
||||
kafkaTopicProvisioner.provisionConsumerDestination(inputTopic, group, extendedConsumerProperties);
|
||||
}
|
||||
|
||||
if (extendedConsumerProperties.getExtension().isEnableDlq()) {
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = !StringUtils
|
||||
.isEmpty(extendedConsumerProperties.getExtension().getDlqName())
|
||||
? new KafkaStreamsDlqDispatch(
|
||||
extendedConsumerProperties.getExtension()
|
||||
.getDlqName(),
|
||||
binderConfigurationProperties,
|
||||
extendedConsumerProperties.getExtension())
|
||||
: null;
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = !StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName()) ?
|
||||
new KafkaStreamsDlqDispatch(extendedConsumerProperties.getExtension().getDlqName(), binderConfigurationProperties,
|
||||
extendedConsumerProperties.getExtension()) : null;
|
||||
for (String inputTopic : inputTopics) {
|
||||
if (StringUtils.isEmpty(
|
||||
extendedConsumerProperties.getExtension().getDlqName())) {
|
||||
if (StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName())) {
|
||||
String dlqName = "error." + inputTopic + "." + group;
|
||||
kafkaStreamsDlqDispatch = new KafkaStreamsDlqDispatch(dlqName,
|
||||
binderConfigurationProperties,
|
||||
kafkaStreamsDlqDispatch = new KafkaStreamsDlqDispatch(dlqName, binderConfigurationProperties,
|
||||
extendedConsumerProperties.getExtension());
|
||||
}
|
||||
|
||||
SendToDlqAndContinue sendToDlqAndContinue = context
|
||||
.getBean(SendToDlqAndContinue.class);
|
||||
sendToDlqAndContinue.addKStreamDlqDispatch(inputTopic,
|
||||
kafkaStreamsDlqDispatch);
|
||||
SendToDlqAndContinue sendToDlqAndContinue = context.getBean(SendToDlqAndContinue.class);
|
||||
sendToDlqAndContinue.addKStreamDlqDispatch(inputTopic, kafkaStreamsDlqDispatch);
|
||||
|
||||
kafkaStreamsDlqDispatchers.put(inputTopic, kafkaStreamsDlqDispatch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper lass for missing bean registration.
|
||||
*/
|
||||
static class KafkaStreamsMissingBeansRegistrar implements ImportBeanDefinitionRegistrar {
|
||||
|
||||
private static final String BEAN_NAME = "outerContext";
|
||||
|
||||
@Override
|
||||
public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
|
||||
BeanDefinitionRegistry registry) {
|
||||
if (registry.containsBeanDefinition(BEAN_NAME)) {
|
||||
|
||||
AbstractBeanDefinition configBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KafkaStreamsBinderConfigurationProperties.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KafkaStreamsBinderConfigurationProperties.class.getSimpleName(), configBean);
|
||||
|
||||
AbstractBeanDefinition catalogueBean = BeanDefinitionBuilder.genericBeanDefinition(MethodInvokingFactoryBean.class)
|
||||
.addPropertyReference("targetObject", BEAN_NAME)
|
||||
.addPropertyValue("targetMethod", "getBean")
|
||||
.addPropertyValue("arguments", KafkaStreamsBindingInformationCatalogue.class)
|
||||
.getBeanDefinition();
|
||||
|
||||
registry.registerBeanDefinition(KafkaStreamsBindingInformationCatalogue.class.getSimpleName(), catalogueBean);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -30,11 +30,10 @@ import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
|
||||
/**
|
||||
* A catalogue that provides binding information for Kafka Streams target types such as
|
||||
* KStream. It also keeps a catalogue for the underlying {@link StreamsBuilderFactoryBean}
|
||||
* and {@link StreamsConfig} associated with various
|
||||
* {@link org.springframework.cloud.stream.annotation.StreamListener} methods in the
|
||||
* {@link org.springframework.context.ApplicationContext}.
|
||||
* A catalogue that provides binding information for Kafka Streams target types such as KStream.
|
||||
* It also keeps a catalogue for the underlying {@link StreamsBuilderFactoryBean} and
|
||||
* {@link StreamsConfig} associated with various {@link org.springframework.cloud.stream.annotation.StreamListener}
|
||||
* methods in the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@@ -47,8 +46,9 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
private final Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = new HashSet<>();
|
||||
|
||||
/**
|
||||
* For a given bounded {@link KStream}, retrieve it's corresponding destination on the
|
||||
* broker.
|
||||
* For a given bounded {@link KStream}, retrieve it's corresponding destination
|
||||
* on the broker.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @return destination topic on Kafka
|
||||
*/
|
||||
@@ -59,6 +59,7 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
|
||||
* Is native decoding is enabled on this {@link KStream}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @return true if native decoding is enabled, fasle otherwise.
|
||||
*/
|
||||
@@ -72,6 +73,7 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
|
||||
* Is DLQ enabled for this {@link KStream}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @return true if DLQ is enabled, false otherwise.
|
||||
*/
|
||||
@@ -81,6 +83,7 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
|
||||
* Retrieve the content type associated with a given {@link KStream}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @return content Type associated.
|
||||
*/
|
||||
@@ -91,28 +94,28 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
|
||||
/**
|
||||
* Register a cache for bounded KStream -> {@link BindingProperties}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @param bindingProperties {@link BindingProperties} for this KStream
|
||||
*/
|
||||
void registerBindingProperties(KStream<?, ?> bindingTarget,
|
||||
BindingProperties bindingProperties) {
|
||||
void registerBindingProperties(KStream<?, ?> bindingTarget, BindingProperties bindingProperties) {
|
||||
this.bindingProperties.put(bindingTarget, bindingProperties);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a cache for bounded KStream -> {@link KafkaStreamsConsumerProperties}.
|
||||
*
|
||||
* @param bindingTarget binding target for KStream
|
||||
* @param kafkaStreamsConsumerProperties consumer properties for this KStream
|
||||
*/
|
||||
void registerConsumerProperties(KStream<?, ?> bindingTarget,
|
||||
KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties) {
|
||||
void registerConsumerProperties(KStream<?, ?> bindingTarget, KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties) {
|
||||
this.consumerProperties.put(bindingTarget, kafkaStreamsConsumerProperties);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a mapping for KStream -> {@link StreamsBuilderFactoryBean}.
|
||||
* @param streamsBuilderFactoryBean provides the {@link StreamsBuilderFactoryBean}
|
||||
* mapped to the KStream
|
||||
*
|
||||
* @param streamsBuilderFactoryBean provides the {@link StreamsBuilderFactoryBean} mapped to the KStream
|
||||
*/
|
||||
void addStreamBuilderFactory(StreamsBuilderFactoryBean streamsBuilderFactoryBean) {
|
||||
this.streamsBuilderFactoryBeans.add(streamsBuilderFactoryBean);
|
||||
@@ -121,5 +124,4 @@ class KafkaStreamsBindingInformationCatalogue {
|
||||
Set<StreamsBuilderFactoryBean> getStreamsBuilderFactoryBeans() {
|
||||
return this.streamsBuilderFactoryBeans;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -53,11 +53,10 @@ class KafkaStreamsDlqDispatch {
|
||||
private final String dlqName;
|
||||
|
||||
KafkaStreamsDlqDispatch(String dlqName,
|
||||
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
|
||||
KafkaConsumerProperties kafkaConsumerProperties) {
|
||||
KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties,
|
||||
KafkaConsumerProperties kafkaConsumerProperties) {
|
||||
ProducerFactory<byte[], byte[]> producerFactory = getProducerFactory(
|
||||
new ExtendedProducerProperties<>(
|
||||
kafkaConsumerProperties.getDlqProducerProperties()),
|
||||
new ExtendedProducerProperties<>(kafkaConsumerProperties.getDlqProducerProperties()),
|
||||
kafkaBinderConfigurationProperties);
|
||||
|
||||
this.kafkaTemplate = new KafkaTemplate<>(producerFactory);
|
||||
@@ -66,58 +65,55 @@ class KafkaStreamsDlqDispatch {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void sendToDlq(byte[] key, byte[] value, int partittion) {
|
||||
ProducerRecord<byte[], byte[]> producerRecord = new ProducerRecord<>(this.dlqName,
|
||||
partittion, key, value, null);
|
||||
ProducerRecord<byte[], byte[]> producerRecord = new ProducerRecord<>(this.dlqName, partittion,
|
||||
key, value, null);
|
||||
|
||||
StringBuilder sb = new StringBuilder().append(" a message with key='")
|
||||
.append(toDisplayString(ObjectUtils.nullSafeToString(key))).append("'")
|
||||
.append(" and payload='")
|
||||
.append(toDisplayString(ObjectUtils.nullSafeToString(value))).append("'")
|
||||
.append(" received from ").append(partittion);
|
||||
.append(toDisplayString(ObjectUtils.nullSafeToString(value)))
|
||||
.append("'").append(" received from ")
|
||||
.append(partittion);
|
||||
ListenableFuture<SendResult<byte[], byte[]>> sentDlq = null;
|
||||
try {
|
||||
sentDlq = this.kafkaTemplate.send(producerRecord);
|
||||
sentDlq.addCallback(
|
||||
new ListenableFutureCallback<SendResult<byte[], byte[]>>() {
|
||||
sentDlq.addCallback(new ListenableFutureCallback<SendResult<byte[], byte[]>>() {
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable ex) {
|
||||
KafkaStreamsDlqDispatch.this.logger
|
||||
.error("Error sending to DLQ " + sb.toString(), ex);
|
||||
}
|
||||
@Override
|
||||
public void onFailure(Throwable ex) {
|
||||
KafkaStreamsDlqDispatch.this.logger.error(
|
||||
"Error sending to DLQ " + sb.toString(), ex);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSuccess(SendResult<byte[], byte[]> result) {
|
||||
if (KafkaStreamsDlqDispatch.this.logger.isDebugEnabled()) {
|
||||
KafkaStreamsDlqDispatch.this.logger
|
||||
.debug("Sent to DLQ " + sb.toString());
|
||||
}
|
||||
}
|
||||
});
|
||||
@Override
|
||||
public void onSuccess(SendResult<byte[], byte[]> result) {
|
||||
if (KafkaStreamsDlqDispatch.this.logger.isDebugEnabled()) {
|
||||
KafkaStreamsDlqDispatch.this.logger.debug(
|
||||
"Sent to DLQ " + sb.toString());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (Exception ex) {
|
||||
if (sentDlq == null) {
|
||||
KafkaStreamsDlqDispatch.this.logger
|
||||
.error("Error sending to DLQ " + sb.toString(), ex);
|
||||
KafkaStreamsDlqDispatch.this.logger.error(
|
||||
"Error sending to DLQ " + sb.toString(), ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private DefaultKafkaProducerFactory<byte[], byte[]> getProducerFactory(
|
||||
ExtendedProducerProperties<KafkaProducerProperties> producerProperties,
|
||||
KafkaBinderConfigurationProperties configurationProperties) {
|
||||
private DefaultKafkaProducerFactory<byte[], byte[]> getProducerFactory(ExtendedProducerProperties<KafkaProducerProperties> producerProperties,
|
||||
KafkaBinderConfigurationProperties configurationProperties) {
|
||||
Map<String, Object> props = new HashMap<>();
|
||||
props.put(ProducerConfig.RETRIES_CONFIG, 0);
|
||||
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
|
||||
props.put(ProducerConfig.ACKS_CONFIG, configurationProperties.getRequiredAcks());
|
||||
Map<String, Object> mergedConfig = configurationProperties
|
||||
.mergedProducerConfiguration();
|
||||
Map<String, Object> mergedConfig = configurationProperties.mergedProducerConfiguration();
|
||||
if (!ObjectUtils.isEmpty(mergedConfig)) {
|
||||
props.putAll(mergedConfig);
|
||||
}
|
||||
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
|
||||
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
configurationProperties.getKafkaConnectionString());
|
||||
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, configurationProperties.getKafkaConnectionString());
|
||||
}
|
||||
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BATCH_SIZE_CONFIG))) {
|
||||
props.put(ProducerConfig.BATCH_SIZE_CONFIG,
|
||||
@@ -134,10 +130,9 @@ class KafkaStreamsDlqDispatch {
|
||||
if (!ObjectUtils.isEmpty(producerProperties.getExtension().getConfiguration())) {
|
||||
props.putAll(producerProperties.getExtension().getConfiguration());
|
||||
}
|
||||
// Always send as byte[] on dlq (the same byte[] that the consumer received)
|
||||
//Always send as byte[] on dlq (the same byte[] that the consumer received)
|
||||
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
|
||||
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
ByteArraySerializer.class);
|
||||
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
|
||||
|
||||
return new DefaultKafkaProducerFactory<>(props);
|
||||
}
|
||||
@@ -148,5 +143,4 @@ class KafkaStreamsDlqDispatch {
|
||||
}
|
||||
return original.substring(0, 50) + "...";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,466 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.utils.Bytes;
|
||||
import org.apache.kafka.streams.StreamsBuilder;
|
||||
import org.apache.kafka.streams.StreamsConfig;
|
||||
import org.apache.kafka.streams.Topology;
|
||||
import org.apache.kafka.streams.kstream.Consumed;
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.state.KeyValueStore;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanInitializationException;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.cloud.function.context.FunctionCatalog;
|
||||
import org.springframework.cloud.function.core.FluxedFunction;
|
||||
import org.springframework.cloud.stream.binder.ConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsConsumerProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binding.BindableProxyFactory;
|
||||
import org.springframework.cloud.stream.binding.StreamListenerErrorMessages;
|
||||
import org.springframework.cloud.stream.config.BindingProperties;
|
||||
import org.springframework.cloud.stream.config.BindingServiceProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.kafka.config.KafkaStreamsConfiguration;
|
||||
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
import org.springframework.kafka.core.CleanupConfig;
|
||||
import org.springframework.messaging.MessageHeaders;
|
||||
import org.springframework.messaging.support.MessageBuilder;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
public class KafkaStreamsFunctionProcessor implements ApplicationContextAware {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(KafkaStreamsFunctionProcessor.class);
|
||||
|
||||
private final BindingServiceProperties bindingServiceProperties;
|
||||
private final Map<String, StreamsBuilderFactoryBean> methodStreamsBuilderFactoryBeanMap = new HashMap<>();
|
||||
private final KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties;
|
||||
private final KeyValueSerdeResolver keyValueSerdeResolver;
|
||||
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
|
||||
private final KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate;
|
||||
private final CleanupConfig cleanupConfig;
|
||||
private final FunctionCatalog functionCatalog;
|
||||
private final BindableProxyFactory bindableProxyFactory;
|
||||
|
||||
private ConfigurableApplicationContext applicationContext;
|
||||
|
||||
|
||||
public KafkaStreamsFunctionProcessor(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsMessageConversionDelegate kafkaStreamsMessageConversionDelegate,
|
||||
CleanupConfig cleanupConfig,
|
||||
FunctionCatalog functionCatalog,
|
||||
BindableProxyFactory bindableProxyFactory) {
|
||||
this.bindingServiceProperties = bindingServiceProperties;
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
this.keyValueSerdeResolver = keyValueSerdeResolver;
|
||||
this.kafkaStreamsBindingInformationCatalogue = kafkaStreamsBindingInformationCatalogue;
|
||||
this.kafkaStreamsMessageConversionDelegate = kafkaStreamsMessageConversionDelegate;
|
||||
this.cleanupConfig = cleanupConfig;
|
||||
this.functionCatalog = functionCatalog;
|
||||
this.bindableProxyFactory = bindableProxyFactory;
|
||||
}
|
||||
|
||||
private Map<String, ResolvableType> buildTypeMap(ResolvableType resolvableType) {
|
||||
final Set<String> inputs = new TreeSet<>(this.bindableProxyFactory.getInputs());
|
||||
|
||||
Map<String, ResolvableType> map = new LinkedHashMap<>();
|
||||
final Iterator<String> iterator = inputs.iterator();
|
||||
|
||||
if (iterator.hasNext()) {
|
||||
map.put(iterator.next(), resolvableType.getGeneric(0));
|
||||
ResolvableType generic = resolvableType.getGeneric(1);
|
||||
|
||||
while (iterator.hasNext() && generic != null) {
|
||||
if (generic.getRawClass() != null &&
|
||||
(generic.getRawClass().equals(Function.class) ||
|
||||
generic.getRawClass().equals(Consumer.class))) {
|
||||
map.put(iterator.next(), generic.getGeneric(0));
|
||||
}
|
||||
generic = generic.getGeneric(1);
|
||||
}
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void orchestrateStreamListenerSetupMethod(ResolvableType resolvableType, String functionName) {
|
||||
final Set<String> outputs = new TreeSet<>(this.bindableProxyFactory.getOutputs());
|
||||
|
||||
String[] methodAnnotatedOutboundNames = new String[outputs.size()];
|
||||
int j = 0;
|
||||
for (String output : outputs) {
|
||||
methodAnnotatedOutboundNames[j++] = output;
|
||||
}
|
||||
|
||||
final Map<String, ResolvableType> stringResolvableTypeMap = buildTypeMap(resolvableType);
|
||||
Object[] adaptedInboundArguments = adaptAndRetrieveInboundArguments(stringResolvableTypeMap, "foobar");
|
||||
try {
|
||||
if (resolvableType.getRawClass() != null && resolvableType.getRawClass().equals(Consumer.class)) {
|
||||
Consumer<Object> consumer = functionCatalog.lookup(Consumer.class, functionName);
|
||||
consumer.accept(adaptedInboundArguments[0]);
|
||||
}
|
||||
else {
|
||||
|
||||
Function<Object, Object> function = functionCatalog.lookup(Function.class, functionName);
|
||||
Object target = null;
|
||||
if (function instanceof FluxedFunction) {
|
||||
target = ((FluxedFunction) function).getTarget();
|
||||
}
|
||||
function = (Function) target;
|
||||
Object result = function.apply(adaptedInboundArguments[0]);
|
||||
int i = 1;
|
||||
while (result instanceof Function || result instanceof Consumer) {
|
||||
if (result instanceof Function) {
|
||||
result = ((Function) result).apply(adaptedInboundArguments[i]);
|
||||
}
|
||||
else {
|
||||
((Consumer) result).accept(adaptedInboundArguments[i]);
|
||||
result = null;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
if (result != null) {
|
||||
if (result.getClass().isArray()) {
|
||||
Assert.isTrue(methodAnnotatedOutboundNames.length == ((Object[]) result).length,
|
||||
"Result does not match with the number of declared outbounds");
|
||||
}
|
||||
else {
|
||||
Assert.isTrue(methodAnnotatedOutboundNames.length == 1,
|
||||
"Result does not match with the number of declared outbounds");
|
||||
}
|
||||
if (result.getClass().isArray()) {
|
||||
Object[] outboundKStreams = (Object[]) result;
|
||||
int k = 0;
|
||||
for (Object outboundKStream : outboundKStreams) {
|
||||
Object targetBean = this.applicationContext.getBean(methodAnnotatedOutboundNames[k++]);
|
||||
|
||||
KStreamBoundElementFactory.KStreamWrapper
|
||||
boundElement = (KStreamBoundElementFactory.KStreamWrapper) targetBean;
|
||||
boundElement.wrap((KStream) outboundKStream);
|
||||
}
|
||||
}
|
||||
else {
|
||||
Object targetBean = this.applicationContext.getBean(methodAnnotatedOutboundNames[0]);
|
||||
|
||||
KStreamBoundElementFactory.KStreamWrapper
|
||||
boundElement = (KStreamBoundElementFactory.KStreamWrapper) targetBean;
|
||||
boundElement.wrap((KStream) result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new BeanInitializationException("Cannot setup StreamListener for foobar", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Resolves each declared input binding into the concrete Kafka Streams inbound object
 * (KStream, KTable or GlobalKTable) expected by the user function, in declaration
 * order. For each input this: forces native decoding for table types, lazily creates
 * the per-function StreamsBuilderFactoryBean, resolves key/value Serdes and the auto
 * offset reset policy, builds the stream/table, and wraps the previously-bound proxy
 * target bean with the real object.
 *
 * @param stringResolvableTypeMap ordered map of input binding name to argument type
 *        (as produced by buildTypeMap)
 * @param functionName key under which the StreamsBuilderFactoryBean is cached
 * @return the resolved arguments, one per input, in map iteration order
 * @throws IllegalStateException if stream/table creation fails, or if a parameter type
 *         is not one of KStream/KTable/GlobalKTable
 */
@SuppressWarnings({"unchecked"})
private Object[] adaptAndRetrieveInboundArguments(Map<String, ResolvableType> stringResolvableTypeMap,
		String functionName) {
	Object[] arguments = new Object[stringResolvableTypeMap.size()];
	int i = 0;
	for (String input : stringResolvableTypeMap.keySet()) {
		Class<?> parameterType = stringResolvableTypeMap.get(input).getRawClass();

		if (input != null) {
			Assert.isInstanceOf(String.class, input, "Annotation value must be a String");
			Object targetBean = applicationContext.getBean(input);
			BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(input);
			// Table types never go through framework message conversion; decoding is
			// always done natively by the configured Serdes.
			enableNativeDecodingForKTableAlways(parameterType, bindingProperties);
			//Retrieve the StreamsConfig created for this method if available.
			//Otherwise, create the StreamsBuilderFactory and get the underlying config.
			if (!this.methodStreamsBuilderFactoryBeanMap.containsKey(functionName)) {
				buildStreamsBuilderAndRetrieveConfig(functionName, applicationContext, input);
			}
			try {
				StreamsBuilderFactoryBean streamsBuilderFactoryBean =
						this.methodStreamsBuilderFactoryBeanMap.get(functionName);
				StreamsBuilder streamsBuilder = streamsBuilderFactoryBean.getObject();
				KafkaStreamsConsumerProperties extendedConsumerProperties =
						this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(input);
				//get state store spec
				//KafkaStreamsStateStoreProperties spec = buildStateStoreSpec(method);
				Serde<?> keySerde = this.keyValueSerdeResolver.getInboundKeySerde(extendedConsumerProperties);
				Serde<?> valueSerde = this.keyValueSerdeResolver.getInboundValueSerde(
						bindingProperties.getConsumer(), extendedConsumerProperties);

				// Map the binder-level startOffset to the Kafka Streams reset policy;
				// stays null (broker/consumer default) when not configured.
				final KafkaConsumerProperties.StartOffset startOffset = extendedConsumerProperties.getStartOffset();
				Topology.AutoOffsetReset autoOffsetReset = null;
				if (startOffset != null) {
					switch (startOffset) {
						case earliest:
							autoOffsetReset = Topology.AutoOffsetReset.EARLIEST;
							break;
						case latest:
							autoOffsetReset = Topology.AutoOffsetReset.LATEST;
							break;
						default:
							break;
					}
				}
				if (extendedConsumerProperties.isResetOffsets()) {
					LOG.warn("Detected resetOffsets configured on binding " + input + ". "
							+ "Setting resetOffsets in Kafka Streams binder does not have any effect.");
				}

				if (parameterType.isAssignableFrom(KStream.class)) {
					KStream<?, ?> stream = getkStream(input, bindingProperties,
							streamsBuilder, keySerde, valueSerde, autoOffsetReset);
					KStreamBoundElementFactory.KStreamWrapper kStreamWrapper =
							(KStreamBoundElementFactory.KStreamWrapper) targetBean;
					//wrap the proxy created during the initial target type binding with real object (KStream)
					kStreamWrapper.wrap((KStream<Object, Object>) stream);
					this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);

					if (KStream.class.isAssignableFrom(stringResolvableTypeMap.get(input).getRawClass())) {
						// Second generic of KStream<K, V> is the value type the user
						// function expects; fall back to Object when unresolvable.
						final Class<?> valueClass =
								(stringResolvableTypeMap.get(input).getGeneric(1).getRawClass() != null)
										? (stringResolvableTypeMap.get(input).getGeneric(1).getRawClass()) : Object.class;
						if (this.kafkaStreamsBindingInformationCatalogue.isUseNativeDecoding(
								(KStream) kStreamWrapper)) {
							arguments[i] = stream;
						}
						else {
							// Framework conversion path: deserialize payloads to valueClass.
							arguments[i] = this.kafkaStreamsMessageConversionDelegate.deserializeOnInbound(
									valueClass, stream);
						}
					}

					if (arguments[i] == null) {
						arguments[i] = stream;
					}
					Assert.notNull(arguments[i], "problems..");
				}
				else if (parameterType.isAssignableFrom(KTable.class)) {
					String materializedAs = extendedConsumerProperties.getMaterializedAs();
					String bindingDestination = this.bindingServiceProperties.getBindingDestination(input);
					KTable<?, ?> table = getKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
							bindingDestination, autoOffsetReset);
					KTableBoundElementFactory.KTableWrapper kTableWrapper =
							(KTableBoundElementFactory.KTableWrapper) targetBean;
					//wrap the proxy created during the initial target type binding with real object (KTable)
					kTableWrapper.wrap((KTable<Object, Object>) table);
					this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
					arguments[i] = table;
				}
				else if (parameterType.isAssignableFrom(GlobalKTable.class)) {
					String materializedAs = extendedConsumerProperties.getMaterializedAs();
					String bindingDestination = this.bindingServiceProperties.getBindingDestination(input);
					GlobalKTable<?, ?> table = getGlobalKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
							bindingDestination, autoOffsetReset);
					GlobalKTableBoundElementFactory.GlobalKTableWrapper globalKTableWrapper =
							(GlobalKTableBoundElementFactory.GlobalKTableWrapper) targetBean;
					//wrap the proxy created during the initial target type binding with real object (KTable)
					globalKTableWrapper.wrap((GlobalKTable<Object, Object>) table);
					this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
					arguments[i] = table;
				}
				i++;
			}
			catch (Exception ex) {
				throw new IllegalStateException(ex);
			}
		}
		else {
			throw new IllegalStateException(StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
		}
	}
	return arguments;
}
|
||||
|
||||
private GlobalKTable<?, ?> getGlobalKTable(StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
|
||||
String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return materializedAs != null ?
|
||||
materializedAsGlobalKTable(streamsBuilder, bindingDestination, materializedAs,
|
||||
keySerde, valueSerde, autoOffsetReset) :
|
||||
streamsBuilder.globalTable(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde).withOffsetResetPolicy(autoOffsetReset));
|
||||
}
|
||||
|
||||
private KTable<?, ?> getKTable(StreamsBuilder streamsBuilder, Serde<?> keySerde, Serde<?> valueSerde,
|
||||
String materializedAs,
|
||||
String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return materializedAs != null ?
|
||||
materializedAs(streamsBuilder, bindingDestination, materializedAs, keySerde, valueSerde,
|
||||
autoOffsetReset) :
|
||||
streamsBuilder.table(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde).withOffsetResetPolicy(autoOffsetReset));
|
||||
}
|
||||
|
||||
private <K, V> KTable<K, V> materializedAs(StreamsBuilder streamsBuilder, String destination,
|
||||
String storeName, Serde<K> k, Serde<V> v,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return streamsBuilder.table(this.bindingServiceProperties.getBindingDestination(destination),
|
||||
Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset),
|
||||
getMaterialized(storeName, k, v));
|
||||
}
|
||||
|
||||
private <K, V> GlobalKTable<K, V> materializedAsGlobalKTable(StreamsBuilder streamsBuilder,
|
||||
String destination, String storeName,
|
||||
Serde<K> k, Serde<V> v,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return streamsBuilder.globalTable(this.bindingServiceProperties.getBindingDestination(destination),
|
||||
Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset),
|
||||
getMaterialized(storeName, k, v));
|
||||
}
|
||||
|
||||
private <K, V> Materialized<K, V, KeyValueStore<Bytes, byte[]>> getMaterialized(String storeName,
|
||||
Serde<K> k, Serde<V> v) {
|
||||
return Materialized.<K, V, KeyValueStore<Bytes, byte[]>>as(storeName)
|
||||
.withKeySerde(k)
|
||||
.withValueSerde(v);
|
||||
}
|
||||
|
||||
private KStream<?, ?> getkStream(String inboundName,
|
||||
BindingProperties bindingProperties,
|
||||
StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
String[] bindingTargets = StringUtils
|
||||
.commaDelimitedListToStringArray(this.bindingServiceProperties.getBindingDestination(inboundName));
|
||||
|
||||
KStream<?, ?> stream =
|
||||
streamsBuilder.stream(Arrays.asList(bindingTargets),
|
||||
Consumed.with(keySerde, valueSerde)
|
||||
.withOffsetResetPolicy(autoOffsetReset));
|
||||
final boolean nativeDecoding = this.bindingServiceProperties.getConsumerProperties(inboundName)
|
||||
.isUseNativeDecoding();
|
||||
if (nativeDecoding) {
|
||||
LOG.info("Native decoding is enabled for " + inboundName + ". " +
|
||||
"Inbound deserialization done at the broker.");
|
||||
}
|
||||
else {
|
||||
LOG.info("Native decoding is disabled for " + inboundName + ". " +
|
||||
"Inbound message conversion done by Spring Cloud Stream.");
|
||||
}
|
||||
|
||||
stream = stream.mapValues((value) -> {
|
||||
Object returnValue;
|
||||
String contentType = bindingProperties.getContentType();
|
||||
if (value != null && !StringUtils.isEmpty(contentType) && !nativeDecoding) {
|
||||
returnValue = MessageBuilder.withPayload(value)
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, contentType).build();
|
||||
}
|
||||
else {
|
||||
returnValue = value;
|
||||
}
|
||||
return returnValue;
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
private void enableNativeDecodingForKTableAlways(Class<?> parameterType, BindingProperties bindingProperties) {
|
||||
if (parameterType.isAssignableFrom(KTable.class) || parameterType.isAssignableFrom(GlobalKTable.class)) {
|
||||
if (bindingProperties.getConsumer() == null) {
|
||||
bindingProperties.setConsumer(new ConsumerProperties());
|
||||
}
|
||||
//No framework level message conversion provided for KTable/GlobalKTable, its done by the broker.
|
||||
bindingProperties.getConsumer().setUseNativeDecoding(true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Creates (once per function) a {@link StreamsBuilderFactoryBean}, registers it as
 * the bean "stream-builder-&lt;functionName&gt;" and caches it in
 * {@code methodStreamsBuilderFactoryBeanMap}. Its StreamsConfig is assembled from the
 * global binder properties plus binding-level overrides: the extended configuration
 * map, application.id, and concurrency (mapped to num.stream.threads).
 *
 * @param functionName name of the function this builder belongs to
 * @param applicationContext context used for bean lookups
 * @param inboundName input binding whose extended properties supply the overrides
 */
@SuppressWarnings({"unchecked"})
private void buildStreamsBuilderAndRetrieveConfig(String functionName, ApplicationContext applicationContext,
		String inboundName) {
	ConfigurableListableBeanFactory beanFactory = this.applicationContext.getBeanFactory();

	Map<String, Object> streamConfigGlobalProperties = applicationContext.getBean("streamConfigGlobalProperties",
			Map.class);

	KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties
			.getExtendedConsumerProperties(inboundName);
	streamConfigGlobalProperties.putAll(extendedConsumerProperties.getConfiguration());

	String applicationId = extendedConsumerProperties.getApplicationId();
	//override application.id if set at the individual binding level.
	if (StringUtils.hasText(applicationId)) {
		streamConfigGlobalProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
	}

	int concurrency = this.bindingServiceProperties.getConsumerProperties(inboundName).getConcurrency();
	// override concurrency if set at the individual binding level.
	if (concurrency > 1) {
		streamConfigGlobalProperties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, concurrency);
	}

	Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers = applicationContext.getBean(
			"kafkaStreamsDlqDispatchers", Map.class);

	// Anonymous subclass exposes the DLQ dispatchers through the StreamsConfig
	// properties so they are reachable wherever the config is consumed.
	KafkaStreamsConfiguration kafkaStreamsConfiguration =
			new KafkaStreamsConfiguration(streamConfigGlobalProperties) {
				@Override
				public Properties asProperties() {
					Properties properties = super.asProperties();
					properties.put(SendToDlqAndContinue.KAFKA_STREAMS_DLQ_DISPATCHERS, kafkaStreamsDlqDispatchers);
					return properties;
				}
			};

	StreamsBuilderFactoryBean streamsBuilder = this.cleanupConfig == null
			? new StreamsBuilderFactoryBean(kafkaStreamsConfiguration)
			: new StreamsBuilderFactoryBean(kafkaStreamsConfiguration, this.cleanupConfig);
	// Started later by the binder lifecycle, not on context refresh.
	streamsBuilder.setAutoStartup(false);
	BeanDefinition streamsBuilderBeanDefinition =
			BeanDefinitionBuilder.genericBeanDefinition(
					(Class<StreamsBuilderFactoryBean>) streamsBuilder.getClass(), () -> streamsBuilder)
					.getRawBeanDefinition();
	((BeanDefinitionRegistry) beanFactory).registerBeanDefinition("stream-builder-" +
			functionName, streamsBuilderBeanDefinition);
	// "&" prefix retrieves the FactoryBean itself rather than the object it creates.
	StreamsBuilderFactoryBean streamsBuilderX = applicationContext.getBean("&stream-builder-" +
			functionName, StreamsBuilderFactoryBean.class);
	this.methodStreamsBuilderFactoryBeanMap.put(functionName, streamsBuilderX);
}
|
||||
|
||||
@Override
|
||||
public final void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
this.applicationContext = (ConfigurableApplicationContext) applicationContext;
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -38,18 +38,17 @@ import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Delegate for handling all framework level message conversions inbound and outbound on
|
||||
* {@link KStream}. If native encoding is not enabled, then serialization will be
|
||||
* performed on outbound messages based on a contentType. Similarly, if native decoding is
|
||||
* not enabled, deserialization will be performed on inbound messages based on a
|
||||
* contentType. Based on the contentType, a {@link MessageConverter} will be resolved.
|
||||
* Delegate for handling all framework level message conversions inbound and outbound on {@link KStream}.
|
||||
* If native encoding is not enabled, then serialization will be performed on outbound messages based
|
||||
* on a contentType. Similarly, if native decoding is not enabled, deserialization will be performed on
|
||||
* inbound messages based on a contentType. Based on the contentType, a {@link MessageConverter} will
|
||||
* be resolved.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(KafkaStreamsMessageConversionDelegate.class);
|
||||
private static final Log LOG = LogFactory.getLog(KafkaStreamsMessageConversionDelegate.class);
|
||||
|
||||
private static final ThreadLocal<KeyValue<Object, Object>> keyValueThreadLocal = new ThreadLocal<>();
|
||||
|
||||
@@ -61,11 +60,10 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
private final KafkaStreamsBinderConfigurationProperties kstreamBinderConfigurationProperties;
|
||||
|
||||
KafkaStreamsMessageConversionDelegate(
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory,
|
||||
SendToDlqAndContinue sendToDlqAndContinue,
|
||||
KafkaStreamsBindingInformationCatalogue kstreamBindingInformationCatalogue,
|
||||
KafkaStreamsBinderConfigurationProperties kstreamBinderConfigurationProperties) {
|
||||
KafkaStreamsMessageConversionDelegate(CompositeMessageConverterFactory compositeMessageConverterFactory,
|
||||
SendToDlqAndContinue sendToDlqAndContinue,
|
||||
KafkaStreamsBindingInformationCatalogue kstreamBindingInformationCatalogue,
|
||||
KafkaStreamsBinderConfigurationProperties kstreamBinderConfigurationProperties) {
|
||||
this.compositeMessageConverterFactory = compositeMessageConverterFactory;
|
||||
this.sendToDlqAndContinue = sendToDlqAndContinue;
|
||||
this.kstreamBindingInformationCatalogue = kstreamBindingInformationCatalogue;
|
||||
@@ -74,103 +72,86 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
/**
|
||||
* Serialize {@link KStream} records on outbound based on contentType.
|
||||
*
|
||||
* @param outboundBindTarget outbound KStream target
|
||||
* @return serialized KStream
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public KStream serializeOnOutbound(KStream<?, ?> outboundBindTarget) {
|
||||
String contentType = this.kstreamBindingInformationCatalogue
|
||||
.getContentType(outboundBindTarget);
|
||||
MessageConverter messageConverter = this.compositeMessageConverterFactory
|
||||
.getMessageConverterForAllRegistered();
|
||||
String contentType = this.kstreamBindingInformationCatalogue.getContentType(outboundBindTarget);
|
||||
MessageConverter messageConverter = this.compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
|
||||
return outboundBindTarget.mapValues((v) -> {
|
||||
Message<?> message = v instanceof Message<?> ? (Message<?>) v
|
||||
: MessageBuilder.withPayload(v).build();
|
||||
Message<?> message = v instanceof Message<?> ? (Message<?>) v :
|
||||
MessageBuilder.withPayload(v).build();
|
||||
Map<String, Object> headers = new HashMap<>(message.getHeaders());
|
||||
if (!StringUtils.isEmpty(contentType)) {
|
||||
headers.put(MessageHeaders.CONTENT_TYPE, contentType);
|
||||
}
|
||||
MessageHeaders messageHeaders = new MessageHeaders(headers);
|
||||
return messageConverter.toMessage(message.getPayload(), messageHeaders)
|
||||
.getPayload();
|
||||
return
|
||||
messageConverter.toMessage(message.getPayload(),
|
||||
messageHeaders).getPayload();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Deserialize incoming {@link KStream} based on content type.
|
||||
*
|
||||
* @param valueClass on KStream value
|
||||
* @param bindingTarget inbound KStream target
|
||||
* @return deserialized KStream
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public KStream deserializeOnInbound(Class<?> valueClass,
|
||||
KStream<?, ?> bindingTarget) {
|
||||
MessageConverter messageConverter = this.compositeMessageConverterFactory
|
||||
.getMessageConverterForAllRegistered();
|
||||
public KStream deserializeOnInbound(Class<?> valueClass, KStream<?, ?> bindingTarget) {
|
||||
MessageConverter messageConverter = this.compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
final PerRecordContentTypeHolder perRecordContentTypeHolder = new PerRecordContentTypeHolder();
|
||||
|
||||
resolvePerRecordContentType(bindingTarget, perRecordContentTypeHolder);
|
||||
|
||||
// Deserialize using a branching strategy
|
||||
//Deserialize using a branching strategy
|
||||
KStream<?, ?>[] branch = bindingTarget.branch(
|
||||
// First filter where the message is converted and return true if
|
||||
// everything went well, return false otherwise.
|
||||
(o, o2) -> {
|
||||
boolean isValidRecord = false;
|
||||
//First filter where the message is converted and return true if everything went well, return false otherwise.
|
||||
(o, o2) -> {
|
||||
boolean isValidRecord = false;
|
||||
|
||||
try {
|
||||
// if the record is a tombstone, ignore and exit from processing
|
||||
// further.
|
||||
if (o2 != null) {
|
||||
if (o2 instanceof Message || o2 instanceof String
|
||||
|| o2 instanceof byte[]) {
|
||||
Message<?> m1 = null;
|
||||
if (o2 instanceof Message) {
|
||||
m1 = perRecordContentTypeHolder.contentType != null
|
||||
? MessageBuilder.fromMessage((Message<?>) o2)
|
||||
.setHeader(
|
||||
MessageHeaders.CONTENT_TYPE,
|
||||
perRecordContentTypeHolder.contentType)
|
||||
.build()
|
||||
: (Message<?>) o2;
|
||||
}
|
||||
else {
|
||||
m1 = perRecordContentTypeHolder.contentType != null
|
||||
? MessageBuilder.withPayload(o2).setHeader(
|
||||
MessageHeaders.CONTENT_TYPE,
|
||||
perRecordContentTypeHolder.contentType)
|
||||
.build()
|
||||
: MessageBuilder.withPayload(o2).build();
|
||||
}
|
||||
convertAndSetMessage(o, valueClass, messageConverter, m1);
|
||||
try {
|
||||
//if the record is a tombstone, ignore and exit from processing further.
|
||||
if (o2 != null) {
|
||||
if (o2 instanceof Message || o2 instanceof String || o2 instanceof byte[]) {
|
||||
Message<?> m1 = null;
|
||||
if (o2 instanceof Message) {
|
||||
m1 = perRecordContentTypeHolder.contentType != null
|
||||
? MessageBuilder.fromMessage((Message<?>) o2).setHeader(MessageHeaders.CONTENT_TYPE,
|
||||
perRecordContentTypeHolder.contentType).build() : (Message<?>) o2;
|
||||
}
|
||||
else {
|
||||
keyValueThreadLocal.set(new KeyValue<>(o, o2));
|
||||
m1 = perRecordContentTypeHolder.contentType != null ? MessageBuilder.withPayload(o2)
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, perRecordContentTypeHolder.contentType).build() : MessageBuilder.withPayload(o2).build();
|
||||
}
|
||||
isValidRecord = true;
|
||||
convertAndSetMessage(o, valueClass, messageConverter, m1);
|
||||
}
|
||||
else {
|
||||
LOG.info(
|
||||
"Received a tombstone record. This will be skipped from further processing.");
|
||||
keyValueThreadLocal.set(new KeyValue<>(o, o2));
|
||||
}
|
||||
isValidRecord = true;
|
||||
}
|
||||
catch (Exception e) {
|
||||
LOG.warn(
|
||||
"Deserialization has failed. This will be skipped from further processing.",
|
||||
e);
|
||||
// pass through
|
||||
else {
|
||||
LOG.info("Received a tombstone record. This will be skipped from further processing.");
|
||||
}
|
||||
return isValidRecord;
|
||||
},
|
||||
// second filter that catches any messages for which an exception thrown
|
||||
// in the first filter above.
|
||||
(k, v) -> true);
|
||||
// process errors from the second filter in the branch above.
|
||||
}
|
||||
catch (Exception ignored) {
|
||||
//pass through
|
||||
}
|
||||
return isValidRecord;
|
||||
},
|
||||
//second filter that catches any messages for which an exception thrown in the first filter above.
|
||||
(k, v) -> true
|
||||
);
|
||||
//process errors from the second filter in the branch above.
|
||||
processErrorFromDeserialization(bindingTarget, branch[1]);
|
||||
|
||||
// first branch above is the branch where the messages are converted, let it go
|
||||
// through further processing.
|
||||
//first branch above is the branch where the messages are converted, let it go through further processing.
|
||||
return branch[0].mapValues((o2) -> {
|
||||
Object objectValue = keyValueThreadLocal.get().value;
|
||||
keyValueThreadLocal.remove();
|
||||
@@ -179,8 +160,7 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private void resolvePerRecordContentType(KStream<?, ?> outboundBindTarget,
|
||||
PerRecordContentTypeHolder perRecordContentTypeHolder) {
|
||||
private void resolvePerRecordContentType(KStream<?, ?> outboundBindTarget, PerRecordContentTypeHolder perRecordContentTypeHolder) {
|
||||
outboundBindTarget.process(() -> new Processor() {
|
||||
|
||||
ProcessorContext context;
|
||||
@@ -193,14 +173,11 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
@Override
|
||||
public void process(Object key, Object value) {
|
||||
final Headers headers = this.context.headers();
|
||||
final Iterable<Header> contentTypes = headers
|
||||
.headers(MessageHeaders.CONTENT_TYPE);
|
||||
final Iterable<Header> contentTypes = headers.headers(MessageHeaders.CONTENT_TYPE);
|
||||
if (contentTypes != null && contentTypes.iterator().hasNext()) {
|
||||
final String contentType = new String(
|
||||
contentTypes.iterator().next().value());
|
||||
// remove leading and trailing quotes
|
||||
final String cleanContentType = StringUtils.replace(contentType, "\"",
|
||||
"");
|
||||
final String contentType = new String(contentTypes.iterator().next().value());
|
||||
//remove leading and trailing quotes
|
||||
final String cleanContentType = StringUtils.replace(contentType, "\"", "");
|
||||
perRecordContentTypeHolder.setContentType(cleanContentType);
|
||||
}
|
||||
}
|
||||
@@ -212,8 +189,7 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
});
|
||||
}
|
||||
|
||||
private void convertAndSetMessage(Object o, Class<?> valueClass,
|
||||
MessageConverter messageConverter, Message<?> msg) {
|
||||
private void convertAndSetMessage(Object o, Class<?> valueClass, MessageConverter messageConverter, Message<?> msg) {
|
||||
Object result = valueClass.isAssignableFrom(msg.getPayload().getClass())
|
||||
? msg.getPayload() : messageConverter.fromMessage(msg, valueClass);
|
||||
|
||||
@@ -223,8 +199,7 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private void processErrorFromDeserialization(KStream<?, ?> bindingTarget,
|
||||
KStream<?, ?> branch) {
|
||||
private void processErrorFromDeserialization(KStream<?, ?> bindingTarget, KStream<?, ?> branch) {
|
||||
branch.process(() -> new Processor() {
|
||||
ProcessorContext context;
|
||||
|
||||
@@ -235,35 +210,24 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
|
||||
@Override
|
||||
public void process(Object o, Object o2) {
|
||||
// Only continue if the record was not a tombstone.
|
||||
//Only continue if the record was not a tombstone.
|
||||
if (o2 != null) {
|
||||
if (KafkaStreamsMessageConversionDelegate.this.kstreamBindingInformationCatalogue
|
||||
.isDlqEnabled(bindingTarget)) {
|
||||
if (KafkaStreamsMessageConversionDelegate.this.kstreamBindingInformationCatalogue.isDlqEnabled(bindingTarget)) {
|
||||
String destination = this.context.topic();
|
||||
if (o2 instanceof Message) {
|
||||
Message message = (Message) o2;
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue
|
||||
.sendToDlq(destination, (byte[]) o,
|
||||
(byte[]) message.getPayload(),
|
||||
this.context.partition());
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue.sendToDlq(destination, (byte[]) o, (byte[]) message.getPayload(), this.context.partition());
|
||||
}
|
||||
else {
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue
|
||||
.sendToDlq(destination, (byte[]) o, (byte[]) o2,
|
||||
this.context.partition());
|
||||
KafkaStreamsMessageConversionDelegate.this.sendToDlqAndContinue.sendToDlq(destination, (byte[]) o, (byte[]) o2, this.context.partition());
|
||||
}
|
||||
}
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
throw new IllegalStateException("Inbound deserialization failed. "
|
||||
+ "Stopping further processing of records.");
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndFail) {
|
||||
throw new IllegalStateException("Inbound deserialization failed. Stopping further processing of records.");
|
||||
}
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties
|
||||
.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
// quietly passing through. No action needed, this is similar to
|
||||
// log and continue.
|
||||
LOG.error(
|
||||
"Inbound deserialization failed. Skipping this record and continuing.");
|
||||
else if (KafkaStreamsMessageConversionDelegate.this.kstreamBinderConfigurationProperties.getSerdeError() == KafkaStreamsBinderConfigurationProperties.SerdeError.logAndContinue) {
|
||||
//quietly passing through. No action needed, this is similar to log and continue.
|
||||
LOG.error("Inbound deserialization failed. Skipping this record and continuing.");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -282,7 +246,5 @@ public class KafkaStreamsMessageConversionDelegate {
|
||||
void setContentType(String contentType) {
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -37,10 +37,10 @@ class KafkaStreamsRegistry {
|
||||
|
||||
/**
|
||||
* Register the {@link KafkaStreams} object created in the application.
|
||||
*
|
||||
* @param kafkaStreams {@link KafkaStreams} object created in the application
|
||||
*/
|
||||
void registerKafkaStreams(KafkaStreams kafkaStreams) {
|
||||
this.kafkaStreams.add(kafkaStreams);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -17,11 +17,9 @@
|
||||
package org.springframework.cloud.stream.binder.kafka.streams;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
@@ -80,23 +78,20 @@ import org.springframework.util.StringUtils;
|
||||
/**
|
||||
* Kafka Streams specific implementation for {@link StreamListenerSetupMethodOrchestrator}
|
||||
* that overrides the default mechanisms for invoking StreamListener adapters.
|
||||
* <p>
|
||||
*
|
||||
* The orchestration primarily focus on the following areas:
|
||||
* <p>
|
||||
* 1. Allow multiple KStream output bindings (KStream branching) by allowing more than one
|
||||
* output values on {@link SendTo} 2. Allow multiple inbound bindings for multiple KStream
|
||||
* and or KTable/GlobalKTable types. 3. Each StreamListener method that it orchestrates
|
||||
* gets its own {@link StreamsBuilderFactoryBean} and {@link StreamsConfig}
|
||||
*
|
||||
* 1. Allow multiple KStream output bindings (KStream branching) by allowing more than one output values on {@link SendTo}
|
||||
* 2. Allow multiple inbound bindings for multiple KStream and or KTable/GlobalKTable types.
|
||||
* 3. Each StreamListener method that it orchestrates gets its own {@link StreamsBuilderFactoryBean} and {@link StreamsConfig}
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Lei Chen
|
||||
* @author Gary Russell
|
||||
*/
|
||||
class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
implements StreamListenerSetupMethodOrchestrator, ApplicationContextAware {
|
||||
class KafkaStreamsStreamListenerSetupMethodOrchestrator implements StreamListenerSetupMethodOrchestrator, ApplicationContextAware {
|
||||
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(KafkaStreamsStreamListenerSetupMethodOrchestrator.class);
|
||||
private static final Log LOG = LogFactory.getLog(KafkaStreamsStreamListenerSetupMethodOrchestrator.class);
|
||||
|
||||
private final StreamListenerParameterAdapter streamListenerParameterAdapter;
|
||||
|
||||
@@ -112,39 +107,36 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
|
||||
private final Map<Method, StreamsBuilderFactoryBean> methodStreamsBuilderFactoryBeanMap = new HashMap<>();
|
||||
|
||||
private final Map<Method, List<String>> registeredStoresPerMethod = new HashMap<>();
|
||||
|
||||
private final CleanupConfig cleanupConfig;
|
||||
|
||||
private ConfigurableApplicationContext applicationContext;
|
||||
|
||||
KafkaStreamsStreamListenerSetupMethodOrchestrator(
|
||||
BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsExtendedBindingProperties extendedBindingProperties,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsBindingInformationCatalogue bindingInformationCatalogue,
|
||||
StreamListenerParameterAdapter streamListenerParameterAdapter,
|
||||
Collection<StreamListenerResultAdapter> listenerResultAdapters,
|
||||
CleanupConfig cleanupConfig) {
|
||||
KafkaStreamsStreamListenerSetupMethodOrchestrator(BindingServiceProperties bindingServiceProperties,
|
||||
KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties,
|
||||
KeyValueSerdeResolver keyValueSerdeResolver,
|
||||
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
StreamListenerParameterAdapter streamListenerParameterAdapter,
|
||||
Collection<StreamListenerResultAdapter> streamListenerResultAdapters,
|
||||
CleanupConfig cleanupConfig) {
|
||||
this.bindingServiceProperties = bindingServiceProperties;
|
||||
this.kafkaStreamsExtendedBindingProperties = extendedBindingProperties;
|
||||
this.kafkaStreamsExtendedBindingProperties = kafkaStreamsExtendedBindingProperties;
|
||||
this.keyValueSerdeResolver = keyValueSerdeResolver;
|
||||
this.kafkaStreamsBindingInformationCatalogue = bindingInformationCatalogue;
|
||||
this.kafkaStreamsBindingInformationCatalogue = kafkaStreamsBindingInformationCatalogue;
|
||||
this.streamListenerParameterAdapter = streamListenerParameterAdapter;
|
||||
this.streamListenerResultAdapters = listenerResultAdapters;
|
||||
this.streamListenerResultAdapters = streamListenerResultAdapters;
|
||||
this.cleanupConfig = cleanupConfig;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supports(Method method) {
|
||||
return methodParameterSupports(method) && (methodReturnTypeSuppports(method)
|
||||
|| Void.TYPE.equals(method.getReturnType()));
|
||||
return methodParameterSupports(method) &&
|
||||
(methodReturnTypeSuppports(method) || Void.TYPE.equals(method.getReturnType()));
|
||||
}
|
||||
|
||||
private boolean methodReturnTypeSuppports(Method method) {
|
||||
Class<?> returnType = method.getReturnType();
|
||||
if (returnType.equals(KStream.class) || (returnType.isArray()
|
||||
&& returnType.getComponentType().equals(KStream.class))) {
|
||||
if (returnType.equals(KStream.class) ||
|
||||
(returnType.isArray() && returnType.getComponentType().equals(KStream.class))) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@@ -165,14 +157,12 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
|
||||
@Override
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
public void orchestrateStreamListenerSetupMethod(StreamListener streamListener,
|
||||
Method method, Object bean) {
|
||||
public void orchestrateStreamListenerSetupMethod(StreamListener streamListener, Method method, Object bean) {
|
||||
String[] methodAnnotatedOutboundNames = getOutboundBindingTargetNames(method);
|
||||
validateStreamListenerMethod(streamListener, method,
|
||||
methodAnnotatedOutboundNames);
|
||||
validateStreamListenerMethod(streamListener, method, methodAnnotatedOutboundNames);
|
||||
String methodAnnotatedInboundName = streamListener.value();
|
||||
Object[] adaptedInboundArguments = adaptAndRetrieveInboundArguments(method,
|
||||
methodAnnotatedInboundName, this.applicationContext,
|
||||
Object[] adaptedInboundArguments = adaptAndRetrieveInboundArguments(method, methodAnnotatedInboundName,
|
||||
this.applicationContext,
|
||||
this.streamListenerParameterAdapter);
|
||||
try {
|
||||
ReflectionUtils.makeAccessible(method);
|
||||
@@ -183,8 +173,7 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
Object result = method.invoke(bean, adaptedInboundArguments);
|
||||
|
||||
if (result.getClass().isArray()) {
|
||||
Assert.isTrue(
|
||||
methodAnnotatedOutboundNames.length == ((Object[]) result).length,
|
||||
Assert.isTrue(methodAnnotatedOutboundNames.length == ((Object[]) result).length,
|
||||
"Result does not match with the number of declared outbounds");
|
||||
}
|
||||
else {
|
||||
@@ -195,24 +184,19 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
Object[] outboundKStreams = (Object[]) result;
|
||||
int i = 0;
|
||||
for (Object outboundKStream : outboundKStreams) {
|
||||
Object targetBean = this.applicationContext
|
||||
.getBean(methodAnnotatedOutboundNames[i++]);
|
||||
Object targetBean = this.applicationContext.getBean(methodAnnotatedOutboundNames[i++]);
|
||||
for (StreamListenerResultAdapter streamListenerResultAdapter : this.streamListenerResultAdapters) {
|
||||
if (streamListenerResultAdapter.supports(
|
||||
outboundKStream.getClass(), targetBean.getClass())) {
|
||||
streamListenerResultAdapter.adapt(outboundKStream,
|
||||
targetBean);
|
||||
if (streamListenerResultAdapter.supports(outboundKStream.getClass(), targetBean.getClass())) {
|
||||
streamListenerResultAdapter.adapt(outboundKStream, targetBean);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
Object targetBean = this.applicationContext
|
||||
.getBean(methodAnnotatedOutboundNames[0]);
|
||||
Object targetBean = this.applicationContext.getBean(methodAnnotatedOutboundNames[0]);
|
||||
for (StreamListenerResultAdapter streamListenerResultAdapter : this.streamListenerResultAdapters) {
|
||||
if (streamListenerResultAdapter.supports(result.getClass(),
|
||||
targetBean.getClass())) {
|
||||
if (streamListenerResultAdapter.supports(result.getClass(), targetBean.getClass())) {
|
||||
streamListenerResultAdapter.adapt(result, targetBean);
|
||||
break;
|
||||
}
|
||||
@@ -221,8 +205,7 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new BeanInitializationException(
|
||||
"Cannot setup StreamListener for " + method, ex);
|
||||
throw new BeanInitializationException("Cannot setup StreamListener for " + method, ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -230,131 +213,94 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public Object[] adaptAndRetrieveInboundArguments(Method method, String inboundName,
|
||||
ApplicationContext applicationContext,
|
||||
StreamListenerParameterAdapter... adapters) {
|
||||
StreamListenerParameterAdapter... streamListenerParameterAdapters) {
|
||||
Object[] arguments = new Object[method.getParameterTypes().length];
|
||||
for (int parameterIndex = 0; parameterIndex < arguments.length; parameterIndex++) {
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method,
|
||||
parameterIndex);
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method, parameterIndex);
|
||||
Class<?> parameterType = methodParameter.getParameterType();
|
||||
Object targetReferenceValue = null;
|
||||
if (methodParameter.hasParameterAnnotation(Input.class)) {
|
||||
targetReferenceValue = AnnotationUtils
|
||||
.getValue(methodParameter.getParameterAnnotation(Input.class));
|
||||
Input methodAnnotation = methodParameter
|
||||
.getParameterAnnotation(Input.class);
|
||||
targetReferenceValue = AnnotationUtils.getValue(methodParameter.getParameterAnnotation(Input.class));
|
||||
Input methodAnnotation = methodParameter.getParameterAnnotation(Input.class);
|
||||
inboundName = methodAnnotation.value();
|
||||
}
|
||||
else if (arguments.length == 1 && StringUtils.hasText(inboundName)) {
|
||||
targetReferenceValue = inboundName;
|
||||
}
|
||||
if (targetReferenceValue != null) {
|
||||
Assert.isInstanceOf(String.class, targetReferenceValue,
|
||||
"Annotation value must be a String");
|
||||
Object targetBean = applicationContext
|
||||
.getBean((String) targetReferenceValue);
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties
|
||||
.getBindingProperties(inboundName);
|
||||
Assert.isInstanceOf(String.class, targetReferenceValue, "Annotation value must be a String");
|
||||
Object targetBean = applicationContext.getBean((String) targetReferenceValue);
|
||||
BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(inboundName);
|
||||
enableNativeDecodingForKTableAlways(parameterType, bindingProperties);
|
||||
// Retrieve the StreamsConfig created for this method if available.
|
||||
// Otherwise, create the StreamsBuilderFactory and get the underlying
|
||||
// config.
|
||||
//Retrieve the StreamsConfig created for this method if available.
|
||||
//Otherwise, create the StreamsBuilderFactory and get the underlying config.
|
||||
if (!this.methodStreamsBuilderFactoryBeanMap.containsKey(method)) {
|
||||
buildStreamsBuilderAndRetrieveConfig(method, applicationContext,
|
||||
inboundName);
|
||||
buildStreamsBuilderAndRetrieveConfig(method, applicationContext, inboundName);
|
||||
}
|
||||
try {
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = this.methodStreamsBuilderFactoryBeanMap
|
||||
.get(method);
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = this.methodStreamsBuilderFactoryBeanMap.get(method);
|
||||
StreamsBuilder streamsBuilder = streamsBuilderFactoryBean.getObject();
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(inboundName);
|
||||
// get state store spec
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(inboundName);
|
||||
//get state store spec
|
||||
KafkaStreamsStateStoreProperties spec = buildStateStoreSpec(method);
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver
|
||||
.getInboundKeySerde(extendedConsumerProperties);
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getInboundValueSerde(
|
||||
bindingProperties.getConsumer(), extendedConsumerProperties);
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver.getInboundKeySerde(extendedConsumerProperties);
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getInboundValueSerde(bindingProperties.getConsumer(), extendedConsumerProperties);
|
||||
|
||||
final KafkaConsumerProperties.StartOffset startOffset = extendedConsumerProperties
|
||||
.getStartOffset();
|
||||
final KafkaConsumerProperties.StartOffset startOffset = extendedConsumerProperties.getStartOffset();
|
||||
Topology.AutoOffsetReset autoOffsetReset = null;
|
||||
if (startOffset != null) {
|
||||
switch (startOffset) {
|
||||
case earliest:
|
||||
autoOffsetReset = Topology.AutoOffsetReset.EARLIEST;
|
||||
case earliest : autoOffsetReset = Topology.AutoOffsetReset.EARLIEST;
|
||||
break;
|
||||
case latest:
|
||||
autoOffsetReset = Topology.AutoOffsetReset.LATEST;
|
||||
break;
|
||||
default:
|
||||
case latest : autoOffsetReset = Topology.AutoOffsetReset.LATEST;
|
||||
break;
|
||||
default: break;
|
||||
}
|
||||
}
|
||||
if (extendedConsumerProperties.isResetOffsets()) {
|
||||
LOG.warn("Detected resetOffsets configured on binding "
|
||||
+ inboundName + ". "
|
||||
LOG.warn("Detected resetOffsets configured on binding " + inboundName + ". "
|
||||
+ "Setting resetOffsets in Kafka Streams binder does not have any effect.");
|
||||
}
|
||||
|
||||
if (parameterType.isAssignableFrom(KStream.class)) {
|
||||
KStream<?, ?> stream = getkStream(inboundName, spec,
|
||||
bindingProperties, streamsBuilder, keySerde, valueSerde,
|
||||
autoOffsetReset);
|
||||
KStream<?, ?> stream = getkStream(inboundName, spec, bindingProperties,
|
||||
streamsBuilder, keySerde, valueSerde, autoOffsetReset);
|
||||
KStreamBoundElementFactory.KStreamWrapper kStreamWrapper = (KStreamBoundElementFactory.KStreamWrapper) targetBean;
|
||||
// wrap the proxy created during the initial target type binding
|
||||
// with real object (KStream)
|
||||
//wrap the proxy created during the initial target type binding with real object (KStream)
|
||||
kStreamWrapper.wrap((KStream<Object, Object>) stream);
|
||||
this.kafkaStreamsBindingInformationCatalogue
|
||||
.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
for (StreamListenerParameterAdapter streamListenerParameterAdapter : adapters) {
|
||||
if (streamListenerParameterAdapter.supports(stream.getClass(),
|
||||
methodParameter)) {
|
||||
arguments[parameterIndex] = streamListenerParameterAdapter
|
||||
.adapt(kStreamWrapper, methodParameter);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
for (StreamListenerParameterAdapter streamListenerParameterAdapter : streamListenerParameterAdapters) {
|
||||
if (streamListenerParameterAdapter.supports(stream.getClass(), methodParameter)) {
|
||||
arguments[parameterIndex] = streamListenerParameterAdapter.adapt(kStreamWrapper, methodParameter);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (arguments[parameterIndex] == null
|
||||
&& parameterType.isAssignableFrom(stream.getClass())) {
|
||||
if (arguments[parameterIndex] == null && parameterType.isAssignableFrom(stream.getClass())) {
|
||||
arguments[parameterIndex] = stream;
|
||||
}
|
||||
Assert.notNull(arguments[parameterIndex],
|
||||
"Cannot convert argument " + parameterIndex + " of "
|
||||
+ method + "from " + stream.getClass() + " to "
|
||||
+ parameterType);
|
||||
Assert.notNull(arguments[parameterIndex], "Cannot convert argument " + parameterIndex + " of " + method
|
||||
+ "from " + stream.getClass() + " to " + parameterType);
|
||||
}
|
||||
else if (parameterType.isAssignableFrom(KTable.class)) {
|
||||
String materializedAs = extendedConsumerProperties
|
||||
.getMaterializedAs();
|
||||
String bindingDestination = this.bindingServiceProperties
|
||||
.getBindingDestination(inboundName);
|
||||
KTable<?, ?> table = getKTable(streamsBuilder, keySerde,
|
||||
valueSerde, materializedAs, bindingDestination,
|
||||
autoOffsetReset);
|
||||
String materializedAs = extendedConsumerProperties.getMaterializedAs();
|
||||
String bindingDestination = this.bindingServiceProperties.getBindingDestination(inboundName);
|
||||
KTable<?, ?> table = getKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
|
||||
bindingDestination, autoOffsetReset);
|
||||
KTableBoundElementFactory.KTableWrapper kTableWrapper = (KTableBoundElementFactory.KTableWrapper) targetBean;
|
||||
// wrap the proxy created during the initial target type binding
|
||||
// with real object (KTable)
|
||||
//wrap the proxy created during the initial target type binding with real object (KTable)
|
||||
kTableWrapper.wrap((KTable<Object, Object>) table);
|
||||
this.kafkaStreamsBindingInformationCatalogue
|
||||
.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
arguments[parameterIndex] = table;
|
||||
}
|
||||
else if (parameterType.isAssignableFrom(GlobalKTable.class)) {
|
||||
String materializedAs = extendedConsumerProperties
|
||||
.getMaterializedAs();
|
||||
String bindingDestination = this.bindingServiceProperties
|
||||
.getBindingDestination(inboundName);
|
||||
GlobalKTable<?, ?> table = getGlobalKTable(streamsBuilder,
|
||||
keySerde, valueSerde, materializedAs, bindingDestination,
|
||||
autoOffsetReset);
|
||||
// @checkstyle:off
|
||||
String materializedAs = extendedConsumerProperties.getMaterializedAs();
|
||||
String bindingDestination = this.bindingServiceProperties.getBindingDestination(inboundName);
|
||||
GlobalKTable<?, ?> table = getGlobalKTable(streamsBuilder, keySerde, valueSerde, materializedAs,
|
||||
bindingDestination, autoOffsetReset);
|
||||
GlobalKTableBoundElementFactory.GlobalKTableWrapper globalKTableWrapper = (GlobalKTableBoundElementFactory.GlobalKTableWrapper) targetBean;
|
||||
// @checkstyle:on
|
||||
// wrap the proxy created during the initial target type binding
|
||||
// with real object (KTable)
|
||||
//wrap the proxy created during the initial target type binding with real object (KTable)
|
||||
globalKTableWrapper.wrap((GlobalKTable<Object, Object>) table);
|
||||
this.kafkaStreamsBindingInformationCatalogue
|
||||
.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
this.kafkaStreamsBindingInformationCatalogue.addStreamBuilderFactory(streamsBuilderFactoryBean);
|
||||
arguments[parameterIndex] = table;
|
||||
}
|
||||
}
|
||||
@@ -363,87 +309,67 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new IllegalStateException(
|
||||
StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
|
||||
throw new IllegalStateException(StreamListenerErrorMessages.INVALID_DECLARATIVE_METHOD_PARAMETERS);
|
||||
}
|
||||
}
|
||||
return arguments;
|
||||
}
|
||||
|
||||
private GlobalKTable<?, ?> getGlobalKTable(StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
|
||||
private GlobalKTable<?, ?> getGlobalKTable(StreamsBuilder streamsBuilder, Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
|
||||
String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return materializedAs != null
|
||||
? materializedAsGlobalKTable(streamsBuilder, bindingDestination,
|
||||
materializedAs, keySerde, valueSerde, autoOffsetReset)
|
||||
: streamsBuilder.globalTable(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde)
|
||||
.withOffsetResetPolicy(autoOffsetReset));
|
||||
return materializedAs != null ?
|
||||
materializedAsGlobalKTable(streamsBuilder, bindingDestination, materializedAs, keySerde, valueSerde, autoOffsetReset) :
|
||||
streamsBuilder.globalTable(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde).withOffsetResetPolicy(autoOffsetReset));
|
||||
}
|
||||
|
||||
private KTable<?, ?> getKTable(StreamsBuilder streamsBuilder, Serde<?> keySerde,
|
||||
Serde<?> valueSerde, String materializedAs, String bindingDestination,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return materializedAs != null
|
||||
? materializedAs(streamsBuilder, bindingDestination, materializedAs,
|
||||
keySerde, valueSerde, autoOffsetReset)
|
||||
: streamsBuilder.table(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde)
|
||||
.withOffsetResetPolicy(autoOffsetReset));
|
||||
private KTable<?, ?> getKTable(StreamsBuilder streamsBuilder, Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
|
||||
String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return materializedAs != null ?
|
||||
materializedAs(streamsBuilder, bindingDestination, materializedAs, keySerde, valueSerde, autoOffsetReset) :
|
||||
streamsBuilder.table(bindingDestination,
|
||||
Consumed.with(keySerde, valueSerde).withOffsetResetPolicy(autoOffsetReset));
|
||||
}
|
||||
|
||||
private <K, V> KTable<K, V> materializedAs(StreamsBuilder streamsBuilder,
|
||||
String destination, String storeName, Serde<K> k, Serde<V> v,
|
||||
private <K, V> KTable<K, V> materializedAs(StreamsBuilder streamsBuilder, String destination, String storeName, Serde<K> k, Serde<V> v,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return streamsBuilder.table(
|
||||
this.bindingServiceProperties.getBindingDestination(destination),
|
||||
return streamsBuilder.table(this.bindingServiceProperties.getBindingDestination(destination),
|
||||
Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset),
|
||||
getMaterialized(storeName, k, v));
|
||||
}
|
||||
|
||||
private <K, V> GlobalKTable<K, V> materializedAsGlobalKTable(
|
||||
StreamsBuilder streamsBuilder, String destination, String storeName,
|
||||
Serde<K> k, Serde<V> v, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return streamsBuilder.globalTable(
|
||||
this.bindingServiceProperties.getBindingDestination(destination),
|
||||
private <K, V> GlobalKTable<K, V> materializedAsGlobalKTable(StreamsBuilder streamsBuilder, String destination, String storeName, Serde<K> k, Serde<V> v,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
return streamsBuilder.globalTable(this.bindingServiceProperties.getBindingDestination(destination),
|
||||
Consumed.with(k, v).withOffsetResetPolicy(autoOffsetReset),
|
||||
getMaterialized(storeName, k, v));
|
||||
}
|
||||
|
||||
private <K, V> Materialized<K, V, KeyValueStore<Bytes, byte[]>> getMaterialized(
|
||||
String storeName, Serde<K> k, Serde<V> v) {
|
||||
private <K, V> Materialized<K, V, KeyValueStore<Bytes, byte[]>> getMaterialized(String storeName, Serde<K> k, Serde<V> v) {
|
||||
return Materialized.<K, V, KeyValueStore<Bytes, byte[]>>as(storeName)
|
||||
.withKeySerde(k).withValueSerde(v);
|
||||
.withKeySerde(k)
|
||||
.withValueSerde(v);
|
||||
}
|
||||
|
||||
private StoreBuilder buildStateStore(KafkaStreamsStateStoreProperties spec) {
|
||||
try {
|
||||
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver
|
||||
.getStateStoreKeySerde(spec.getKeySerdeString());
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver
|
||||
.getStateStoreValueSerde(spec.getValueSerdeString());
|
||||
Serde<?> keySerde = this.keyValueSerdeResolver.getStateStoreKeySerde(spec.getKeySerdeString());
|
||||
Serde<?> valueSerde = this.keyValueSerdeResolver.getStateStoreValueSerde(spec.getValueSerdeString());
|
||||
StoreBuilder builder;
|
||||
switch (spec.getType()) {
|
||||
case KEYVALUE:
|
||||
builder = Stores.keyValueStoreBuilder(
|
||||
Stores.persistentKeyValueStore(spec.getName()), keySerde,
|
||||
valueSerde);
|
||||
builder = Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(spec.getName()), keySerde, valueSerde);
|
||||
break;
|
||||
case WINDOW:
|
||||
builder = Stores
|
||||
.windowStoreBuilder(
|
||||
Stores.persistentWindowStore(spec.getName(),
|
||||
spec.getRetention(), 3, spec.getLength(), false),
|
||||
keySerde, valueSerde);
|
||||
builder = Stores.windowStoreBuilder(Stores.persistentWindowStore(spec.getName(), spec.getRetention(), 3, spec.getLength(), false),
|
||||
keySerde,
|
||||
valueSerde);
|
||||
break;
|
||||
case SESSION:
|
||||
builder = Stores.sessionStoreBuilder(Stores.persistentSessionStore(
|
||||
spec.getName(), spec.getRetention()), keySerde, valueSerde);
|
||||
builder = Stores.sessionStoreBuilder(Stores.persistentSessionStore(spec.getName(), spec.getRetention()), keySerde, valueSerde);
|
||||
break;
|
||||
default:
|
||||
throw new UnsupportedOperationException(
|
||||
"state store type (" + spec.getType() + ") is not supported!");
|
||||
throw new UnsupportedOperationException("state store type (" + spec.getType() + ") is not supported!");
|
||||
}
|
||||
if (spec.isCacheEnabled()) {
|
||||
builder = builder.withCachingEnabled();
|
||||
@@ -459,11 +385,10 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
}
|
||||
}
|
||||
|
||||
private KStream<?, ?> getkStream(String inboundName,
|
||||
KafkaStreamsStateStoreProperties storeSpec,
|
||||
BindingProperties bindingProperties, StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde,
|
||||
Topology.AutoOffsetReset autoOffsetReset) {
|
||||
private KStream<?, ?> getkStream(String inboundName, KafkaStreamsStateStoreProperties storeSpec,
|
||||
BindingProperties bindingProperties,
|
||||
StreamsBuilder streamsBuilder,
|
||||
Serde<?> keySerde, Serde<?> valueSerde, Topology.AutoOffsetReset autoOffsetReset) {
|
||||
if (storeSpec != null) {
|
||||
StoreBuilder storeBuilder = buildStateStore(storeSpec);
|
||||
streamsBuilder.addStateStore(storeBuilder);
|
||||
@@ -471,21 +396,19 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
LOG.info("state store " + storeBuilder.name() + " added to topology");
|
||||
}
|
||||
}
|
||||
String[] bindingTargets = StringUtils.commaDelimitedListToStringArray(
|
||||
this.bindingServiceProperties.getBindingDestination(inboundName));
|
||||
String[] bindingTargets = StringUtils
|
||||
.commaDelimitedListToStringArray(this.bindingServiceProperties.getBindingDestination(inboundName));
|
||||
|
||||
KStream<?, ?> stream = streamsBuilder.stream(Arrays.asList(bindingTargets),
|
||||
Consumed.with(keySerde, valueSerde)
|
||||
.withOffsetResetPolicy(autoOffsetReset));
|
||||
final boolean nativeDecoding = this.bindingServiceProperties
|
||||
.getConsumerProperties(inboundName).isUseNativeDecoding();
|
||||
KStream<?, ?> stream =
|
||||
streamsBuilder.stream(Arrays.asList(bindingTargets),
|
||||
Consumed.with(keySerde, valueSerde)
|
||||
.withOffsetResetPolicy(autoOffsetReset));
|
||||
final boolean nativeDecoding = this.bindingServiceProperties.getConsumerProperties(inboundName).isUseNativeDecoding();
|
||||
if (nativeDecoding) {
|
||||
LOG.info("Native decoding is enabled for " + inboundName
|
||||
+ ". Inbound deserialization done at the broker.");
|
||||
LOG.info("Native decoding is enabled for " + inboundName + ". Inbound deserialization done at the broker.");
|
||||
}
|
||||
else {
|
||||
LOG.info("Native decoding is disabled for " + inboundName
|
||||
+ ". Inbound message conversion done by Spring Cloud Stream.");
|
||||
LOG.info("Native decoding is disabled for " + inboundName + ". Inbound message conversion done by Spring Cloud Stream.");
|
||||
}
|
||||
|
||||
stream = stream.mapValues((value) -> {
|
||||
@@ -503,93 +426,72 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
return stream;
|
||||
}
|
||||
|
||||
private void enableNativeDecodingForKTableAlways(Class<?> parameterType,
|
||||
BindingProperties bindingProperties) {
|
||||
if (parameterType.isAssignableFrom(KTable.class)
|
||||
|| parameterType.isAssignableFrom(GlobalKTable.class)) {
|
||||
private void enableNativeDecodingForKTableAlways(Class<?> parameterType, BindingProperties bindingProperties) {
|
||||
if (parameterType.isAssignableFrom(KTable.class) || parameterType.isAssignableFrom(GlobalKTable.class)) {
|
||||
if (bindingProperties.getConsumer() == null) {
|
||||
bindingProperties.setConsumer(new ConsumerProperties());
|
||||
}
|
||||
// No framework level message conversion provided for KTable/GlobalKTable, its
|
||||
// done by the broker.
|
||||
//No framework level message conversion provided for KTable/GlobalKTable, its done by the broker.
|
||||
bindingProperties.getConsumer().setUseNativeDecoding(true);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
private void buildStreamsBuilderAndRetrieveConfig(Method method,
|
||||
ApplicationContext applicationContext, String inboundName) {
|
||||
ConfigurableListableBeanFactory beanFactory = this.applicationContext
|
||||
.getBeanFactory();
|
||||
private void buildStreamsBuilderAndRetrieveConfig(Method method, ApplicationContext applicationContext,
|
||||
String inboundName) {
|
||||
ConfigurableListableBeanFactory beanFactory = this.applicationContext.getBeanFactory();
|
||||
|
||||
Map<String, Object> streamConfigGlobalProperties = applicationContext
|
||||
.getBean("streamConfigGlobalProperties", Map.class);
|
||||
Map<String, Object> streamConfigGlobalProperties = applicationContext.getBean("streamConfigGlobalProperties", Map.class);
|
||||
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties
|
||||
.getExtendedConsumerProperties(inboundName);
|
||||
streamConfigGlobalProperties
|
||||
.putAll(extendedConsumerProperties.getConfiguration());
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties = this.kafkaStreamsExtendedBindingProperties.getExtendedConsumerProperties(inboundName);
|
||||
streamConfigGlobalProperties.putAll(extendedConsumerProperties.getConfiguration());
|
||||
|
||||
String applicationId = extendedConsumerProperties.getApplicationId();
|
||||
// override application.id if set at the individual binding level.
|
||||
//override application.id if set at the individual binding level.
|
||||
if (StringUtils.hasText(applicationId)) {
|
||||
streamConfigGlobalProperties.put(StreamsConfig.APPLICATION_ID_CONFIG,
|
||||
applicationId);
|
||||
streamConfigGlobalProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
|
||||
}
|
||||
|
||||
int concurrency = this.bindingServiceProperties.getConsumerProperties(inboundName)
|
||||
.getConcurrency();
|
||||
int concurrency = this.bindingServiceProperties.getConsumerProperties(inboundName).getConcurrency();
|
||||
// override concurrency if set at the individual binding level.
|
||||
if (concurrency > 1) {
|
||||
streamConfigGlobalProperties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG,
|
||||
concurrency);
|
||||
streamConfigGlobalProperties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, concurrency);
|
||||
}
|
||||
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers = applicationContext
|
||||
.getBean("kafkaStreamsDlqDispatchers", Map.class);
|
||||
Map<String, KafkaStreamsDlqDispatch> kafkaStreamsDlqDispatchers = applicationContext.getBean("kafkaStreamsDlqDispatchers", Map.class);
|
||||
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration = new KafkaStreamsConfiguration(
|
||||
streamConfigGlobalProperties) {
|
||||
KafkaStreamsConfiguration kafkaStreamsConfiguration = new KafkaStreamsConfiguration(streamConfigGlobalProperties) {
|
||||
@Override
|
||||
public Properties asProperties() {
|
||||
Properties properties = super.asProperties();
|
||||
properties.put(SendToDlqAndContinue.KAFKA_STREAMS_DLQ_DISPATCHERS,
|
||||
kafkaStreamsDlqDispatchers);
|
||||
properties.put(SendToDlqAndContinue.KAFKA_STREAMS_DLQ_DISPATCHERS, kafkaStreamsDlqDispatchers);
|
||||
return properties;
|
||||
}
|
||||
};
|
||||
|
||||
StreamsBuilderFactoryBean streamsBuilder = this.cleanupConfig == null
|
||||
? new StreamsBuilderFactoryBean(kafkaStreamsConfiguration)
|
||||
: new StreamsBuilderFactoryBean(kafkaStreamsConfiguration,
|
||||
this.cleanupConfig);
|
||||
: new StreamsBuilderFactoryBean(kafkaStreamsConfiguration, this.cleanupConfig);
|
||||
streamsBuilder.setAutoStartup(false);
|
||||
BeanDefinition streamsBuilderBeanDefinition = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(
|
||||
(Class<StreamsBuilderFactoryBean>) streamsBuilder.getClass(),
|
||||
() -> streamsBuilder)
|
||||
.getRawBeanDefinition();
|
||||
((BeanDefinitionRegistry) beanFactory).registerBeanDefinition(
|
||||
"stream-builder-" + method.getName(), streamsBuilderBeanDefinition);
|
||||
StreamsBuilderFactoryBean streamsBuilderX = applicationContext.getBean(
|
||||
"&stream-builder-" + method.getName(), StreamsBuilderFactoryBean.class);
|
||||
BeanDefinition streamsBuilderBeanDefinition =
|
||||
BeanDefinitionBuilder.genericBeanDefinition((Class<StreamsBuilderFactoryBean>) streamsBuilder.getClass(), () -> streamsBuilder)
|
||||
.getRawBeanDefinition();
|
||||
((BeanDefinitionRegistry) beanFactory).registerBeanDefinition("stream-builder-" + method.getName(), streamsBuilderBeanDefinition);
|
||||
StreamsBuilderFactoryBean streamsBuilderX = applicationContext.getBean("&stream-builder-" + method.getName(), StreamsBuilderFactoryBean.class);
|
||||
this.methodStreamsBuilderFactoryBeanMap.put(method, streamsBuilderX);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void setApplicationContext(ApplicationContext applicationContext)
|
||||
throws BeansException {
|
||||
public final void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
this.applicationContext = (ConfigurableApplicationContext) applicationContext;
|
||||
}
|
||||
|
||||
private void validateStreamListenerMethod(StreamListener streamListener,
|
||||
Method method, String[] methodAnnotatedOutboundNames) {
|
||||
private void validateStreamListenerMethod(StreamListener streamListener, Method method, String[] methodAnnotatedOutboundNames) {
|
||||
String methodAnnotatedInboundName = streamListener.value();
|
||||
if (methodAnnotatedOutboundNames != null) {
|
||||
for (String s : methodAnnotatedOutboundNames) {
|
||||
if (StringUtils.hasText(s)) {
|
||||
Assert.isTrue(isDeclarativeOutput(method, s),
|
||||
"Method must be declarative");
|
||||
Assert.isTrue(isDeclarativeOutput(method, s), "Method must be declarative");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -597,11 +499,8 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
int methodArgumentsLength = method.getParameterTypes().length;
|
||||
|
||||
for (int parameterIndex = 0; parameterIndex < methodArgumentsLength; parameterIndex++) {
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method,
|
||||
parameterIndex);
|
||||
Assert.isTrue(
|
||||
isDeclarativeInput(methodAnnotatedInboundName, methodParameter),
|
||||
"Method must be declarative");
|
||||
MethodParameter methodParameter = MethodParameter.forExecutable(method, parameterIndex);
|
||||
Assert.isTrue(isDeclarativeInput(methodAnnotatedInboundName, methodParameter), "Method must be declarative");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -613,8 +512,7 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
if (returnType.isArray()) {
|
||||
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
|
||||
declarative = this.streamListenerResultAdapters.stream()
|
||||
.anyMatch((slpa) -> slpa.supports(returnType.getComponentType(),
|
||||
targetBeanClass));
|
||||
.anyMatch((slpa) -> slpa.supports(returnType.getComponentType(), targetBeanClass));
|
||||
return declarative;
|
||||
}
|
||||
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
|
||||
@@ -624,24 +522,10 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private boolean isDeclarativeInput(String targetBeanName,
|
||||
MethodParameter methodParameter) {
|
||||
if (!methodParameter.getParameterType().isAssignableFrom(Object.class)
|
||||
&& this.applicationContext.containsBean(targetBeanName)) {
|
||||
private boolean isDeclarativeInput(String targetBeanName, MethodParameter methodParameter) {
|
||||
if (!methodParameter.getParameterType().isAssignableFrom(Object.class) && this.applicationContext.containsBean(targetBeanName)) {
|
||||
Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
|
||||
if (targetBeanClass != null) {
|
||||
boolean supports = KStream.class.isAssignableFrom(targetBeanClass)
|
||||
&& KStream.class.isAssignableFrom(methodParameter.getParameterType());
|
||||
if (!supports) {
|
||||
supports = KTable.class.isAssignableFrom(targetBeanClass)
|
||||
&& KTable.class.isAssignableFrom(methodParameter.getParameterType());
|
||||
if (!supports) {
|
||||
supports = GlobalKTable.class.isAssignableFrom(targetBeanClass)
|
||||
&& GlobalKTable.class.isAssignableFrom(methodParameter.getParameterType());
|
||||
}
|
||||
}
|
||||
return supports;
|
||||
}
|
||||
return this.streamListenerParameterAdapter.supports(targetBeanClass, methodParameter);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
@@ -649,38 +533,30 @@ class KafkaStreamsStreamListenerSetupMethodOrchestrator
|
||||
private static String[] getOutboundBindingTargetNames(Method method) {
|
||||
SendTo sendTo = AnnotationUtils.findAnnotation(method, SendTo.class);
|
||||
if (sendTo != null) {
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(sendTo.value()),
|
||||
StreamListenerErrorMessages.ATLEAST_ONE_OUTPUT);
|
||||
Assert.isTrue(sendTo.value().length >= 1,
|
||||
"At least one outbound destination need to be provided.");
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(sendTo.value()), StreamListenerErrorMessages.ATLEAST_ONE_OUTPUT);
|
||||
Assert.isTrue(sendTo.value().length >= 1, "At least one outbound destination need to be provided.");
|
||||
return sendTo.value();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
private KafkaStreamsStateStoreProperties buildStateStoreSpec(Method method) {
|
||||
if (!this.registeredStoresPerMethod.containsKey(method)) {
|
||||
KafkaStreamsStateStore spec = AnnotationUtils.findAnnotation(method,
|
||||
KafkaStreamsStateStore.class);
|
||||
if (spec != null) {
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(spec.name()), "name cannot be empty");
|
||||
Assert.isTrue(spec.name().length() >= 1, "name cannot be empty.");
|
||||
this.registeredStoresPerMethod.put(method, new ArrayList<>());
|
||||
this.registeredStoresPerMethod.get(method).add(spec.name());
|
||||
KafkaStreamsStateStoreProperties props = new KafkaStreamsStateStoreProperties();
|
||||
props.setName(spec.name());
|
||||
props.setType(spec.type());
|
||||
props.setLength(spec.lengthMs());
|
||||
props.setKeySerdeString(spec.keySerde());
|
||||
props.setRetention(spec.retentionMs());
|
||||
props.setValueSerdeString(spec.valueSerde());
|
||||
props.setCacheEnabled(spec.cache());
|
||||
props.setLoggingDisabled(!spec.logging());
|
||||
return props;
|
||||
}
|
||||
private static KafkaStreamsStateStoreProperties buildStateStoreSpec(Method method) {
|
||||
KafkaStreamsStateStore spec = AnnotationUtils.findAnnotation(method, KafkaStreamsStateStore.class);
|
||||
if (spec != null) {
|
||||
Assert.isTrue(!ObjectUtils.isEmpty(spec.name()), "name cannot be empty");
|
||||
Assert.isTrue(spec.name().length() >= 1, "name cannot be empty.");
|
||||
KafkaStreamsStateStoreProperties props = new KafkaStreamsStateStoreProperties();
|
||||
props.setName(spec.name());
|
||||
props.setType(spec.type());
|
||||
props.setLength(spec.lengthMs());
|
||||
props.setKeySerdeString(spec.keySerde());
|
||||
props.setRetention(spec.retentionMs());
|
||||
props.setValueSerdeString(spec.valueSerde());
|
||||
props.setCacheEnabled(spec.cache());
|
||||
props.setLoggingDisabled(!spec.logging());
|
||||
return props;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -32,23 +32,20 @@ import org.springframework.util.StringUtils;
|
||||
/**
|
||||
* Resolver for key and value Serde.
|
||||
*
|
||||
* On the inbound, if native decoding is enabled, then any deserialization on the value is
|
||||
* handled by Kafka. First, we look for any key/value Serde set on the binding itself, if
|
||||
* that is not available then look at the common Serde set at the global level. If that
|
||||
* fails, it falls back to byte[]. If native decoding is disabled, then the binder will do
|
||||
* the deserialization on value and ignore any Serde set for value and rely on the
|
||||
* contentType provided. Keys are always deserialized at the broker.
|
||||
* On the inbound, if native decoding is enabled, then any deserialization on the value is handled by Kafka.
|
||||
* First, we look for any key/value Serde set on the binding itself, if that is not available then look at the
|
||||
* common Serde set at the global level. If that fails, it falls back to byte[].
|
||||
* If native decoding is disabled, then the binder will do the deserialization on value and ignore any Serde set for value
|
||||
* and rely on the contentType provided. Keys are always deserialized at the broker.
|
||||
*
|
||||
*
|
||||
* Same rules apply on the outbound. If native encoding is enabled, then value
|
||||
* serialization is done at the broker using any binder level Serde for value, if not
|
||||
* using common Serde, if not, then byte[]. If native encoding is disabled, then the
|
||||
* binder will do serialization using a contentType. Keys are always serialized by the
|
||||
* broker.
|
||||
* Same rules apply on the outbound. If native encoding is enabled, then value serialization is done at the broker using
|
||||
* any binder level Serde for value, if not using common Serde, if not, then byte[].
|
||||
* If native encoding is disabled, then the binder will do serialization using a contentType. Keys are always serialized
|
||||
* by the broker.
|
||||
*
|
||||
* For state store, use serdes class specified in
|
||||
* {@link org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore}
|
||||
* to create Serde accordingly.
|
||||
* {@link org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsStateStore} to create Serde accordingly.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Lei Chen
|
||||
@@ -60,19 +57,18 @@ class KeyValueSerdeResolver {
|
||||
private final KafkaStreamsBinderConfigurationProperties binderConfigurationProperties;
|
||||
|
||||
KeyValueSerdeResolver(Map<String, Object> streamConfigGlobalProperties,
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
KafkaStreamsBinderConfigurationProperties binderConfigurationProperties) {
|
||||
this.streamConfigGlobalProperties = streamConfigGlobalProperties;
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for inbound key.
|
||||
* @param extendedConsumerProperties binding level extended
|
||||
* {@link KafkaStreamsConsumerProperties}
|
||||
*
|
||||
* @param extendedConsumerProperties binding level extended {@link KafkaStreamsConsumerProperties}
|
||||
* @return configurd {@link Serde} for the inbound key.
|
||||
*/
|
||||
public Serde<?> getInboundKeySerde(
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
public Serde<?> getInboundKeySerde(KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
String keySerdeString = extendedConsumerProperties.getKeySerde();
|
||||
|
||||
return getKeySerde(keySerdeString);
|
||||
@@ -80,18 +76,18 @@ class KeyValueSerdeResolver {
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for inbound value.
|
||||
*
|
||||
* @param consumerProperties {@link ConsumerProperties} on binding
|
||||
* @param extendedConsumerProperties binding level extended
|
||||
* {@link KafkaStreamsConsumerProperties}
|
||||
* @param extendedConsumerProperties binding level extended {@link KafkaStreamsConsumerProperties}
|
||||
* @return configurd {@link Serde} for the inbound value.
|
||||
*/
|
||||
public Serde<?> getInboundValueSerde(ConsumerProperties consumerProperties,
|
||||
KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
public Serde<?> getInboundValueSerde(ConsumerProperties consumerProperties, KafkaStreamsConsumerProperties extendedConsumerProperties) {
|
||||
Serde<?> valueSerde;
|
||||
|
||||
String valueSerdeString = extendedConsumerProperties.getValueSerde();
|
||||
try {
|
||||
if (consumerProperties != null && consumerProperties.isUseNativeDecoding()) {
|
||||
if (consumerProperties != null &&
|
||||
consumerProperties.isUseNativeDecoding()) {
|
||||
valueSerde = getValueSerde(valueSerdeString);
|
||||
}
|
||||
else {
|
||||
@@ -107,6 +103,7 @@ class KeyValueSerdeResolver {
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for outbound key.
|
||||
*
|
||||
* @param properties binding level extended {@link KafkaStreamsProducerProperties}
|
||||
* @return configurd {@link Serde} for the outbound key.
|
||||
*/
|
||||
@@ -116,18 +113,16 @@ class KeyValueSerdeResolver {
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for outbound value.
|
||||
*
|
||||
* @param producerProperties {@link ProducerProperties} on binding
|
||||
* @param kafkaStreamsProducerProperties binding level extended
|
||||
* {@link KafkaStreamsProducerProperties}
|
||||
* @param kafkaStreamsProducerProperties binding level extended {@link KafkaStreamsProducerProperties}
|
||||
* @return configurd {@link Serde} for the outbound value.
|
||||
*/
|
||||
public Serde<?> getOutboundValueSerde(ProducerProperties producerProperties,
|
||||
KafkaStreamsProducerProperties kafkaStreamsProducerProperties) {
|
||||
public Serde<?> getOutboundValueSerde(ProducerProperties producerProperties, KafkaStreamsProducerProperties kafkaStreamsProducerProperties) {
|
||||
Serde<?> valueSerde;
|
||||
try {
|
||||
if (producerProperties.isUseNativeEncoding()) {
|
||||
valueSerde = getValueSerde(
|
||||
kafkaStreamsProducerProperties.getValueSerde());
|
||||
valueSerde = getValueSerde(kafkaStreamsProducerProperties.getValueSerde());
|
||||
}
|
||||
else {
|
||||
valueSerde = Serdes.ByteArray();
|
||||
@@ -142,6 +137,7 @@ class KeyValueSerdeResolver {
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for state store.
|
||||
*
|
||||
* @param keySerdeString serde class used for key
|
||||
* @return {@link Serde} for the state store key.
|
||||
*/
|
||||
@@ -151,6 +147,7 @@ class KeyValueSerdeResolver {
|
||||
|
||||
/**
|
||||
* Provide the {@link Serde} for state store value.
|
||||
*
|
||||
* @param valueSerdeString serde class used for value
|
||||
* @return {@link Serde} for the state store value.
|
||||
*/
|
||||
@@ -170,12 +167,8 @@ class KeyValueSerdeResolver {
|
||||
keySerde = Utils.newInstance(keySerdeString, Serde.class);
|
||||
}
|
||||
else {
|
||||
keySerde = this.binderConfigurationProperties.getConfiguration()
|
||||
.containsKey("default.key.serde")
|
||||
? Utils.newInstance(this.binderConfigurationProperties
|
||||
.getConfiguration().get("default.key.serde"),
|
||||
Serde.class)
|
||||
: Serdes.ByteArray();
|
||||
keySerde = this.binderConfigurationProperties.getConfiguration().containsKey("default.key.serde") ?
|
||||
Utils.newInstance(this.binderConfigurationProperties.getConfiguration().get("default.key.serde"), Serde.class) : Serdes.ByteArray();
|
||||
}
|
||||
keySerde.configure(this.streamConfigGlobalProperties, true);
|
||||
|
||||
@@ -186,21 +179,15 @@ class KeyValueSerdeResolver {
|
||||
return keySerde;
|
||||
}
|
||||
|
||||
private Serde<?> getValueSerde(String valueSerdeString)
|
||||
throws ClassNotFoundException {
|
||||
private Serde<?> getValueSerde(String valueSerdeString) throws ClassNotFoundException {
|
||||
Serde<?> valueSerde;
|
||||
if (StringUtils.hasText(valueSerdeString)) {
|
||||
valueSerde = Utils.newInstance(valueSerdeString, Serde.class);
|
||||
}
|
||||
else {
|
||||
valueSerde = this.binderConfigurationProperties.getConfiguration()
|
||||
.containsKey("default.value.serde")
|
||||
? Utils.newInstance(this.binderConfigurationProperties
|
||||
.getConfiguration().get("default.value.serde"),
|
||||
Serde.class)
|
||||
: Serdes.ByteArray();
|
||||
valueSerde = this.binderConfigurationProperties.getConfiguration().containsKey("default.value.serde") ?
|
||||
Utils.newInstance(this.binderConfigurationProperties.getConfiguration().get("default.value.serde"), Serde.class) : Serdes.ByteArray();
|
||||
}
|
||||
return valueSerde;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -21,8 +21,8 @@ import org.apache.kafka.streams.errors.InvalidStateStoreException;
|
||||
import org.apache.kafka.streams.state.QueryableStoreType;
|
||||
|
||||
/**
|
||||
* Registry that contains {@link QueryableStoreType}s those created from the user
|
||||
* applications.
|
||||
* Registry that contains {@link QueryableStoreType}s those created from
|
||||
* the user applications.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @author Renwei Han
|
||||
@@ -39,15 +39,14 @@ public class QueryableStoreRegistry {
|
||||
|
||||
/**
|
||||
* Retrieve and return a queryable store by name created in the application.
|
||||
*
|
||||
* @param storeName name of the queryable store
|
||||
* @param storeType type of the queryable store
|
||||
* @param <T> generic queryable store
|
||||
* @param <T> generic queryable store
|
||||
* @return queryable store.
|
||||
* @deprecated in favor of
|
||||
* {@link InteractiveQueryService#getQueryableStore(String, QueryableStoreType)}
|
||||
* @deprecated in favor of {@link InteractiveQueryService#getQueryableStore(String, QueryableStoreType)}
|
||||
*/
|
||||
public <T> T getQueryableStoreType(String storeName,
|
||||
QueryableStoreType<T> storeType) {
|
||||
public <T> T getQueryableStoreType(String storeName, QueryableStoreType<T> storeType) {
|
||||
|
||||
for (KafkaStreams kafkaStream : this.kafkaStreamsRegistry.getKafkaStreams()) {
|
||||
try {
|
||||
@@ -57,7 +56,7 @@ public class QueryableStoreRegistry {
|
||||
}
|
||||
}
|
||||
catch (InvalidStateStoreException ignored) {
|
||||
// pass through
|
||||
//pass through
|
||||
}
|
||||
}
|
||||
return null;
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -32,8 +32,8 @@ import org.apache.kafka.streams.processor.internals.StreamTask;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
/**
|
||||
* Custom implementation for {@link DeserializationExceptionHandler} that sends the
|
||||
* records in error to a DLQ topic, then continue stream processing on new records.
|
||||
* Custom implementation for {@link DeserializationExceptionHandler} that sends the records
|
||||
* in error to a DLQ topic, then continue stream processing on new records.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.0.0
|
||||
@@ -46,13 +46,14 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
public static final String KAFKA_STREAMS_DLQ_DISPATCHERS = "spring.cloud.stream.kafka.streams.dlq.dispatchers";
|
||||
|
||||
/**
|
||||
* DLQ dispatcher per topic in the application context. The key here is not the actual
|
||||
* DLQ topic but the incoming topic that caused the error.
|
||||
* DLQ dispatcher per topic in the application context. The key here is not the actual DLQ topic
|
||||
* but the incoming topic that caused the error.
|
||||
*/
|
||||
private Map<String, KafkaStreamsDlqDispatch> dlqDispatchers = new HashMap<>();
|
||||
|
||||
/**
|
||||
* For a given topic, send the key/value record to DLQ topic.
|
||||
*
|
||||
* @param topic incoming topic that caused the error
|
||||
* @param key to send
|
||||
* @param value to send
|
||||
@@ -65,23 +66,16 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public DeserializationHandlerResponse handle(ProcessorContext context,
|
||||
ConsumerRecord<byte[], byte[]> record, Exception exception) {
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = this.dlqDispatchers
|
||||
.get(record.topic());
|
||||
kafkaStreamsDlqDispatch.sendToDlq(record.key(), record.value(),
|
||||
record.partition());
|
||||
public DeserializationHandlerResponse handle(ProcessorContext context, ConsumerRecord<byte[], byte[]> record, Exception exception) {
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch = this.dlqDispatchers.get(record.topic());
|
||||
kafkaStreamsDlqDispatch.sendToDlq(record.key(), record.value(), record.partition());
|
||||
context.commit();
|
||||
|
||||
// The following conditional block should be reconsidered when we have a solution
|
||||
// for this SO problem:
|
||||
// The following conditional block should be reconsidered when we have a solution for this SO problem:
|
||||
// https://stackoverflow.com/questions/48470899/kafka-streams-deserialization-handler
|
||||
// Currently it seems like when deserialization error happens, there is no commits
|
||||
// happening and the
|
||||
// following code will use reflection to get access to the underlying
|
||||
// KafkaConsumer.
|
||||
// It works with Kafka 1.0.0, but there is no guarantee it will work in future
|
||||
// versions of kafka as
|
||||
// Currently it seems like when deserialization error happens, there is no commits happening and the
|
||||
// following code will use reflection to get access to the underlying KafkaConsumer.
|
||||
// It works with Kafka 1.0.0, but there is no guarantee it will work in future versions of kafka as
|
||||
// we access private fields by name using reflection, but it is a temporary fix.
|
||||
if (context instanceof ProcessorContextImpl) {
|
||||
ProcessorContextImpl processorContextImpl = (ProcessorContextImpl) context;
|
||||
@@ -93,13 +87,11 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
StreamTask streamTask = (StreamTask) taskField;
|
||||
Field consumer = ReflectionUtils.findField(StreamTask.class, "consumer");
|
||||
ReflectionUtils.makeAccessible(consumer);
|
||||
Object kafkaConsumerField = ReflectionUtils.getField(consumer,
|
||||
streamTask);
|
||||
Object kafkaConsumerField = ReflectionUtils.getField(consumer, streamTask);
|
||||
if (kafkaConsumerField.getClass().isAssignableFrom(KafkaConsumer.class)) {
|
||||
KafkaConsumer kafkaConsumer = (KafkaConsumer) kafkaConsumerField;
|
||||
final Map<TopicPartition, OffsetAndMetadata> consumedOffsetsAndMetadata = new HashMap<>();
|
||||
TopicPartition tp = new TopicPartition(record.topic(),
|
||||
record.partition());
|
||||
TopicPartition tp = new TopicPartition(record.topic(), record.partition());
|
||||
OffsetAndMetadata oam = new OffsetAndMetadata(record.offset() + 1);
|
||||
consumedOffsetsAndMetadata.put(tp, oam);
|
||||
kafkaConsumer.commitSync(consumedOffsetsAndMetadata);
|
||||
@@ -112,12 +104,10 @@ public class SendToDlqAndContinue implements DeserializationExceptionHandler {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void configure(Map<String, ?> configs) {
|
||||
this.dlqDispatchers = (Map<String, KafkaStreamsDlqDispatch>) configs
|
||||
.get(KAFKA_STREAMS_DLQ_DISPATCHERS);
|
||||
this.dlqDispatchers = (Map<String, KafkaStreamsDlqDispatch>) configs.get(KAFKA_STREAMS_DLQ_DISPATCHERS);
|
||||
}
|
||||
|
||||
void addKStreamDlqDispatch(String topic,
|
||||
KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch) {
|
||||
void addKStreamDlqDispatch(String topic, KafkaStreamsDlqDispatch kafkaStreamsDlqDispatch) {
|
||||
this.dlqDispatchers.put(topic, kafkaStreamsDlqDispatch);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -23,13 +23,13 @@ import org.springframework.kafka.KafkaException;
|
||||
import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
|
||||
/**
|
||||
* Iterate through all {@link StreamsBuilderFactoryBean} in the application context and
|
||||
* start them. As each one completes starting, register the associated KafkaStreams object
|
||||
* into {@link QueryableStoreRegistry}.
|
||||
* Iterate through all {@link StreamsBuilderFactoryBean} in the application context
|
||||
* and start them. As each one completes starting, register the associated KafkaStreams
|
||||
* object into {@link QueryableStoreRegistry}.
|
||||
*
|
||||
* This {@link SmartLifecycle} class ensures that the bean created from it is started very
|
||||
* late through the bootstrap process by setting the phase value closer to
|
||||
* Integer.MAX_VALUE. This is to guarantee that the {@link StreamsBuilderFactoryBean} on a
|
||||
* This {@link SmartLifecycle} class ensures that the bean created from it is started very late
|
||||
* through the bootstrap process by setting the phase value closer to Integer.MAX_VALUE.
|
||||
* This is to guarantee that the {@link StreamsBuilderFactoryBean} on a
|
||||
* {@link org.springframework.cloud.stream.annotation.StreamListener} method with multiple
|
||||
* bindings is only started after all the binding phases have completed successfully.
|
||||
*
|
||||
@@ -38,14 +38,12 @@ import org.springframework.kafka.config.StreamsBuilderFactoryBean;
|
||||
class StreamsBuilderFactoryManager implements SmartLifecycle {
|
||||
|
||||
private final KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue;
|
||||
|
||||
private final KafkaStreamsRegistry kafkaStreamsRegistry;
|
||||
|
||||
private volatile boolean running;
|
||||
|
||||
StreamsBuilderFactoryManager(
|
||||
KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
StreamsBuilderFactoryManager(KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
|
||||
KafkaStreamsRegistry kafkaStreamsRegistry) {
|
||||
this.kafkaStreamsBindingInformationCatalogue = kafkaStreamsBindingInformationCatalogue;
|
||||
this.kafkaStreamsRegistry = kafkaStreamsRegistry;
|
||||
}
|
||||
@@ -67,12 +65,10 @@ class StreamsBuilderFactoryManager implements SmartLifecycle {
|
||||
public synchronized void start() {
|
||||
if (!this.running) {
|
||||
try {
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue
|
||||
.getStreamsBuilderFactoryBeans();
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue.getStreamsBuilderFactoryBeans();
|
||||
for (StreamsBuilderFactoryBean streamsBuilderFactoryBean : streamsBuilderFactoryBeans) {
|
||||
streamsBuilderFactoryBean.start();
|
||||
this.kafkaStreamsRegistry.registerKafkaStreams(
|
||||
streamsBuilderFactoryBean.getKafkaStreams());
|
||||
this.kafkaStreamsRegistry.registerKafkaStreams(streamsBuilderFactoryBean.getKafkaStreams());
|
||||
}
|
||||
this.running = true;
|
||||
}
|
||||
@@ -83,22 +79,21 @@ class StreamsBuilderFactoryManager implements SmartLifecycle {
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void stop() {
|
||||
if (this.running) {
|
||||
try {
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue
|
||||
.getStreamsBuilderFactoryBeans();
|
||||
for (StreamsBuilderFactoryBean streamsBuilderFactoryBean : streamsBuilderFactoryBeans) {
|
||||
streamsBuilderFactoryBean.stop();
|
||||
public synchronized void stop() {
|
||||
if (this.running) {
|
||||
try {
|
||||
Set<StreamsBuilderFactoryBean> streamsBuilderFactoryBeans = this.kafkaStreamsBindingInformationCatalogue.getStreamsBuilderFactoryBeans();
|
||||
for (StreamsBuilderFactoryBean streamsBuilderFactoryBean : streamsBuilderFactoryBeans) {
|
||||
streamsBuilderFactoryBean.stop();
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
finally {
|
||||
this.running = false;
|
||||
}
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
finally {
|
||||
this.running = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -24,14 +24,12 @@ import org.springframework.cloud.stream.annotation.Output;
|
||||
/**
|
||||
* Bindable interface for {@link KStream} input and output.
|
||||
*
|
||||
* This interface can be used as a bindable interface with
|
||||
* {@link org.springframework.cloud.stream.annotation.EnableBinding} when both input and
|
||||
* output types are single KStream. In other scenarios where multiple types are required,
|
||||
* other similar bindable interfaces can be created and used. For example, there are cases
|
||||
* in which multiple KStreams are required on the outbound in the case of KStream
|
||||
* branching or multiple input types are required either in the form of multiple KStreams
|
||||
* and a combination of KStreams and KTables. In those cases, new bindable interfaces
|
||||
* compatible with the requirements must be created. Here are some examples.
|
||||
* This interface can be used as a bindable interface with {@link org.springframework.cloud.stream.annotation.EnableBinding}
|
||||
* when both input and output types are single KStream. In other scenarios where multiple types are required, other
|
||||
* similar bindable interfaces can be created and used. For example, there are cases in which multiple KStreams
|
||||
* are required on the outbound in the case of KStream branching or multiple input types are required either in the
|
||||
* form of multiple KStreams and a combination of KStreams and KTables. In those cases, new bindable interfaces compatible
|
||||
* with the requirements must be created. Here are some examples.
|
||||
*
|
||||
* <pre class="code">
|
||||
* interface KStreamBranchProcessor {
|
||||
@@ -75,6 +73,7 @@ public interface KafkaStreamsProcessor {
|
||||
|
||||
/**
|
||||
* Input binding.
|
||||
*
|
||||
* @return {@link Input} binding for {@link KStream} type.
|
||||
*/
|
||||
@Input("input")
|
||||
@@ -82,9 +81,9 @@ public interface KafkaStreamsProcessor {
|
||||
|
||||
/**
|
||||
* Output binding.
|
||||
*
|
||||
* @return {@link Output} binding for {@link KStream} type.
|
||||
*/
|
||||
@Output("output")
|
||||
KStream<?, ?> output();
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.annotations;
|
||||
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
@@ -26,23 +27,21 @@ import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStr
|
||||
/**
|
||||
* Interface for Kafka Stream state store.
|
||||
*
|
||||
* This interface can be used to inject a state store specification into KStream building
|
||||
* process so that the desired store can be built by StreamBuilder and added to topology
|
||||
* for later use by processors. This is particularly useful when need to combine stream
|
||||
* DSL with low level processor APIs. In those cases, if a writable state store is desired
|
||||
* in processors, it needs to be created using this annotation. Here is the example.
|
||||
* This interface can be used to inject a state store specification into KStream building process so
|
||||
* that the desired store can be built by StreamBuilder and added to topology for later use by processors.
|
||||
* This is particularly useful when need to combine stream DSL with low level processor APIs. In those cases,
|
||||
* if a writable state store is desired in processors, it needs to be created using this annotation.
|
||||
* Here is the example.
|
||||
*
|
||||
* <pre class="code">
|
||||
* @StreamListener("input")
|
||||
* @KafkaStreamsStateStore(name="mystate", type= KafkaStreamsStateStoreProperties.StoreType.WINDOW,
|
||||
* size=300000)
|
||||
* @KafkaStreamsStateStore(name="mystate", type= KafkaStreamsStateStoreProperties.StoreType.WINDOW, size=300000)
|
||||
* public void process(KStream<Object, Product> input) {
|
||||
* ......
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* With that, you should be able to read/write this state store in your
|
||||
* processor/transformer code.
|
||||
* With that, you should be able to read/write this state store in your processor/transformer code.
|
||||
*
|
||||
* <pre class="code">
|
||||
* new Processor<Object, Product>() {
|
||||
@@ -65,51 +64,57 @@ public @interface KafkaStreamsStateStore {
|
||||
|
||||
/**
|
||||
* Provides name of the state store.
|
||||
*
|
||||
* @return name of state store.
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* State store type.
|
||||
*
|
||||
* @return {@link KafkaStreamsStateStoreProperties.StoreType} of state store.
|
||||
*/
|
||||
KafkaStreamsStateStoreProperties.StoreType type() default KafkaStreamsStateStoreProperties.StoreType.KEYVALUE;
|
||||
|
||||
/**
|
||||
* Serde used for key.
|
||||
*
|
||||
* @return key serde of state store.
|
||||
*/
|
||||
String keySerde() default "org.apache.kafka.common.serialization.Serdes$StringSerde";
|
||||
|
||||
/**
|
||||
* Serde used for value.
|
||||
*
|
||||
* @return value serde of state store.
|
||||
*/
|
||||
String valueSerde() default "org.apache.kafka.common.serialization.Serdes$StringSerde";
|
||||
|
||||
/**
|
||||
* Length in milli-second of Windowed store window.
|
||||
*
|
||||
* @return length in milli-second of window(for windowed store).
|
||||
*/
|
||||
long lengthMs() default 0;
|
||||
|
||||
/**
|
||||
* Retention period for Windowed store windows.
|
||||
* @return the maximum period of time in milli-second to keep each window in this
|
||||
* store(for windowed store).
|
||||
*
|
||||
* @return the maximum period of time in milli-second to keep each window in this store(for windowed store).
|
||||
*/
|
||||
long retentionMs() default 0;
|
||||
|
||||
/**
|
||||
* Whether catching is enabled or not.
|
||||
*
|
||||
* @return whether caching should be enabled on the created store.
|
||||
*/
|
||||
boolean cache() default false;
|
||||
|
||||
/**
|
||||
* Whether logging is enabled or not.
|
||||
*
|
||||
* @return whether logging should be enabled on the created store.
|
||||
*/
|
||||
boolean logging() default true;
|
||||
|
||||
}
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsFunctionProcessor;
|
||||
import org.springframework.cloud.stream.function.StreamFunctionProperties;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@Configuration
|
||||
@ConditionalOnProperty("spring.cloud.stream.function.definition")
|
||||
@EnableConfigurationProperties(StreamFunctionProperties.class)
|
||||
public class KafkaStreamsFunctionAutoConfiguration {
|
||||
|
||||
@Bean
|
||||
public KafkaStreamsFunctionProcessorInvoker kafkaStreamsFunctionProcessorInvoker(
|
||||
KafkaStreamsFunctionBeanPostProcessor kafkaStreamsFunctionBeanPostProcessor,
|
||||
KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor,
|
||||
StreamFunctionProperties properties) {
|
||||
return new KafkaStreamsFunctionProcessorInvoker(kafkaStreamsFunctionBeanPostProcessor.getResolvableType(),
|
||||
properties.getDefinition(), kafkaStreamsFunctionProcessor);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public KafkaStreamsFunctionBeanPostProcessor kafkaStreamsFunctionBeanPostProcessor(
|
||||
StreamFunctionProperties properties) {
|
||||
return new KafkaStreamsFunctionBeanPostProcessor(properties);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanFactory;
|
||||
import org.springframework.beans.factory.BeanFactoryAware;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
|
||||
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||
import org.springframework.cloud.stream.function.StreamFunctionProperties;
|
||||
import org.springframework.core.ResolvableType;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1.0
|
||||
*
|
||||
*/
|
||||
class KafkaStreamsFunctionBeanPostProcessor implements InitializingBean, BeanFactoryAware {
|
||||
|
||||
private final StreamFunctionProperties kafkaStreamsFunctionProperties;
|
||||
private ConfigurableListableBeanFactory beanFactory;
|
||||
private ResolvableType resolvableType;
|
||||
|
||||
KafkaStreamsFunctionBeanPostProcessor(StreamFunctionProperties properties) {
|
||||
this.kafkaStreamsFunctionProperties = properties;
|
||||
}
|
||||
|
||||
public ResolvableType getResolvableType() {
|
||||
return this.resolvableType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterPropertiesSet() throws Exception {
|
||||
final Class<?> classObj = ClassUtils.resolveClassName(((AnnotatedBeanDefinition)
|
||||
this.beanFactory.getBeanDefinition(kafkaStreamsFunctionProperties.getDefinition()))
|
||||
.getMetadata().getClassName(),
|
||||
ClassUtils.getDefaultClassLoader());
|
||||
|
||||
try {
|
||||
Method method = classObj.getMethod(this.kafkaStreamsFunctionProperties.getDefinition());
|
||||
this.resolvableType = ResolvableType.forMethodReturnType(method, classObj);
|
||||
}
|
||||
catch (NoSuchMethodException e) {
|
||||
//ignore
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
|
||||
this.beanFactory = (ConfigurableListableBeanFactory) beanFactory;
|
||||
}
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsFunctionProcessor;
|
||||
import org.springframework.core.ResolvableType;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1.0
|
||||
*/
|
||||
class KafkaStreamsFunctionProcessorInvoker {
|
||||
|
||||
private final KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor;
|
||||
private final ResolvableType resolvableType;
|
||||
private final String functionName;
|
||||
|
||||
KafkaStreamsFunctionProcessorInvoker(ResolvableType resolvableType, String functionName,
|
||||
KafkaStreamsFunctionProcessor kafkaStreamsFunctionProcessor) {
|
||||
this.kafkaStreamsFunctionProcessor = kafkaStreamsFunctionProcessor;
|
||||
this.resolvableType = resolvableType;
|
||||
this.functionName = functionName;
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
void invoke() {
|
||||
this.kafkaStreamsFunctionProcessor.orchestrateStreamListenerSetupMethod(resolvableType, functionName);
|
||||
}
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.lang.reflect.Type;
|
||||
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
|
||||
import org.springframework.cloud.function.context.WrapperDetector;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
public class KafkaStreamsFunctionWrapperDetector implements WrapperDetector {
|
||||
@Override
|
||||
public boolean isWrapper(Type type) {
|
||||
if (type instanceof Class<?>) {
|
||||
Class<?> cls = (Class<?>) type;
|
||||
return KStream.class.isAssignableFrom(cls) ||
|
||||
KTable.class.isAssignableFrom(cls) ||
|
||||
GlobalKTable.class.isAssignableFrom(cls);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,10 +19,9 @@ package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
|
||||
/**
|
||||
* {@link ConfigurationProperties} that can be used by end user Kafka Stream applications.
|
||||
* This class provides convenient ways to access the commonly used kafka stream properties
|
||||
* from the user application. For example, windowing operations are common use cases in
|
||||
* stream processing and one can provide window specific properties at runtime and use
|
||||
* {@link ConfigurationProperties} that can be used by end user Kafka Stream applications. This class provides
|
||||
* convenient ways to access the commonly used kafka stream properties from the user application. For example, windowing
|
||||
* operations are common use cases in stream processing and one can provide window specific properties at runtime and use
|
||||
* those properties in the applications using this class.
|
||||
*
|
||||
* @author Soby Chacko
|
||||
@@ -64,7 +63,5 @@ public class KafkaStreamsApplicationSupportProperties {
|
||||
public void setAdvanceBy(int advanceBy) {
|
||||
this.advanceBy = advanceBy;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -25,8 +25,7 @@ import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfi
|
||||
* @author Soby Chacko
|
||||
* @author Gary Russell
|
||||
*/
|
||||
public class KafkaStreamsBinderConfigurationProperties
|
||||
extends KafkaBinderConfigurationProperties {
|
||||
public class KafkaStreamsBinderConfigurationProperties extends KafkaBinderConfigurationProperties {
|
||||
|
||||
public KafkaStreamsBinderConfigurationProperties(KafkaProperties kafkaProperties) {
|
||||
super(kafkaProperties);
|
||||
@@ -36,7 +35,6 @@ public class KafkaStreamsBinderConfigurationProperties
|
||||
* Enumeration for various Serde errors.
|
||||
*/
|
||||
public enum SerdeError {
|
||||
|
||||
/**
|
||||
* Deserialization error handler with log and continue.
|
||||
*/
|
||||
@@ -49,7 +47,6 @@ public class KafkaStreamsBinderConfigurationProperties
|
||||
* Deserialization error handler with DLQ send.
|
||||
*/
|
||||
sendToDlq
|
||||
|
||||
}
|
||||
|
||||
private String applicationId;
|
||||
@@ -63,10 +60,9 @@ public class KafkaStreamsBinderConfigurationProperties
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link org.apache.kafka.streams.errors.DeserializationExceptionHandler} to use when
|
||||
* there is a Serde error.
|
||||
* {@link KafkaStreamsBinderConfigurationProperties.SerdeError} values are used to
|
||||
* provide the exception handler on consumer binding.
|
||||
* {@link org.apache.kafka.streams.errors.DeserializationExceptionHandler} to use
|
||||
* when there is a Serde error. {@link KafkaStreamsBinderConfigurationProperties.SerdeError}
|
||||
* values are used to provide the exception handler on consumer binding.
|
||||
*/
|
||||
private KafkaStreamsBinderConfigurationProperties.SerdeError serdeError;
|
||||
|
||||
@@ -74,8 +70,7 @@ public class KafkaStreamsBinderConfigurationProperties
|
||||
return this.serdeError;
|
||||
}
|
||||
|
||||
public void setSerdeError(
|
||||
KafkaStreamsBinderConfigurationProperties.SerdeError serdeError) {
|
||||
public void setSerdeError(KafkaStreamsBinderConfigurationProperties.SerdeError serdeError) {
|
||||
this.serdeError = serdeError;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -19,8 +19,7 @@ package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
|
||||
/**
|
||||
* Extended binding properties holder that delegates to Kafka Streams producer and
|
||||
* consumer properties.
|
||||
* Extended binding properties holder that delegates to Kafka Streams producer and consumer properties.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
*/
|
||||
@@ -45,5 +44,4 @@ public class KafkaStreamsBindingProperties implements BinderSpecificPropertiesPr
|
||||
public void setProducer(KafkaStreamsProducerProperties producer) {
|
||||
this.producer = producer;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,26 +16,19 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.cloud.stream.binder.AbstractExtendedBindingProperties;
|
||||
import org.springframework.cloud.stream.binder.BinderSpecificPropertiesProvider;
|
||||
|
||||
/**
|
||||
* Kafka streams specific extended binding properties class that extends from
|
||||
* {@link AbstractExtendedBindingProperties}.
|
||||
* Kafka streams specific extended binding properties class that extends from {@link AbstractExtendedBindingProperties}.
|
||||
*
|
||||
* @author Marius Bogoevici
|
||||
* @author Oleg Zhurakousky
|
||||
*/
|
||||
@ConfigurationProperties("spring.cloud.stream.kafka.streams")
|
||||
public class KafkaStreamsExtendedBindingProperties
|
||||
// @checkstyle:off
|
||||
extends
|
||||
AbstractExtendedBindingProperties<KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties, KafkaStreamsBindingProperties> {
|
||||
extends AbstractExtendedBindingProperties<KafkaStreamsConsumerProperties, KafkaStreamsProducerProperties, KafkaStreamsBindingProperties> {
|
||||
|
||||
// @checkstyle:on
|
||||
private static final String DEFAULTS_PREFIX = "spring.cloud.stream.kafka.streams.default";
|
||||
|
||||
@Override
|
||||
@@ -43,14 +36,8 @@ public class KafkaStreamsExtendedBindingProperties
|
||||
return DEFAULTS_PREFIX;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, KafkaStreamsBindingProperties> getBindings() {
|
||||
return this.doGetBindings();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() {
|
||||
return KafkaStreamsBindingProperties.class;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -51,5 +51,4 @@ public class KafkaStreamsProducerProperties extends KafkaProducerProperties {
|
||||
public void setValueSerde(String valueSerde) {
|
||||
this.valueSerde = valueSerde;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.properties;
|
||||
|
||||
|
||||
/**
|
||||
* Properties for Kafka Streams state store.
|
||||
*
|
||||
@@ -27,7 +28,6 @@ public class KafkaStreamsStateStoreProperties {
|
||||
* Enumeration for store type.
|
||||
*/
|
||||
public enum StoreType {
|
||||
|
||||
/**
|
||||
* Key value store.
|
||||
*/
|
||||
@@ -51,9 +51,9 @@ public class KafkaStreamsStateStoreProperties {
|
||||
public String toString() {
|
||||
return this.type;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Name for this state store.
|
||||
*/
|
||||
@@ -94,6 +94,7 @@ public class KafkaStreamsStateStoreProperties {
|
||||
*/
|
||||
private boolean loggingDisabled;
|
||||
|
||||
|
||||
public String getName() {
|
||||
return this.name;
|
||||
}
|
||||
@@ -157,5 +158,4 @@ public class KafkaStreamsStateStoreProperties {
|
||||
public void setLoggingDisabled(boolean loggingDisabled) {
|
||||
this.loggingDisabled = loggingDisabled;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -34,28 +34,25 @@ import org.springframework.util.MimeType;
|
||||
import org.springframework.util.MimeTypeUtils;
|
||||
|
||||
/**
|
||||
* A {@link Serde} implementation that wraps the list of {@link MessageConverter}s from
|
||||
* {@link CompositeMessageConverterFactory}.
|
||||
* A {@link Serde} implementation that wraps the list of {@link MessageConverter}s
|
||||
* from {@link CompositeMessageConverterFactory}.
|
||||
*
|
||||
* The primary motivation for this class is to provide an avro based {@link Serde} that is
|
||||
* compatible with the schema registry that Spring Cloud Stream provides. When using the
|
||||
* schema registry support from Spring Cloud Stream in a Kafka Streams binder based
|
||||
* application, the applications can deserialize the incoming Kafka Streams records using
|
||||
* the built in Avro {@link MessageConverter}. However, this same message conversion
|
||||
* approach will not work downstream in other operations in the topology for Kafka Streams
|
||||
* as some of them needs a {@link Serde} instance that can talk to the Spring Cloud Stream
|
||||
* provided Schema Registry. This implementation will solve that problem.
|
||||
* schema registry support from Spring Cloud Stream in a Kafka Streams binder based application,
|
||||
* the applications can deserialize the incoming Kafka Streams records using the built in
|
||||
* Avro {@link MessageConverter}. However, this same message conversion approach will not work
|
||||
* downstream in other operations in the topology for Kafka Streams as some of them needs a
|
||||
* {@link Serde} instance that can talk to the Spring Cloud Stream provided Schema Registry.
|
||||
* This implementation will solve that problem.
|
||||
*
|
||||
* Only Avro and JSON based converters are exposed as binder provided {@link Serde}
|
||||
* implementations currently.
|
||||
* Only Avro and JSON based converters are exposed as binder provided {@link Serde} implementations currently.
|
||||
*
|
||||
* Users of this class must call the
|
||||
* {@link CompositeNonNativeSerde#configure(Map, boolean)} method to configure the
|
||||
* {@link Serde} object. At the very least the configuration map must include a key called
|
||||
* "valueClass" to indicate the type of the target object for deserialization. If any
|
||||
* other content type other than JSON is needed (only Avro is available now other than
|
||||
* JSON), that needs to be included in the configuration map with the key "contentType".
|
||||
* For example,
|
||||
* Users of this class must call the {@link CompositeNonNativeSerde#configure(Map, boolean)} method
|
||||
* to configure the {@link Serde} object. At the very least the configuration map must include a key
|
||||
* called "valueClass" to indicate the type of the target object for deserialization. If any other
|
||||
* content type other than JSON is needed (only Avro is available now other than JSON), that needs
|
||||
* to be included in the configuration map with the key "contentType". For example,
|
||||
*
|
||||
* <pre class="code">
|
||||
* Map<String, Object> config = new HashMap<>();
|
||||
@@ -65,14 +62,14 @@ import org.springframework.util.MimeTypeUtils;
|
||||
*
|
||||
* Then use the above map when calling the configure method.
|
||||
*
|
||||
* This class is only intended to be used when writing a Spring Cloud Stream Kafka Streams
|
||||
* application that uses Spring Cloud Stream schema registry for schema evolution.
|
||||
* This class is only intended to be used when writing a Spring Cloud Stream Kafka Streams application
|
||||
* that uses Spring Cloud Stream schema registry for schema evolution.
|
||||
*
|
||||
* An instance of this class is provided as a bean by the binder configuration and
|
||||
* typically the applications can autowire that bean. This is the expected usage pattern
|
||||
* of this class.
|
||||
* An instance of this class is provided as a bean by the binder configuration and typically the applications
|
||||
* can autowire that bean. This is the expected usage pattern of this class.
|
||||
*
|
||||
* @param <T> type of the object to marshall
|
||||
*
|
||||
* @author Soby Chacko
|
||||
* @since 2.1
|
||||
*/
|
||||
@@ -82,19 +79,15 @@ public class CompositeNonNativeSerde<T> implements Serde<T> {
|
||||
|
||||
private static final String AVRO_FORMAT = "avro";
|
||||
|
||||
private static final MimeType DEFAULT_AVRO_MIME_TYPE = new MimeType("application",
|
||||
"*+" + AVRO_FORMAT);
|
||||
private static final MimeType DEFAULT_AVRO_MIME_TYPE = new MimeType("application", "*+" + AVRO_FORMAT);
|
||||
|
||||
private final CompositeNonNativeDeserializer<T> compositeNonNativeDeserializer;
|
||||
|
||||
private final CompositeNonNativeSerializer<T> compositeNonNativeSerializer;
|
||||
|
||||
public CompositeNonNativeSerde(
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.compositeNonNativeDeserializer = new CompositeNonNativeDeserializer<>(
|
||||
compositeMessageConverterFactory);
|
||||
this.compositeNonNativeSerializer = new CompositeNonNativeSerializer<>(
|
||||
compositeMessageConverterFactory);
|
||||
public CompositeNonNativeSerde(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.compositeNonNativeDeserializer = new CompositeNonNativeDeserializer<>(compositeMessageConverterFactory);
|
||||
this.compositeNonNativeSerializer = new CompositeNonNativeSerializer<>(compositeMessageConverterFactory);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -105,7 +98,7 @@ public class CompositeNonNativeSerde<T> implements Serde<T> {
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// No-op
|
||||
//No-op
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -149,19 +142,15 @@ public class CompositeNonNativeSerde<T> implements Serde<T> {
|
||||
|
||||
private Class<?> valueClass;
|
||||
|
||||
CompositeNonNativeDeserializer(
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.messageConverter = compositeMessageConverterFactory
|
||||
.getMessageConverterForAllRegistered();
|
||||
CompositeNonNativeDeserializer(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.messageConverter = compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
Assert.isTrue(configs.containsKey(VALUE_CLASS_HEADER),
|
||||
"Deserializers must provide a configuration for valueClass.");
|
||||
Assert.isTrue(configs.containsKey(VALUE_CLASS_HEADER), "Deserializers must provide a configuration for valueClass.");
|
||||
final Object valueClass = configs.get(VALUE_CLASS_HEADER);
|
||||
Assert.isTrue(valueClass instanceof Class,
|
||||
"Deserializers must provide a valid value for valueClass.");
|
||||
Assert.isTrue(valueClass instanceof Class, "Deserializers must provide a valid value for valueClass.");
|
||||
this.valueClass = (Class<?>) valueClass;
|
||||
this.mimeType = resolveMimeType(configs);
|
||||
}
|
||||
@@ -170,19 +159,16 @@ public class CompositeNonNativeSerde<T> implements Serde<T> {
|
||||
@Override
|
||||
public U deserialize(String topic, byte[] data) {
|
||||
Message<?> message = MessageBuilder.withPayload(data)
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, this.mimeType.toString())
|
||||
.build();
|
||||
U messageConverted = (U) this.messageConverter.fromMessage(message,
|
||||
this.valueClass);
|
||||
.setHeader(MessageHeaders.CONTENT_TYPE, this.mimeType.toString()).build();
|
||||
U messageConverted = (U) this.messageConverter.fromMessage(message, this.valueClass);
|
||||
Assert.notNull(messageConverted, "Deserialization failed.");
|
||||
return messageConverted;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// No-op
|
||||
//No-op
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -193,13 +179,10 @@ public class CompositeNonNativeSerde<T> implements Serde<T> {
|
||||
private static class CompositeNonNativeSerializer<V> implements Serializer<V> {
|
||||
|
||||
private final MessageConverter messageConverter;
|
||||
|
||||
private MimeType mimeType;
|
||||
|
||||
CompositeNonNativeSerializer(
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.messageConverter = compositeMessageConverterFactory
|
||||
.getMessageConverterForAllRegistered();
|
||||
CompositeNonNativeSerializer(CompositeMessageConverterFactory compositeMessageConverterFactory) {
|
||||
this.messageConverter = compositeMessageConverterFactory.getMessageConverterForAllRegistered();
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -213,16 +196,14 @@ public class CompositeNonNativeSerde<T> implements Serde<T> {
|
||||
Map<String, Object> headers = new HashMap<>(message.getHeaders());
|
||||
headers.put(MessageHeaders.CONTENT_TYPE, this.mimeType.toString());
|
||||
MessageHeaders messageHeaders = new MessageHeaders(headers);
|
||||
final Object payload = this.messageConverter
|
||||
.toMessage(message.getPayload(), messageHeaders).getPayload();
|
||||
final Object payload = this.messageConverter.toMessage(message.getPayload(),
|
||||
messageHeaders).getPayload();
|
||||
return (byte[]) payload;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// No-op
|
||||
//No-op
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsBinderSupportAutoConfiguration,\
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsApplicationSupportAutoConfiguration,\
|
||||
org.springframework.cloud.stream.binder.kafka.streams.function.KafkaStreamsFunctionAutoConfiguration
|
||||
|
||||
org.springframework.cloud.function.context.WrapperDetector=\
|
||||
org.springframework.cloud.stream.binder.kafka.streams.function.KafkaStreamsFunctionWrapperDetector
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsApplicationSupportAutoConfiguration
|
||||
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -18,6 +18,7 @@ package org.springframework.cloud.stream.binder.kafka.streams.bootstrap;
|
||||
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
@@ -32,6 +33,7 @@ import org.springframework.kafka.test.rule.KafkaEmbedded;
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
@Ignore("Temporarily disabling the test as builds are getting slower due to this.")
|
||||
public class KafkaStreamsBinderBootstrapTest {
|
||||
|
||||
@ClassRule
|
||||
@@ -39,34 +41,24 @@ public class KafkaStreamsBinderBootstrapTest {
|
||||
|
||||
@Test
|
||||
public void testKafkaStreamsBinderWithCustomEnvironmentCanStart() {
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(
|
||||
SimpleApplication.class).web(WebApplicationType.NONE).run(
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id"
|
||||
+ "=testKafkaStreamsBinderWithCustomEnvironmentCanStart",
|
||||
"--spring.cloud.stream.bindings.input.destination=foo",
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(SimpleApplication.class)
|
||||
.web(WebApplicationType.NONE)
|
||||
.run("--spring.cloud.stream.bindings.input.destination=foo",
|
||||
"--spring.cloud.stream.bindings.input.binder=kBind1",
|
||||
"--spring.cloud.stream.binders.kBind1.type=kstream",
|
||||
"--spring.cloud.stream.binders.kBind1.environment"
|
||||
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
|
||||
+ "=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.binders.kBind1.environment.spring"
|
||||
+ ".cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.binders.kBind1.environment.spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.binders.kBind1.environment.spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
|
||||
applicationContext.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKafkaStreamsBinderWithStandardConfigurationCanStart() {
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(
|
||||
SimpleApplication.class).web(WebApplicationType.NONE).run(
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id"
|
||||
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart",
|
||||
"--spring.cloud.stream.bindings.input.destination=foo",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(SimpleApplication.class)
|
||||
.web(WebApplicationType.NONE)
|
||||
.run("--spring.cloud.stream.bindings.input.destination=foo",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
|
||||
applicationContext.close();
|
||||
}
|
||||
@@ -79,14 +71,10 @@ public class KafkaStreamsBinderBootstrapTest {
|
||||
public void handle(@Input("input") KStream<Object, String> stream) {
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface StreamSourceProcessor {
|
||||
|
||||
@Input("input")
|
||||
KStream<?, ?> inputStream();
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,225 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.kstream.Predicate;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.Output;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class KafkaStreamsBinderWordCountBranchesFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts", "foo", "bar");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "foo", "bar");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithStringInputAndPojoOuput() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.definition=process",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output1.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output1.contentType=application/json",
|
||||
"--spring.cloud.stream.bindings.output2.destination=foo",
|
||||
"--spring.cloud.stream.bindings.output2.contentType=application/json",
|
||||
"--spring.cloud.stream.bindings.output3.destination=bar",
|
||||
"--spring.cloud.stream.bindings.output3.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId" +
|
||||
"=KafkaStreamsBinderWordCountBranchesFunctionTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
receiveAndValidate(context);
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("english");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
assertThat(cr.value().contains("\"word\":\"english\",\"count\":1")).isTrue();
|
||||
|
||||
template.sendDefault("french");
|
||||
template.sendDefault("french");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "foo");
|
||||
assertThat(cr.value().contains("\"word\":\"french\",\"count\":2")).isTrue();
|
||||
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "bar");
|
||||
assertThat(cr.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
|
||||
private String word;
|
||||
|
||||
private long count;
|
||||
|
||||
private Date start;
|
||||
|
||||
private Date end;
|
||||
|
||||
WordCount(String word, long count, Date start, Date end) {
|
||||
this.word = word;
|
||||
this.count = count;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public String getWord() {
|
||||
return word;
|
||||
}
|
||||
|
||||
public void setWord(String word) {
|
||||
this.word = word;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public Date getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public void setStart(Date start) {
|
||||
this.start = start;
|
||||
}
|
||||
|
||||
public Date getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
}
|
||||
|
||||
@EnableBinding(KStreamProcessorX.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class WordCountProcessorApplication {
|
||||
|
||||
@Bean
|
||||
@SuppressWarnings("unchecked")
|
||||
public Function<KStream<Object, String>, KStream<?, WordCount>[]> process() {
|
||||
|
||||
Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
|
||||
Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
|
||||
Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");
|
||||
|
||||
return input -> input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.groupBy((key, value) -> value)
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("WordCounts-branch"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value,
|
||||
new Date(key.window().start()), new Date(key.window().end()))))
|
||||
.branch(isEnglish, isFrench, isSpanish);
|
||||
}
|
||||
}
|
||||
|
||||
interface KStreamProcessorX {
|
||||
|
||||
@Input("input")
|
||||
KStream<?, ?> input();
|
||||
|
||||
@Output("output1")
|
||||
KStream<?, ?> output1();
|
||||
|
||||
@Output("output2")
|
||||
KStream<?, ?> output2();
|
||||
|
||||
@Output("output3")
|
||||
KStream<?, ?> output3();
|
||||
}
|
||||
}
|
||||
@@ -1,190 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.kstream.Serialized;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class KafkaStreamsBinderWordCountFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountFunction() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.definition=process",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id=basic-word-count",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.bindings.output.producer.headerMode=raw",
|
||||
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
receiveAndValidate(context);
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
assertThat(cr.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
|
||||
}
|
||||
finally {
|
||||
pf.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
|
||||
private String word;
|
||||
|
||||
private long count;
|
||||
|
||||
private Date start;
|
||||
|
||||
private Date end;
|
||||
|
||||
WordCount(String word, long count, Date start, Date end) {
|
||||
this.word = word;
|
||||
this.count = count;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public String getWord() {
|
||||
return word;
|
||||
}
|
||||
|
||||
public void setWord(String word) {
|
||||
this.word = word;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public Date getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public void setStart(Date start) {
|
||||
this.start = start;
|
||||
}
|
||||
|
||||
public Date getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
static class WordCountProcessorApplication {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<Object, String>, KStream<?, WordCount>> process() {
|
||||
|
||||
return input -> input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("foo-WordCounts"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value,
|
||||
new Date(key.window().start()), new Date(key.window().end()))));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,353 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecords;
|
||||
import org.apache.kafka.clients.producer.ProducerConfig;
|
||||
import org.apache.kafka.common.serialization.LongDeserializer;
|
||||
import org.apache.kafka.common.serialization.LongSerializer;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.GlobalKTable;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.support.serializer.JsonDeserializer;
|
||||
import org.springframework.kafka.support.serializer.JsonSerde;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class StreamToGlobalKTableFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"enriched-order");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<Long, EnrichedOrder> consumer;
|
||||
|
||||
@Test
|
||||
public void testStreamToGlobalKTable() throws Exception {
|
||||
SpringApplication app = new SpringApplication(OrderEnricherApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.definition=process",
|
||||
"--spring.cloud.stream.bindings.input.destination=orders",
|
||||
"--spring.cloud.stream.bindings.input-x.destination=customers",
|
||||
"--spring.cloud.stream.bindings.input-y.destination=products",
|
||||
"--spring.cloud.stream.bindings.output.destination=enriched-order",
|
||||
"--spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.input-x.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.input-y.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde" +
|
||||
"=org.springframework.cloud.stream.binder.kafka.streams.function" +
|
||||
".StreamToGlobalKTableFunctionTests$OrderSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-x.consumer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-x.consumer.valueSerde" +
|
||||
"=org.springframework.cloud.stream.binder.kafka.streams.function" +
|
||||
".StreamToGlobalKTableFunctionTests$CustomerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-y.consumer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-y.consumer.valueSerde" +
|
||||
"=org.springframework.cloud.stream.binder.kafka.streams.function" +
|
||||
".StreamToGlobalKTableFunctionTests$ProductSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde" +
|
||||
"=org.springframework.cloud.stream.binder.kafka.streams." +
|
||||
"function.StreamToGlobalKTableFunctionTests$EnrichedOrderSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=" +
|
||||
"StreamToGlobalKTableJoinFunctionTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
Map<String, Object> senderPropsCustomer = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsCustomer.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
CustomerSerde customerSerde = new CustomerSerde();
|
||||
senderPropsCustomer.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
customerSerde.serializer().getClass());
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Customer> pfCustomer =
|
||||
new DefaultKafkaProducerFactory<>(senderPropsCustomer);
|
||||
KafkaTemplate<Long, Customer> template = new KafkaTemplate<>(pfCustomer, true);
|
||||
template.setDefaultTopic("customers");
|
||||
for (long i = 0; i < 5; i++) {
|
||||
final Customer customer = new Customer();
|
||||
customer.setName("customer-" + i);
|
||||
template.sendDefault(i, customer);
|
||||
}
|
||||
|
||||
Map<String, Object> senderPropsProduct = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsProduct.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
ProductSerde productSerde = new ProductSerde();
|
||||
senderPropsProduct.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, productSerde.serializer().getClass());
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Product> pfProduct =
|
||||
new DefaultKafkaProducerFactory<>(senderPropsProduct);
|
||||
KafkaTemplate<Long, Product> productTemplate = new KafkaTemplate<>(pfProduct, true);
|
||||
productTemplate.setDefaultTopic("products");
|
||||
|
||||
for (long i = 0; i < 5; i++) {
|
||||
final Product product = new Product();
|
||||
product.setName("product-" + i);
|
||||
productTemplate.sendDefault(i, product);
|
||||
}
|
||||
|
||||
Map<String, Object> senderPropsOrder = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsOrder.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
OrderSerde orderSerde = new OrderSerde();
|
||||
senderPropsOrder.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, orderSerde.serializer().getClass());
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Order> pfOrder = new DefaultKafkaProducerFactory<>(senderPropsOrder);
|
||||
KafkaTemplate<Long, Order> orderTemplate = new KafkaTemplate<>(pfOrder, true);
|
||||
orderTemplate.setDefaultTopic("orders");
|
||||
|
||||
for (long i = 0; i < 5; i++) {
|
||||
final Order order = new Order();
|
||||
order.setCustomerId(i);
|
||||
order.setProductId(i);
|
||||
orderTemplate.sendDefault(i, order);
|
||||
}
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
EnrichedOrderSerde enrichedOrderSerde = new EnrichedOrderSerde();
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
enrichedOrderSerde.deserializer().getClass());
|
||||
consumerProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE,
|
||||
"org.springframework.cloud.stream.binder.kafka.streams." +
|
||||
"function.StreamToGlobalKTableFunctionTests.EnrichedOrder");
|
||||
DefaultKafkaConsumerFactory<Long, EnrichedOrder> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "enriched-order");
|
||||
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<Long, EnrichedOrder>> enrichedOrders = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<Long, EnrichedOrder> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<Long, EnrichedOrder> record : records) {
|
||||
enrichedOrders.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
} while (count < 5 && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count == 5).isTrue();
|
||||
assertThat(enrichedOrders.size() == 5).isTrue();
|
||||
|
||||
enrichedOrders.sort(Comparator.comparing(o -> o.key));
|
||||
|
||||
for (int i = 0; i < 5; i++) {
|
||||
KeyValue<Long, EnrichedOrder> enrichedOrderKeyValue = enrichedOrders.get(i);
|
||||
assertThat(enrichedOrderKeyValue.key == i).isTrue();
|
||||
EnrichedOrder enrichedOrder = enrichedOrderKeyValue.value;
|
||||
assertThat(enrichedOrder.getOrder().customerId == i).isTrue();
|
||||
assertThat(enrichedOrder.getOrder().productId == i).isTrue();
|
||||
assertThat(enrichedOrder.getCustomer().name.equals("customer-" + i)).isTrue();
|
||||
assertThat(enrichedOrder.getProduct().name.equals("product-" + i)).isTrue();
|
||||
}
|
||||
pfCustomer.destroy();
|
||||
pfProduct.destroy();
|
||||
pfOrder.destroy();
|
||||
consumer.close();
|
||||
}
|
||||
}
|
||||
|
||||
interface CustomGlobalKTableProcessor extends KafkaStreamsProcessor {
|
||||
|
||||
@Input("input-x")
|
||||
GlobalKTable<?, ?> inputX();
|
||||
|
||||
@Input("input-y")
|
||||
GlobalKTable<?, ?> inputY();
|
||||
}
|
||||
|
||||
@EnableBinding(CustomGlobalKTableProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class OrderEnricherApplication {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<Long, Order>,
|
||||
Function<GlobalKTable<Long, Customer>,
|
||||
Function<GlobalKTable<Long, Product>, KStream<Long, EnrichedOrder>>>> process() {
|
||||
|
||||
return orderStream -> (
|
||||
customers -> (
|
||||
products -> (
|
||||
orderStream.join(customers,
|
||||
(orderId, order) -> order.getCustomerId(),
|
||||
(order, customer) -> new CustomerOrder(customer, order))
|
||||
.join(products,
|
||||
(orderId, customerOrder) -> customerOrder
|
||||
.productId(),
|
||||
(customerOrder, product) -> {
|
||||
EnrichedOrder enrichedOrder = new EnrichedOrder();
|
||||
enrichedOrder.setProduct(product);
|
||||
enrichedOrder.setCustomer(customerOrder.customer);
|
||||
enrichedOrder.setOrder(customerOrder.order);
|
||||
return enrichedOrder;
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
static class Order {
|
||||
|
||||
long customerId;
|
||||
long productId;
|
||||
|
||||
public long getCustomerId() {
|
||||
return customerId;
|
||||
}
|
||||
|
||||
public void setCustomerId(long customerId) {
|
||||
this.customerId = customerId;
|
||||
}
|
||||
|
||||
public long getProductId() {
|
||||
return productId;
|
||||
}
|
||||
|
||||
public void setProductId(long productId) {
|
||||
this.productId = productId;
|
||||
}
|
||||
}
|
||||
|
||||
static class Customer {
|
||||
|
||||
String name;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
static class Product {
|
||||
|
||||
String name;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
static class EnrichedOrder {
|
||||
|
||||
Product product;
|
||||
Customer customer;
|
||||
Order order;
|
||||
|
||||
public Product getProduct() {
|
||||
return product;
|
||||
}
|
||||
|
||||
public void setProduct(Product product) {
|
||||
this.product = product;
|
||||
}
|
||||
|
||||
public Customer getCustomer() {
|
||||
return customer;
|
||||
}
|
||||
|
||||
public void setCustomer(Customer customer) {
|
||||
this.customer = customer;
|
||||
}
|
||||
|
||||
public Order getOrder() {
|
||||
return order;
|
||||
}
|
||||
|
||||
public void setOrder(Order order) {
|
||||
this.order = order;
|
||||
}
|
||||
}
|
||||
|
||||
private static class CustomerOrder {
|
||||
private final Customer customer;
|
||||
private final Order order;
|
||||
|
||||
CustomerOrder(final Customer customer, final Order order) {
|
||||
this.customer = customer;
|
||||
this.order = order;
|
||||
}
|
||||
|
||||
long productId() {
|
||||
return order.getProductId();
|
||||
}
|
||||
}
|
||||
|
||||
public static class OrderSerde extends JsonSerde<Order> {
|
||||
}
|
||||
|
||||
public static class CustomerSerde extends JsonSerde<Customer> {
|
||||
}
|
||||
|
||||
public static class ProductSerde extends JsonSerde<Product> {
|
||||
}
|
||||
|
||||
public static class EnrichedOrderSerde extends JsonSerde<EnrichedOrder> {
|
||||
}
|
||||
}
|
||||
@@ -1,410 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.function;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecord;
|
||||
import org.apache.kafka.clients.consumer.ConsumerRecords;
|
||||
import org.apache.kafka.clients.producer.ProducerConfig;
|
||||
import org.apache.kafka.common.serialization.LongDeserializer;
|
||||
import org.apache.kafka.common.serialization.LongSerializer;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.common.serialization.StringDeserializer;
|
||||
import org.apache.kafka.common.serialization.StringSerializer;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.Joined;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.KTable;
|
||||
import org.apache.kafka.streams.kstream.Serialized;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.Output;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class StreamToTableJoinFunctionTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1,
|
||||
true, "output-topic-1", "output-topic-2");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@Test
|
||||
public void testStreamToTable() throws Exception {
|
||||
SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-1",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");
|
||||
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.definition=process1",
|
||||
"--spring.cloud.stream.bindings.input-1.destination=user-clicks-1",
|
||||
"--spring.cloud.stream.bindings.input-2.destination=user-regions-1",
|
||||
"--spring.cloud.stream.bindings.output.destination=output-topic-1",
|
||||
"--spring.cloud.stream.bindings.input-1.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.input-2.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.valueSerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-2.consumer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-2.consumer.valueSerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.applicationId" +
|
||||
"=StreamToTableJoinFunctionTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
|
||||
// Input 1: Region per user (multiple records allowed per user).
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(
|
||||
new KeyValue<>("alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"),
|
||||
new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"),
|
||||
new KeyValue<>("alice", "europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"),
|
||||
new KeyValue<>("fang", "asia")
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(senderProps1);
|
||||
KafkaTemplate<String, String> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic("user-regions-1");
|
||||
|
||||
for (KeyValue<String, String> keyValue : userRegions) {
|
||||
template1.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
// Input 2: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks = Arrays.asList(
|
||||
new KeyValue<>("alice", 13L),
|
||||
new KeyValue<>("bob", 4L),
|
||||
new KeyValue<>("chao", 25L),
|
||||
new KeyValue<>("bob", 19L),
|
||||
new KeyValue<>("dave", 56L),
|
||||
new KeyValue<>("eve", 78L),
|
||||
new KeyValue<>("alice", 40L),
|
||||
new KeyValue<>("fang", 99L)
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<String, Long> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("user-clicks-1");
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
List<KeyValue<String, Long>> expectedClicksPerRegion = Arrays.asList(
|
||||
new KeyValue<>("americas", 101L),
|
||||
new KeyValue<>("europe", 109L),
|
||||
new KeyValue<>("asia", 124L)
|
||||
);
|
||||
|
||||
//Verify that we receive the expected data
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<String, Long>> actualClicksPerRegion = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<String, Long> record : records) {
|
||||
actualClicksPerRegion.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
} while (count < expectedClicksPerRegion.size() && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count == expectedClicksPerRegion.size()).isTrue();
|
||||
assertThat(actualClicksPerRegion).hasSameElementsAs(expectedClicksPerRegion);
|
||||
}
|
||||
finally {
|
||||
consumer.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGlobalStartOffsetWithLatestAndIndividualBindingWthEarliest() throws Exception {
|
||||
SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-2");
|
||||
|
||||
// Produce data first to the input topic to test the startOffset setting on the
|
||||
// binding (which is set to earliest below).
|
||||
// Input 1: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks = Arrays.asList(
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L)
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<String, Long> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("user-clicks-2");
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.function.definition=process1",
|
||||
"--spring.cloud.stream.bindings.input-1.destination=user-clicks-2",
|
||||
"--spring.cloud.stream.bindings.input-2.destination=user-regions-2",
|
||||
"--spring.cloud.stream.bindings.output.destination=output-topic-2",
|
||||
"--spring.cloud.stream.bindings.input-1.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.input-2.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.auto.offset.reset=latest",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.startOffset=earliest",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.valueSerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-2.consumer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-2.consumer.valueSerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
|
||||
"=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-1.consumer.application-id" +
|
||||
"=StreamToTableJoinFunctionTests-foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
Thread.sleep(1000L);
|
||||
|
||||
// Input 2: Region per user (multiple records allowed per user).
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(
|
||||
new KeyValue<>("alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"),
|
||||
new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"),
|
||||
new KeyValue<>("alice", "europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"),
|
||||
new KeyValue<>("fang", "asia")
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(senderProps1);
|
||||
KafkaTemplate<String, String> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic("user-regions-2");
|
||||
|
||||
for (KeyValue<String, String> keyValue : userRegions) {
|
||||
template1.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
|
||||
// Input 1: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks1 = Arrays.asList(
|
||||
new KeyValue<>("bob", 4L),
|
||||
new KeyValue<>("chao", 25L),
|
||||
new KeyValue<>("bob", 19L),
|
||||
new KeyValue<>("dave", 56L),
|
||||
new KeyValue<>("eve", 78L),
|
||||
new KeyValue<>("fang", 99L)
|
||||
);
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks1) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
|
||||
List<KeyValue<String, Long>> expectedClicksPerRegion = Arrays.asList(
|
||||
new KeyValue<>("americas", 101L),
|
||||
new KeyValue<>("europe", 56L),
|
||||
new KeyValue<>("asia", 124L),
|
||||
//1000 alice entries which were there in the topic before the consumer started.
|
||||
//Since we set the startOffset to earliest for the topic, it will read them,
|
||||
//but the join fails to associate with a valid region, thus UNKNOWN.
|
||||
new KeyValue<>("UNKNOWN", 1000L)
|
||||
);
|
||||
|
||||
//Verify that we receive the expected data
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<String, Long>> actualClicksPerRegion = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<String, Long> record : records) {
|
||||
actualClicksPerRegion.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
} while (count < expectedClicksPerRegion.size() && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count).isEqualTo(expectedClicksPerRegion.size());
|
||||
assertThat(actualClicksPerRegion).hasSameElementsAs(expectedClicksPerRegion);
|
||||
}
|
||||
finally {
|
||||
consumer.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tuple for a region and its associated number of clicks.
|
||||
*/
|
||||
private static final class RegionWithClicks {
|
||||
|
||||
private final String region;
|
||||
private final long clicks;
|
||||
|
||||
RegionWithClicks(String region, long clicks) {
|
||||
if (region == null || region.isEmpty()) {
|
||||
throw new IllegalArgumentException("region must be set");
|
||||
}
|
||||
if (clicks < 0) {
|
||||
throw new IllegalArgumentException("clicks must not be negative");
|
||||
}
|
||||
this.region = region;
|
||||
this.clicks = clicks;
|
||||
}
|
||||
|
||||
public String getRegion() {
|
||||
return region;
|
||||
}
|
||||
|
||||
public long getClicks() {
|
||||
return clicks;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KStreamKTableProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class CountClicksPerRegionApplication {
|
||||
|
||||
@Bean
|
||||
public Function<KStream<String, Long>, Function<KTable<String, String>, KStream<String, Long>>> process1() {
|
||||
return userClicksStream -> (userRegionsTable -> (userClicksStream
|
||||
.leftJoin(userRegionsTable, (clicks, region) -> new RegionWithClicks(region == null ?
|
||||
"UNKNOWN" : region, clicks),
|
||||
Joined.with(Serdes.String(), Serdes.Long(), null))
|
||||
.map((user, regionWithClicks) -> new KeyValue<>(regionWithClicks.getRegion(),
|
||||
regionWithClicks.getClicks()))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.Long()))
|
||||
.reduce((firstClicks, secondClicks) -> firstClicks + secondClicks)
|
||||
.toStream()));
|
||||
}
|
||||
}
|
||||
|
||||
interface KStreamKTableProcessor {
|
||||
|
||||
/**
|
||||
* Input binding.
|
||||
*
|
||||
* @return {@link Input} binding for {@link KStream} type.
|
||||
*/
|
||||
@Input("input-1")
|
||||
KStream<?, ?> input1();
|
||||
|
||||
/**
|
||||
* Input binding.
|
||||
*
|
||||
* @return {@link Input} binding for {@link KStream} type.
|
||||
*/
|
||||
@Input("input-2")
|
||||
KTable<?, ?> input2();
|
||||
|
||||
/**
|
||||
* Output binding.
|
||||
*
|
||||
* @return {@link Output} binding for {@link KStream} type.
|
||||
*/
|
||||
@Output("output")
|
||||
KStream<?, ?> output();
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -69,32 +69,27 @@ import static org.mockito.Mockito.verify;
|
||||
public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts", "error.words.group", "error.word1.groupx", "error.word2.groupx");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts", "error.words.group",
|
||||
"error.word1.groupx", "error.word2.groupx");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@SpyBean
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate;
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
|
||||
embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes",
|
||||
embeddedKafka.getZookeeperConnectionString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
|
||||
|
||||
System.setProperty("server.port", "0");
|
||||
System.setProperty("spring.jmx.enabled", "false");
|
||||
System.setProperty("server.port","0");
|
||||
System.setProperty("spring.jmx.enabled","false");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false",
|
||||
embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
}
|
||||
@@ -104,50 +99,42 @@ public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
// @checkstyle:off
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deser-kafka-dlq",
|
||||
"spring.cloud.stream.bindings.input.group=group",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
// @checkstyle:on
|
||||
public static class DeserializationByKafkaAndDlqTests
|
||||
extends DeserializationErrorHandlerByKafkaTests {
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=" +
|
||||
"org.apache.kafka.common.serialization.Serdes$IntegerSerde"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByKafkaAndDlqTests extends DeserializationErrorHandlerByKafkaTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("foobar");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.words.group");
|
||||
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.words.group");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1, "error.words.group");
|
||||
assertThat(cr.value().equals("foobar")).isTrue();
|
||||
|
||||
// Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
//Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// @checkstyle:off
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
@@ -155,18 +142,17 @@ public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
"spring.cloud.stream.kafka.streams.default.consumer.applicationId=deser-kafka-dlq-multi-input",
|
||||
"spring.cloud.stream.bindings.input.group=groupx",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
// @checkstyle:on
|
||||
public static class DeserializationByKafkaAndDlqTestsWithMultipleInputs
|
||||
extends DeserializationErrorHandlerByKafkaTests {
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=" +
|
||||
"org.apache.kafka.common.serialization.Serdes$IntegerSerde"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByKafkaAndDlqTestsWithMultipleInputs extends DeserializationErrorHandlerByKafkaTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() {
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("word1");
|
||||
template.sendDefault("foobar");
|
||||
@@ -174,30 +160,22 @@ public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
template.setDefaultTopic("word2");
|
||||
template.sendDefault("foobar");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobarx",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobarx", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.word1.groupx",
|
||||
"error.word2.groupx");
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.word1.groupx", "error.word2.groupx");
|
||||
|
||||
// TODO: Investigate why the ordering matters below: i.e.
|
||||
// if we consume from error.word1.groupx first, an exception is thrown.
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.word2.groupx");
|
||||
//TODO: Investigate why the ordering matters below: i.e. if we consume from error.word1.groupx first, an exception is thrown.
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1, "error.word2.groupx");
|
||||
assertThat(cr1.value().equals("foobar")).isTrue();
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.word1.groupx");
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1, "error.word1.groupx");
|
||||
assertThat(cr2.value().equals("foobar")).isTrue();
|
||||
|
||||
// Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
//Ensuring that the deserialization was indeed done by Kafka natively
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@@ -214,15 +192,14 @@ public abstract class DeserializationErrorHandlerByKafkaTests {
|
||||
public KStream<?, String> process(KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(timeWindows).count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream().map((key, value) -> new KeyValue<>(null,
|
||||
"Count for " + key.key() + " : " + value));
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, "Count for " + key.key() + " : " + value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -63,35 +63,28 @@ import static org.mockito.Mockito.verify;
|
||||
public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id", "error.foos.foobar-group", "error.foos1.fooz-group",
|
||||
"error.foos2.fooz-group");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id", "error.foos.foobar-group",
|
||||
"error.foos1.fooz-group", "error.foos2.fooz-group");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@SpyBean
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate;
|
||||
|
||||
private static Consumer<Integer, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
|
||||
embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes",
|
||||
embeddedKafka.getZookeeperConnectionString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
|
||||
|
||||
System.setProperty("server.port", "0");
|
||||
System.setProperty("spring.jmx.enabled", "false");
|
||||
System.setProperty("server.port","0");
|
||||
System.setProperty("spring.jmx.enabled","false");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foob", "false",
|
||||
embeddedKafka);
|
||||
// consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
// Deserializer.class.getName());
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foob", "false", embeddedKafka);
|
||||
//consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Deserializer.class.getName());
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -101,77 +94,64 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@SpringBootTest(properties = { "spring.cloud.stream.bindings.input.destination=foos",
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.destination=foos",
|
||||
"spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.bindings.output.producer.headerMode=raw",
|
||||
"spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"spring.cloud.stream.bindings.input.consumer.headerMode=raw",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id"
|
||||
+ "=deserializationByBinderAndDlqTests",
|
||||
"spring.cloud.stream.bindings.input.group=foobar-group" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
public static class DeserializationByBinderAndDlqTests
|
||||
extends DeserializtionErrorHandlerByBinderTests {
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deserializationByBinderAndDlqTests",
|
||||
"spring.cloud.stream.bindings.input.group=foobar-group"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByBinderAndDlqTests extends DeserializtionErrorHandlerByBinderTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos");
|
||||
template.sendDefault("hello");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
|
||||
"error.foos.foobar-group");
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.foos.foobar-group");
|
||||
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.foos.foobar-group");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1, "error.foos.foobar-group");
|
||||
assertThat(cr.value().equals("hello")).isTrue();
|
||||
|
||||
// Ensuring that the deserialization was indeed done by the binder
|
||||
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
//Ensuring that the deserialization was indeed done by the binder
|
||||
verify(KafkaStreamsMessageConversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.destination=foos1,foos2",
|
||||
"spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"spring.cloud.stream.kafka.streams.binder.serdeError=sendToDlq",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id"
|
||||
+ "=deserializationByBinderAndDlqTestsWithMultipleInputs",
|
||||
"spring.cloud.stream.bindings.input.group=fooz-group" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
public static class DeserializationByBinderAndDlqTestsWithMultipleInputs
|
||||
extends DeserializtionErrorHandlerByBinderTests {
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=deserializationByBinderAndDlqTestsWithMultipleInputs",
|
||||
"spring.cloud.stream.bindings.input.group=fooz-group"},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class DeserializationByBinderAndDlqTestsWithMultipleInputs extends DeserializtionErrorHandlerByBinderTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos1");
|
||||
template.sendDefault("hello");
|
||||
@@ -179,28 +159,21 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
template.setDefaultTopic("foos2");
|
||||
template.sendDefault("hello");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar1",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar1", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
Consumer<String, String> consumer1 = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.foos1.fooz-group",
|
||||
"error.foos2.fooz-group");
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.foos1.fooz-group", "error.foos2.fooz-group");
|
||||
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.foos1.fooz-group");
|
||||
ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1, "error.foos1.fooz-group");
|
||||
assertThat(cr1.value().equals("hello")).isTrue();
|
||||
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1,
|
||||
"error.foos2.fooz-group");
|
||||
ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1, "error.foos2.fooz-group");
|
||||
assertThat(cr2.value().equals("hello")).isTrue();
|
||||
|
||||
// Ensuring that the deserialization was indeed done by the binder
|
||||
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
//Ensuring that the deserialization was indeed done by the binder
|
||||
verify(KafkaStreamsMessageConversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@@ -210,17 +183,16 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Integer, Long> process(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> product.getId() == 123)
|
||||
return input
|
||||
.filter((key, product) -> product.getId() == 123)
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class),
|
||||
new JsonSerde<>(Product.class)))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class)))
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("id-count-store-x")).toStream()
|
||||
.count(Materialized.as("id-count-store-x"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(key.key().id, value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
|
||||
Integer id;
|
||||
@@ -232,7 +204,5 @@ public abstract class DeserializtionErrorHandlerByBinderTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,319 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.integration;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.kafka.clients.consumer.Consumer;
|
||||
import org.apache.kafka.clients.consumer.ConsumerConfig;
|
||||
import org.apache.kafka.clients.producer.ProducerRecord;
|
||||
import org.apache.kafka.streams.KafkaStreams;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.assertj.core.util.Lists;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.WebApplicationType;
|
||||
import org.springframework.boot.actuate.health.Health;
|
||||
import org.springframework.boot.actuate.health.HealthIndicator;
|
||||
import org.springframework.boot.actuate.health.Status;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.Output;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsApplicationSupportProperties;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.kafka.support.SendResult;
|
||||
import org.springframework.kafka.test.EmbeddedKafkaBroker;
|
||||
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
|
||||
import org.springframework.kafka.test.utils.KafkaTestUtils;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
import org.springframework.util.concurrent.ListenableFuture;
|
||||
import org.springframework.util.concurrent.ListenableFutureCallback;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* @author Arnaud Jardiné
|
||||
*/
|
||||
public class KafkaStreamsBinderHealthIndicatorTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"out", "out2");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() {
|
||||
System.setProperty("logging.level.org.apache.kafka", "OFF");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorUpTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = singleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in", "{\"id\":\"123\"}")),
|
||||
Status.UP, "out");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorDownTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = singleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in", "{\"id\":\"124\"}")),
|
||||
Status.DOWN, "out");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorUpMultipleKStreamsTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = multipleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in2", "{\"id\":\"123\"}")),
|
||||
Status.UP, "out", "out2");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void healthIndicatorDownMultipleKStreamsTest() throws Exception {
|
||||
try (ConfigurableApplicationContext context = multipleStream()) {
|
||||
receive(context,
|
||||
Lists.newArrayList(new ProducerRecord<>("in", "{\"id\":\"123\"}"),
|
||||
new ProducerRecord<>("in2", "{\"id\":\"124\"}")),
|
||||
Status.DOWN, "out", "out2");
|
||||
}
|
||||
}
|
||||
|
||||
private static Status getStatusKStream(Map<String, Object> details) {
|
||||
Health health = (Health) details.get("kstream");
|
||||
return health != null ? health.getStatus() : Status.DOWN;
|
||||
}
|
||||
|
||||
private static boolean waitFor(Map<String, Object> details) {
|
||||
Health health = (Health) details.get("kstream");
|
||||
if (health.getStatus() == Status.UP) {
|
||||
Map<String, Object> moreDetails = health.getDetails();
|
||||
Health kStreamHealth = (Health) moreDetails
|
||||
.get("kafkaStreamsBinderHealthIndicator");
|
||||
String status = (String) kStreamHealth.getDetails().get("threadState");
|
||||
return status != null
|
||||
&& (status.equalsIgnoreCase(KafkaStreams.State.REBALANCING.name())
|
||||
|| status.equalsIgnoreCase("PARTITIONS_REVOKED")
|
||||
|| status.equalsIgnoreCase("PARTITIONS_ASSIGNED")
|
||||
|| status.equalsIgnoreCase(
|
||||
KafkaStreams.State.PENDING_SHUTDOWN.name()));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private void receive(ConfigurableApplicationContext context,
|
||||
List<ProducerRecord<Integer, String>> records, Status expected,
|
||||
String... topics) throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id0",
|
||||
"false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
try (Consumer<String, String> consumer = cf.createConsumer()) {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
CountDownLatch latch = new CountDownLatch(records.size());
|
||||
for (ProducerRecord<Integer, String> record : records) {
|
||||
ListenableFuture<SendResult<Integer, String>> future = template
|
||||
.send(record);
|
||||
future.addCallback(
|
||||
new ListenableFutureCallback<SendResult<Integer, String>>() {
|
||||
@Override
|
||||
public void onFailure(Throwable ex) {
|
||||
Assert.fail();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSuccess(SendResult<Integer, String> result) {
|
||||
latch.countDown();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
latch.await(5, TimeUnit.SECONDS);
|
||||
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, topics);
|
||||
KafkaTestUtils.getRecords(consumer, 1000);
|
||||
|
||||
TimeUnit.SECONDS.sleep(2);
|
||||
checkHealth(context, expected);
|
||||
}
|
||||
finally {
|
||||
pf.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
private static void checkHealth(ConfigurableApplicationContext context,
|
||||
Status expected) throws InterruptedException {
|
||||
HealthIndicator healthIndicator = context.getBean("bindersHealthIndicator",
|
||||
HealthIndicator.class);
|
||||
Health health = healthIndicator.health();
|
||||
while (waitFor(health.getDetails())) {
|
||||
TimeUnit.SECONDS.sleep(2);
|
||||
health = healthIndicator.health();
|
||||
}
|
||||
assertThat(health.getStatus()).isEqualTo(expected);
|
||||
assertThat(getStatusKStream(health.getDetails())).isEqualTo(expected);
|
||||
}
|
||||
|
||||
private ConfigurableApplicationContext singleStream() {
|
||||
SpringApplication app = new SpringApplication(KStreamApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
return app.run("--server.port=0", "--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=in",
|
||||
"--spring.cloud.stream.bindings.output.destination=out",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId="
|
||||
+ "ApplicationHealthTest-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
}
|
||||
|
||||
private ConfigurableApplicationContext multipleStream() {
|
||||
System.setProperty("logging.level.org.apache.kafka", "OFF");
|
||||
SpringApplication app = new SpringApplication(AnotherKStreamApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
return app.run("--server.port=0", "--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=in",
|
||||
"--spring.cloud.stream.bindings.output.destination=out",
|
||||
"--spring.cloud.stream.bindings.input2.destination=in2",
|
||||
"--spring.cloud.stream.bindings.output2.destination=out2",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output2.producer.keySerde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId="
|
||||
+ "ApplicationHealthTest-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input2.consumer.applicationId="
|
||||
+ "ApplicationHealthTest2-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class KStreamApplication {
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Object, Product> process(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> {
|
||||
if (product.getId() != 123) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding({ KafkaStreamsProcessor.class, KafkaStreamsProcessorX.class })
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class AnotherKStreamApplication {
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Object, Product> process(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> {
|
||||
if (product.getId() != 123) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
@StreamListener("input2")
|
||||
@SendTo("output2")
|
||||
public KStream<Object, Product> process2(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> {
|
||||
if (product.getId() != 123) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public interface KafkaStreamsProcessorX {
|
||||
|
||||
@Input("input2")
|
||||
KStream<?, ?> input();
|
||||
|
||||
@Output("output2")
|
||||
KStream<?, ?> output();
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
|
||||
Integer id;
|
||||
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -55,34 +55,30 @@ import org.springframework.messaging.handler.annotation.SendTo;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* This test case demonstrates a kafk-streams topology which consumes messages from
|
||||
* multiple kafka topics(destinations).
|
||||
*
|
||||
* See
|
||||
* {@link KafkaStreamsBinderMultipleInputTopicsTest#testKstreamWordCountWithStringInputAndPojoOuput}
|
||||
* where the input topic names are specified as comma-separated String values for the
|
||||
* property spring.cloud.stream.bindings.input.destination.
|
||||
*
|
||||
* @author Sarath Shyam
|
||||
*
|
||||
* This test case demonstrates a kafk-streams topology which consumes messages from
|
||||
* multiple kafka topics(destinations).
|
||||
* See {@link KafkaStreamsBinderMultipleInputTopicsTest#testKstreamWordCountWithStringInputAndPojoOuput} where
|
||||
* the input topic names are specified as comma-separated String values for
|
||||
* the property spring.cloud.stream.bindings.input.destination.
|
||||
*
|
||||
*
|
||||
*/
|
||||
public class KafkaStreamsBinderMultipleInputTopicsTest {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
}
|
||||
@@ -94,8 +90,7 @@ public class KafkaStreamsBinderMultipleInputTopicsTest {
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithStringInputAndPojoOuput() throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
WordCountProcessorApplication.class);
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
@@ -104,49 +99,45 @@ public class KafkaStreamsBinderMultipleInputTopicsTest {
|
||||
"--spring.cloud.stream.bindings.output.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=WordCountProcessorApplication-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=WordCountProcessorApplication-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
receiveAndValidate(context);
|
||||
}
|
||||
finally {
|
||||
} finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words1");
|
||||
template.sendDefault("foobar1");
|
||||
template.setDefaultTopic("words2");
|
||||
template.sendDefault("foobar2");
|
||||
// Sleep a bit so that both the messages are processed before reading from the
|
||||
// output topic.
|
||||
// Else assertions might fail arbitrarily.
|
||||
|
||||
//Sleep a bit so that both the messages are processed before reading from the output topic.
|
||||
//Else assertions might fail arbitrarily.
|
||||
Thread.sleep(5000);
|
||||
ConsumerRecords<String, String> received = KafkaTestUtils.getRecords(consumer);
|
||||
|
||||
ConsumerRecords<String, String> received = KafkaTestUtils.getRecords(consumer);
|
||||
|
||||
List<String> wordCounts = new ArrayList<>(2);
|
||||
|
||||
received.records("counts")
|
||||
.forEach((consumerRecord) -> wordCounts.add((consumerRecord.value())));
|
||||
|
||||
received.records("counts").forEach((consumerRecord) -> {
|
||||
wordCounts.add((consumerRecord.value()));
|
||||
});
|
||||
System.out.println(wordCounts);
|
||||
assertThat(wordCounts.contains("{\"word\":\"foobar1\",\"count\":1}")).isTrue();
|
||||
assertThat(wordCounts.contains("{\"word\":\"foobar2\",\"count\":1}")).isTrue();
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@@ -154,22 +145,22 @@ public class KafkaStreamsBinderMultipleInputTopicsTest {
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
static class WordCountProcessorApplication {
|
||||
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<?, WordCount> process(
|
||||
@Input("input") KStream<Object, String> input) {
|
||||
public KStream<?, WordCount> process(@Input("input") KStream<Object, String> input) {
|
||||
|
||||
input.map((k, v) -> {
|
||||
input.map((k,v) -> {
|
||||
System.out.println(k);
|
||||
System.out.println(v);
|
||||
return new KeyValue<>(k, v);
|
||||
return new KeyValue<>(k,v);
|
||||
});
|
||||
return input
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.count(Materialized.as("WordCounts")).toStream()
|
||||
.count(Materialized.as("WordCounts"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key, value)));
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -57,23 +57,18 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<Integer, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
|
||||
"false", embeddedKafka);
|
||||
// consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
// Deserializer.class.getName());
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
|
||||
//consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Deserializer.class.getName());
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -92,36 +87,26 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
"--spring.cloud.stream.bindings.input.destination=foos",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde="
|
||||
+ "org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId="
|
||||
+ "KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
receiveAndValidateFoo(context);
|
||||
}
|
||||
finally {
|
||||
} finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos");
|
||||
template.sendDefault("{\"id\":\"123\"}");
|
||||
ConsumerRecord<Integer, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts-id");
|
||||
ConsumerRecord<Integer, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
|
||||
|
||||
assertThat(cr.key()).isEqualTo(123);
|
||||
ObjectMapper om = new ObjectMapper();
|
||||
@@ -136,15 +121,15 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Integer, Long> process(KStream<Object, Product> input) {
|
||||
return input.filter((key, product) -> product.getId() == 123)
|
||||
return input
|
||||
.filter((key, product) -> product.getId() == 123)
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class),
|
||||
new JsonSerde<>(Product.class)))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class)))
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("id-count-store-x")).toStream()
|
||||
.count(Materialized.as("id-count-store-x"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(key.key().id, value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
@@ -158,7 +143,5 @@ public class KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -73,21 +73,17 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
}
|
||||
@@ -98,10 +94,8 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithApplicationIdSpecifiedAtDefaultConsumer()
|
||||
throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
WordCountProcessorApplication.class);
|
||||
public void testKstreamWordCountWithApplicationIdSpecifiedAtDefaultConsumer() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
@@ -111,27 +105,21 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.default.consumer.application-id=basic-word-count",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.bindings.output.producer.headerMode=raw",
|
||||
"--spring.cloud.stream.bindings.input.consumer.headerMode=raw",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString())) {
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
receiveAndValidate(context);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithInputBindingLevelApplicationId()
|
||||
throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
WordCountProcessorApplication.class);
|
||||
public void testKstreamWordCountWithInputBindingLevelApplicationId() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
try (ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
@@ -141,55 +129,43 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=basic-word-count",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.bindings.input.consumer.concurrency=2",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString())) {
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
receiveAndValidate(context);
|
||||
// Assertions on StreamBuilderFactoryBean
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context
|
||||
.getBean("&stream-builder-process", StreamsBuilderFactoryBean.class);
|
||||
//Assertions on StreamBuilderFactoryBean
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context.getBean("&stream-builder-process", StreamsBuilderFactoryBean.class);
|
||||
KafkaStreams kafkaStreams = streamsBuilderFactoryBean.getKafkaStreams();
|
||||
ReadOnlyWindowStore<Object, Object> store = kafkaStreams
|
||||
.store("foo-WordCounts", QueryableStoreTypes.windowStore());
|
||||
ReadOnlyWindowStore<Object, Object> store = kafkaStreams.store("foo-WordCounts", QueryableStoreTypes.windowStore());
|
||||
assertThat(store).isNotNull();
|
||||
|
||||
Map streamConfigGlobalProperties = context
|
||||
.getBean("streamConfigGlobalProperties", Map.class);
|
||||
Map streamConfigGlobalProperties = context.getBean("streamConfigGlobalProperties", Map.class);
|
||||
|
||||
// Ensure that concurrency settings are mapped to number of stream task
|
||||
// threads in Kafka Streams.
|
||||
final Integer concurrency = (Integer) streamConfigGlobalProperties
|
||||
.get(StreamsConfig.NUM_STREAM_THREADS_CONFIG);
|
||||
//Ensure that concurrency settings are mapped to number of stream task threads in Kafka Streams.
|
||||
final Integer concurrency = (Integer)streamConfigGlobalProperties.get(StreamsConfig.NUM_STREAM_THREADS_CONFIG);
|
||||
assertThat(concurrency).isEqualTo(2);
|
||||
|
||||
sendTombStoneRecordsAndVerifyGracefulHandling();
|
||||
|
||||
CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean,
|
||||
"cleanupConfig", CleanupConfig.class);
|
||||
CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean, "cleanupConfig",
|
||||
CleanupConfig.class);
|
||||
assertThat(cleanup.cleanupOnStart()).isTrue();
|
||||
assertThat(cleanup.cleanupOnStop()).isFalse();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
assertThat(cr.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
|
||||
}
|
||||
finally {
|
||||
@@ -199,17 +175,14 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
|
||||
private void sendTombStoneRecordsAndVerifyGracefulHandling() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault(null);
|
||||
ConsumerRecords<String, String> received = consumer
|
||||
.poll(Duration.ofMillis(5000));
|
||||
// By asserting that the received record is empty, we are ensuring that the
|
||||
// tombstone record
|
||||
// was handled by the binder gracefully.
|
||||
ConsumerRecords<String, String> received = consumer.poll(Duration.ofMillis(5000));
|
||||
//By asserting that the received record is empty, we are ensuring that the tombstone record
|
||||
//was handled by the binder gracefully.
|
||||
assertThat(received.isEmpty()).isTrue();
|
||||
}
|
||||
finally {
|
||||
@@ -227,20 +200,16 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<?, WordCount> process(
|
||||
@Input("input") KStream<Object, String> input) {
|
||||
public KStream<?, WordCount> process(@Input("input") KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(timeWindows).count(Materialized.as("foo-WordCounts"))
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("foo-WordCounts"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null,
|
||||
new WordCount(key.key(), value,
|
||||
new Date(key.window().start()),
|
||||
new Date(key.window().end()))));
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -298,7 +267,6 @@ public class KafkaStreamsBinderWordCountIntegrationTests {
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -62,21 +62,17 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -95,55 +91,40 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.input.destination=foos",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=ProductCountApplication-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.application.server"
|
||||
+ "=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.application.server=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
receiveAndValidateFoo(context);
|
||||
}
|
||||
finally {
|
||||
} finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos");
|
||||
template.sendDefault("{\"id\":\"123\"}");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts-id");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
|
||||
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
|
||||
|
||||
ProductCountApplication.Foo foo = context
|
||||
.getBean(ProductCountApplication.Foo.class);
|
||||
ProductCountApplication.Foo foo = context.getBean(ProductCountApplication.Foo.class);
|
||||
assertThat(foo.getProductStock(123).equals(1L));
|
||||
|
||||
// perform assertions on HostInfo related methods in InteractiveQueryService
|
||||
InteractiveQueryService interactiveQueryService = context
|
||||
.getBean(InteractiveQueryService.class);
|
||||
//perform assertions on HostInfo related methods in InteractiveQueryService
|
||||
InteractiveQueryService interactiveQueryService = context.getBean(InteractiveQueryService.class);
|
||||
HostInfo currentHostInfo = interactiveQueryService.getCurrentHostInfo();
|
||||
assertThat(currentHostInfo.host() + ":" + currentHostInfo.port())
|
||||
.isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
assertThat(currentHostInfo.host() + ":" + currentHostInfo.port()).isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
|
||||
HostInfo hostInfo = interactiveQueryService.getHostInfo("prod-id-count-store",
|
||||
123, new IntegerSerializer());
|
||||
assertThat(hostInfo.host() + ":" + hostInfo.port())
|
||||
.isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
HostInfo hostInfo = interactiveQueryService.getHostInfo("prod-id-count-store", 123, new IntegerSerializer());
|
||||
assertThat(hostInfo.host() + ":" + hostInfo.port()).isEqualTo(embeddedKafka.getBrokersAsString());
|
||||
|
||||
HostInfo hostInfoFoo = interactiveQueryService
|
||||
.getHostInfo("prod-id-count-store-foo", 123, new IntegerSerializer());
|
||||
HostInfo hostInfoFoo = interactiveQueryService.getHostInfo("prod-id-count-store-foo", 123, new IntegerSerializer());
|
||||
assertThat(hostInfoFoo).isNull();
|
||||
}
|
||||
|
||||
@@ -156,13 +137,13 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
@SuppressWarnings("deprecation")
|
||||
public KStream<?, String> process(KStream<Object, Product> input) {
|
||||
|
||||
return input.filter((key, product) -> product.getId() == 123)
|
||||
return input
|
||||
.filter((key, product) -> product.getId() == 123)
|
||||
.map((key, value) -> new KeyValue<>(value.id, value))
|
||||
.groupByKey(Serialized.with(new Serdes.IntegerSerde(),
|
||||
new JsonSerde<>(Product.class)))
|
||||
.count(Materialized.as("prod-id-count-store")).toStream()
|
||||
.map((key, value) -> new KeyValue<>(null,
|
||||
"Count for product with ID 123: " + value));
|
||||
.groupByKey(Serialized.with(new Serdes.IntegerSerde(), new JsonSerde<>(Product.class)))
|
||||
.count(Materialized.as("prod-id-count-store"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, "Count for product with ID 123: " + value));
|
||||
}
|
||||
|
||||
@Bean
|
||||
@@ -171,7 +152,6 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
}
|
||||
|
||||
static class Foo {
|
||||
|
||||
InteractiveQueryService interactiveQueryService;
|
||||
|
||||
Foo(InteractiveQueryService interactiveQueryService) {
|
||||
@@ -179,15 +159,12 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
}
|
||||
|
||||
public Long getProductStock(Integer id) {
|
||||
ReadOnlyKeyValueStore<Object, Object> keyValueStore = interactiveQueryService
|
||||
.getQueryableStore("prod-id-count-store",
|
||||
QueryableStoreTypes.keyValueStore());
|
||||
ReadOnlyKeyValueStore<Object, Object> keyValueStore =
|
||||
interactiveQueryService.getQueryableStore("prod-id-count-store", QueryableStoreTypes.keyValueStore());
|
||||
|
||||
return (Long) keyValueStore.get(id);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
@@ -201,7 +178,5 @@ public class KafkaStreamsInteractiveQueryIntegrationTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -69,32 +69,26 @@ import static org.mockito.Mockito.verify;
|
||||
public abstract class KafkaStreamsNativeEncodingDecodingTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@SpyBean
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate conversionDelegate;
|
||||
org.springframework.cloud.stream.binder.kafka.streams.KafkaStreamsMessageConversionDelegate KafkaStreamsMessageConversionDelegate;
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
|
||||
embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes",
|
||||
embeddedKafka.getZookeeperConnectionString());
|
||||
public static void setUp() throws Exception {
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
|
||||
System.setProperty("spring.cloud.stream.kafka.streams.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
|
||||
|
||||
System.setProperty("server.port", "0");
|
||||
System.setProperty("spring.jmx.enabled", "false");
|
||||
System.setProperty("server.port","0");
|
||||
System.setProperty("spring.jmx.enabled","false");
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
|
||||
embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
|
||||
}
|
||||
@@ -107,54 +101,44 @@ public abstract class KafkaStreamsNativeEncodingDecodingTests {
|
||||
@SpringBootTest(properties = {
|
||||
"spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=NativeEncodingDecodingEnabledTests-abc" }, webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
public static class NativeEncodingDecodingEnabledTests
|
||||
extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
"spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=NativeEncodingDecodingEnabledTests-abc"
|
||||
},
|
||||
webEnvironment= SpringBootTest.WebEnvironment.NONE
|
||||
)
|
||||
public static class NativeEncodingDecodingEnabledTests extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
|
||||
@Test
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
|
||||
|
||||
verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).serializeOnOutbound(any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// @checkstyle:off
|
||||
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = "spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=NativeEncodingDecodingEnabledTests-xyz")
|
||||
// @checkstyle:on
|
||||
public static class NativeEncodingDecodingDisabledTests
|
||||
extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
@SpringBootTest(webEnvironment= SpringBootTest.WebEnvironment.NONE,
|
||||
properties = "spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=NativeEncodingDecodingEnabledTests-xyz")
|
||||
public static class NativeEncodingDecodingDisabledTests extends KafkaStreamsNativeEncodingDecodingTests {
|
||||
|
||||
@Test
|
||||
public void test() throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("foobar");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
|
||||
|
||||
verify(conversionDelegate).serializeOnOutbound(any(KStream.class));
|
||||
verify(conversionDelegate).deserializeOnInbound(any(Class.class),
|
||||
any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate).serializeOnOutbound(any(KStream.class));
|
||||
verify(KafkaStreamsMessageConversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@@ -171,15 +155,14 @@ public abstract class KafkaStreamsNativeEncodingDecodingTests {
|
||||
public KStream<?, String> process(KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(timeWindows).count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream().map((key, value) -> new KeyValue<>(null,
|
||||
"Count for " + key.key() + " : " + value));
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("foo-WordCounts-x"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, "Count for " + key.key() + " : " + value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka.streams.integration;
|
||||
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
@@ -49,11 +50,9 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkaStreamsStateStoreIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@Test
|
||||
public void testKstreamStateStore() throws Exception {
|
||||
@@ -63,75 +62,31 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=KafkaStreamsStateStoreIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=KafkaStreamsStateStoreIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
Thread.sleep(2000);
|
||||
receiveAndValidateFoo(context);
|
||||
}
|
||||
catch (Exception e) {
|
||||
} catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
} finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSameStateStoreIsCreatedOnlyOnceWhenMultipleInputBindingsArePresent() throws Exception {
|
||||
SpringApplication app = new SpringApplication(ProductCountApplicationWithMultipleInputBindings.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input1.consumer.applicationId"
|
||||
+ "=KafkaStreamsStateStoreIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
Thread.sleep(2000);
|
||||
// We are not particularly interested in querying the state store here, as that is verified by the other test
|
||||
// in this class. This test verifies that the same store is not attempted to be created by multiple input bindings.
|
||||
// Normally, that will cause an exception to be thrown. However by not getting any exceptions, we are verifying
|
||||
// that the binder is handling it appropriately.
|
||||
//For more info, see this issue: https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/issues/551
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foobar");
|
||||
template.sendDefault("{\"id\":\"123\"}");
|
||||
Thread.sleep(1000);
|
||||
|
||||
// assertions
|
||||
ProductCountApplication productCount = context
|
||||
.getBean(ProductCountApplication.class);
|
||||
//assertions
|
||||
ProductCountApplication productCount = context.getBean(ProductCountApplication.class);
|
||||
WindowStore<Object, String> state = productCount.state;
|
||||
assertThat(state != null).isTrue();
|
||||
assertThat(state.name()).isEqualTo("mystate");
|
||||
@@ -144,71 +99,33 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
public static class ProductCountApplication {
|
||||
|
||||
WindowStore<Object, String> state;
|
||||
|
||||
boolean processed;
|
||||
|
||||
@StreamListener("input")
|
||||
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000)
|
||||
@SuppressWarnings({ "deprecation", "unchecked" })
|
||||
@SuppressWarnings({"deprecation", "unchecked"})
|
||||
public void process(KStream<Object, Product> input) {
|
||||
|
||||
input.process(() -> new Processor<Object, Product>() {
|
||||
input
|
||||
.process(() -> new Processor<Object, Product>() {
|
||||
|
||||
@Override
|
||||
public void init(ProcessorContext processorContext) {
|
||||
state = (WindowStore) processorContext.getStateStore("mystate");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Object s, Product product) {
|
||||
processed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (state != null) {
|
||||
state.close();
|
||||
@Override
|
||||
public void init(ProcessorContext processorContext) {
|
||||
state = (WindowStore) processorContext.getStateStore("mystate");
|
||||
}
|
||||
}
|
||||
}, "mystate");
|
||||
}
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessorY.class)
|
||||
@EnableAutoConfiguration
|
||||
public static class ProductCountApplicationWithMultipleInputBindings {
|
||||
|
||||
WindowStore<Object, String> state;
|
||||
|
||||
boolean processed;
|
||||
|
||||
@StreamListener
|
||||
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000)
|
||||
@SuppressWarnings({ "deprecation", "unchecked" })
|
||||
public void process(@Input("input1")KStream<Object, Product> input, @Input("input2")KStream<Object, Product> input2) {
|
||||
|
||||
input.process(() -> new Processor<Object, Product>() {
|
||||
|
||||
@Override
|
||||
public void init(ProcessorContext processorContext) {
|
||||
state = (WindowStore) processorContext.getStateStore("mystate");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Object s, Product product) {
|
||||
processed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (state != null) {
|
||||
state.close();
|
||||
@Override
|
||||
public void process(Object s, Product product) {
|
||||
processed = true;
|
||||
}
|
||||
}
|
||||
}, "mystate");
|
||||
|
||||
//simple use of input2, we are not using input2 for anything other than triggering some test behavior.
|
||||
input2.foreach((key, value) -> { });
|
||||
@Override
|
||||
public void close() {
|
||||
if (state != null) {
|
||||
state.close();
|
||||
}
|
||||
}
|
||||
}, "mystate");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -223,7 +140,6 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorX {
|
||||
@@ -231,13 +147,4 @@ public class KafkaStreamsStateStoreIntegrationTests {
|
||||
@Input("input")
|
||||
KStream<?, ?> input();
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorY {
|
||||
|
||||
@Input("input1")
|
||||
KStream<?, ?> input1();
|
||||
|
||||
@Input("input2")
|
||||
KStream<?, ?> input2();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -60,21 +60,17 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts-id");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts-id");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
|
||||
}
|
||||
@@ -93,24 +89,19 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.input.destination=foos",
|
||||
"--spring.cloud.stream.bindings.output.destination=counts-id",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$IntegerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=ProductCountApplication-xyz",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
receiveAndValidateFoo(context);
|
||||
// Assertions on StreamBuilderFactoryBean
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context
|
||||
.getBean("&stream-builder-process", StreamsBuilderFactoryBean.class);
|
||||
CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean,
|
||||
"cleanupConfig", CleanupConfig.class);
|
||||
//Assertions on StreamBuilderFactoryBean
|
||||
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context.getBean("&stream-builder-process",
|
||||
StreamsBuilderFactoryBean.class);
|
||||
CleanupConfig cleanup = TestUtils.getPropertyValue(streamsBuilderFactoryBean, "cleanupConfig",
|
||||
CleanupConfig.class);
|
||||
assertThat(cleanup.cleanupOnStart()).isFalse();
|
||||
assertThat(cleanup.cleanupOnStop()).isTrue();
|
||||
}
|
||||
@@ -119,16 +110,13 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("foos");
|
||||
template.sendDefault("{\"id\":\"123\"}");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts-id");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
|
||||
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
|
||||
}
|
||||
|
||||
@@ -140,16 +128,15 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
|
||||
@SendTo("output")
|
||||
public KStream<Integer, String> process(KStream<Object, Product> input) {
|
||||
|
||||
return input.filter((key, product) -> product.getId() == 123)
|
||||
return input
|
||||
.filter((key, product) -> product.getId() == 123)
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class),
|
||||
new JsonSerde<>(Product.class)))
|
||||
.groupByKey(Serialized.with(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class)))
|
||||
.windowedBy(TimeWindows.of(5000))
|
||||
.count(Materialized.as("id-count-store")).toStream()
|
||||
.map((key, value) -> new KeyValue<>(key.key().id,
|
||||
"Count for product with ID 123: " + value));
|
||||
.count(Materialized.as("id-count-store"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(key.key().id, "Count for product with ID 123: " + value));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
@@ -163,7 +150,5 @@ public class KafkastreamsBinderPojoInputStringOutputIntegrationTests {
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -66,26 +66,21 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class PerRecordAvroContentTypeTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"received-sensors");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "received-sensors");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, byte[]> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("avro-ct-test",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("avro-ct-test", "false", embeddedKafka);
|
||||
|
||||
// Receive the data as byte[]
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
ByteArrayDeserializer.class);
|
||||
//Receive the data as byte[]
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
|
||||
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, byte[]> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, byte[]> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "received-sensors");
|
||||
}
|
||||
@@ -107,15 +102,12 @@ public class PerRecordAvroContentTypeTests {
|
||||
"--spring.cloud.stream.bindings.output.contentType=application/avro",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=per-record-avro-contentType-test",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString())) {
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
// Use a custom avro test serializer
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
TestAvroSerializer.class);
|
||||
DefaultKafkaProducerFactory<Integer, Sensor> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
//Use a custom avro test serializer
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, TestAvroSerializer.class);
|
||||
DefaultKafkaProducerFactory<Integer, Sensor> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
try {
|
||||
KafkaTemplate<Integer, Sensor> template = new KafkaTemplate<>(pf, true);
|
||||
|
||||
@@ -125,32 +117,26 @@ public class PerRecordAvroContentTypeTests {
|
||||
sensor.setAcceleration(random.nextFloat() * 10);
|
||||
sensor.setVelocity(random.nextFloat() * 100);
|
||||
sensor.setTemperature(random.nextFloat() * 50);
|
||||
// Send with avro content type set.
|
||||
//Send with avro content type set.
|
||||
Message<?> message = MessageBuilder.withPayload(sensor)
|
||||
.setHeader("contentType", "application/avro").build();
|
||||
.setHeader("contentType", "application/avro")
|
||||
.build();
|
||||
template.setDefaultTopic("sensors");
|
||||
template.send(message);
|
||||
|
||||
// Serialized byte[] ^^ is received by the binding process and deserialzed
|
||||
// it using avro converter.
|
||||
// Then finally, the data will be output to a return topic as byte[]
|
||||
// (using the same avro converter).
|
||||
//Serialized byte[] ^^ is received by the binding process and deserialzed it using avro converter.
|
||||
//Then finally, the data will be output to a return topic as byte[] (using the same avro converter).
|
||||
|
||||
// Receive the byte[] from return topic
|
||||
ConsumerRecord<String, byte[]> cr = KafkaTestUtils
|
||||
.getSingleRecord(consumer, "received-sensors");
|
||||
//Receive the byte[] from return topic
|
||||
ConsumerRecord<String, byte[]> cr = KafkaTestUtils.getSingleRecord(consumer, "received-sensors");
|
||||
final byte[] value = cr.value();
|
||||
|
||||
// Convert the byte[] received back to avro object and verify that it is
|
||||
// the same as the one we sent ^^.
|
||||
//Convert the byte[] received back to avro object and verify that it is the same as the one we sent ^^.
|
||||
AvroSchemaMessageConverter avroSchemaMessageConverter = new AvroSchemaMessageConverter();
|
||||
|
||||
Message<?> receivedMessage = MessageBuilder.withPayload(value)
|
||||
.setHeader("contentType",
|
||||
MimeTypeUtils.parseMimeType("application/avro"))
|
||||
.build();
|
||||
Sensor messageConverted = (Sensor) avroSchemaMessageConverter
|
||||
.fromMessage(receivedMessage, Sensor.class);
|
||||
.setHeader("contentType", MimeTypeUtils.parseMimeType("application/avro")).build();
|
||||
Sensor messageConverted = (Sensor)avroSchemaMessageConverter.fromMessage(receivedMessage, Sensor.class);
|
||||
assertThat(messageConverted).isEqualTo(sensor);
|
||||
}
|
||||
finally {
|
||||
@@ -166,8 +152,7 @@ public class PerRecordAvroContentTypeTests {
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<?, Sensor> process(@Input("input") KStream<Object, Sensor> input) {
|
||||
// return the same Sensor object unchanged so that we can do test
|
||||
// verifications
|
||||
//return the same Sensor object unchanged so that we can do test verifications
|
||||
return input.map(KeyValue::new);
|
||||
}
|
||||
|
||||
@@ -176,7 +161,5 @@ public class PerRecordAvroContentTypeTests {
|
||||
public MessageConverter sensorMessageConverter() throws IOException {
|
||||
return new AvroSchemaMessageConverter();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -62,18 +62,52 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"enriched-order");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "enriched-order");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<Long, EnrichedOrder> consumer;
|
||||
|
||||
interface CustomGlobalKTableProcessor extends KafkaStreamsProcessor {
|
||||
|
||||
@Input("input-x")
|
||||
GlobalKTable<?, ?> inputX();
|
||||
|
||||
@Input("input-y")
|
||||
GlobalKTable<?, ?> inputY();
|
||||
}
|
||||
|
||||
@EnableBinding(CustomGlobalKTableProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class OrderEnricherApplication {
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<Long, EnrichedOrder> process(@Input("input") KStream<Long, Order> ordersStream,
|
||||
@Input("input-x") GlobalKTable<Long, Customer> customers,
|
||||
@Input("input-y") GlobalKTable<Long, Product> products) {
|
||||
|
||||
KStream<Long, CustomerOrder> customerOrdersStream = ordersStream.join(customers,
|
||||
(orderId, order) -> order.getCustomerId(),
|
||||
(order, customer) -> new CustomerOrder(customer, order));
|
||||
|
||||
return customerOrdersStream.join(products,
|
||||
(orderId, customerOrder) -> customerOrder
|
||||
.productId(),
|
||||
(customerOrder, product) -> {
|
||||
EnrichedOrder enrichedOrder = new EnrichedOrder();
|
||||
enrichedOrder.setProduct(product);
|
||||
enrichedOrder.setCustomer(customerOrder.customer);
|
||||
enrichedOrder.setOrder(customerOrder.order);
|
||||
return enrichedOrder;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStreamToGlobalKTable() throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
StreamToGlobalKTableJoinIntegrationTests.OrderEnricherApplication.class);
|
||||
SpringApplication app = new SpringApplication(StreamToGlobalKTableJoinIntegrationTests.OrderEnricherApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
@@ -85,49 +119,27 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.input-x.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.input-y.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde"
|
||||
+ "=org.springframework.cloud.stream.binder.kafka.streams.integration"
|
||||
+ ".StreamToGlobalKTableJoinIntegrationTests$OrderSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-x.consumer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-x.consumer.valueSerde"
|
||||
+ "=org.springframework.cloud.stream.binder.kafka.streams.integration."
|
||||
+ "StreamToGlobalKTableJoinIntegrationTests$CustomerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-y.consumer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-y.consumer.valueSerde"
|
||||
+ "=org.springframework.cloud.stream.binder.kafka.streams."
|
||||
+ "integration.StreamToGlobalKTableJoinIntegrationTests$ProductSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde"
|
||||
+ "=org.springframework.cloud.stream.binder.kafka.streams.integration"
|
||||
+ ".StreamToGlobalKTableJoinIntegrationTests$EnrichedOrderSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.keySerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde=org.springframework.cloud.stream.binder.kafka.streams.integration.StreamToGlobalKTableJoinIntegrationTests$OrderSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-x.consumer.keySerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-x.consumer.valueSerde=org.springframework.cloud.stream.binder.kafka.streams.integration.StreamToGlobalKTableJoinIntegrationTests$CustomerSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-y.consumer.keySerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-y.consumer.valueSerde=org.springframework.cloud.stream.binder.kafka.streams.integration.StreamToGlobalKTableJoinIntegrationTests$ProductSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde=org.springframework.cloud.stream.binder.kafka.streams.integration.StreamToGlobalKTableJoinIntegrationTests$EnrichedOrderSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=StreamToGlobalKTableJoinIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString())) {
|
||||
Map<String, Object> senderPropsCustomer = KafkaTestUtils
|
||||
.producerProps(embeddedKafka);
|
||||
senderPropsCustomer.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
|
||||
LongSerializer.class);
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=StreamToGlobalKTableJoinIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
Map<String, Object> senderPropsCustomer = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsCustomer.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
CustomerSerde customerSerde = new CustomerSerde();
|
||||
senderPropsCustomer.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
customerSerde.serializer().getClass());
|
||||
senderPropsCustomer.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, customerSerde.serializer().getClass());
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Customer> pfCustomer = new DefaultKafkaProducerFactory<>(
|
||||
senderPropsCustomer);
|
||||
KafkaTemplate<Long, Customer> template = new KafkaTemplate<>(pfCustomer,
|
||||
true);
|
||||
DefaultKafkaProducerFactory<Long, Customer> pfCustomer = new DefaultKafkaProducerFactory<>(senderPropsCustomer);
|
||||
KafkaTemplate<Long, Customer> template = new KafkaTemplate<>(pfCustomer, true);
|
||||
template.setDefaultTopic("customers");
|
||||
for (long i = 0; i < 5; i++) {
|
||||
final Customer customer = new Customer();
|
||||
@@ -135,18 +147,13 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
template.sendDefault(i, customer);
|
||||
}
|
||||
|
||||
Map<String, Object> senderPropsProduct = KafkaTestUtils
|
||||
.producerProps(embeddedKafka);
|
||||
senderPropsProduct.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
|
||||
LongSerializer.class);
|
||||
Map<String, Object> senderPropsProduct = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsProduct.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
ProductSerde productSerde = new ProductSerde();
|
||||
senderPropsProduct.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
productSerde.serializer().getClass());
|
||||
senderPropsProduct.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, productSerde.serializer().getClass());
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Product> pfProduct = new DefaultKafkaProducerFactory<>(
|
||||
senderPropsProduct);
|
||||
KafkaTemplate<Long, Product> productTemplate = new KafkaTemplate<>(pfProduct,
|
||||
true);
|
||||
DefaultKafkaProducerFactory<Long, Product> pfProduct = new DefaultKafkaProducerFactory<>(senderPropsProduct);
|
||||
KafkaTemplate<Long, Product> productTemplate = new KafkaTemplate<>(pfProduct, true);
|
||||
productTemplate.setDefaultTopic("products");
|
||||
|
||||
for (long i = 0; i < 5; i++) {
|
||||
@@ -155,16 +162,12 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
productTemplate.sendDefault(i, product);
|
||||
}
|
||||
|
||||
Map<String, Object> senderPropsOrder = KafkaTestUtils
|
||||
.producerProps(embeddedKafka);
|
||||
senderPropsOrder.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
|
||||
LongSerializer.class);
|
||||
Map<String, Object> senderPropsOrder = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderPropsOrder.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
OrderSerde orderSerde = new OrderSerde();
|
||||
senderPropsOrder.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
orderSerde.serializer().getClass());
|
||||
senderPropsOrder.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, orderSerde.serializer().getClass());
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Order> pfOrder = new DefaultKafkaProducerFactory<>(
|
||||
senderPropsOrder);
|
||||
DefaultKafkaProducerFactory<Long, Order> pfOrder = new DefaultKafkaProducerFactory<>(senderPropsOrder);
|
||||
KafkaTemplate<Long, Order> orderTemplate = new KafkaTemplate<>(pfOrder, true);
|
||||
orderTemplate.setDefaultTopic("orders");
|
||||
|
||||
@@ -175,19 +178,13 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
orderTemplate.sendDefault(i, order);
|
||||
}
|
||||
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
|
||||
LongDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
EnrichedOrderSerde enrichedOrderSerde = new EnrichedOrderSerde();
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
enrichedOrderSerde.deserializer().getClass());
|
||||
consumerProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE,
|
||||
"org.springframework.cloud.stream.binder.kafka.streams.integration."
|
||||
+ "StreamToGlobalKTableJoinIntegrationTests.EnrichedOrder");
|
||||
DefaultKafkaConsumerFactory<Long, EnrichedOrder> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, enrichedOrderSerde.deserializer().getClass());
|
||||
consumerProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE, "org.springframework.cloud.stream.binder.kafka.streams.integration.StreamToGlobalKTableJoinIntegrationTests.EnrichedOrder");
|
||||
DefaultKafkaConsumerFactory<Long, EnrichedOrder> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "enriched-order");
|
||||
@@ -196,14 +193,12 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<Long, EnrichedOrder>> enrichedOrders = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<Long, EnrichedOrder> records = KafkaTestUtils
|
||||
.getRecords(consumer);
|
||||
ConsumerRecords<Long, EnrichedOrder> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<Long, EnrichedOrder> record : records) {
|
||||
enrichedOrders.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
}
|
||||
while (count < 5 && (System.currentTimeMillis() - start) < 30000);
|
||||
} while (count < 5 && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count == 5).isTrue();
|
||||
assertThat(enrichedOrders.size() == 5).isTrue();
|
||||
@@ -211,16 +206,13 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
enrichedOrders.sort(Comparator.comparing(o -> o.key));
|
||||
|
||||
for (int i = 0; i < 5; i++) {
|
||||
KeyValue<Long, EnrichedOrder> enrichedOrderKeyValue = enrichedOrders
|
||||
.get(i);
|
||||
KeyValue<Long, EnrichedOrder> enrichedOrderKeyValue = enrichedOrders.get(i);
|
||||
assertThat(enrichedOrderKeyValue.key == i).isTrue();
|
||||
EnrichedOrder enrichedOrder = enrichedOrderKeyValue.value;
|
||||
assertThat(enrichedOrder.getOrder().customerId == i).isTrue();
|
||||
assertThat(enrichedOrder.getOrder().productId == i).isTrue();
|
||||
assertThat(enrichedOrder.getCustomer().name.equals("customer-" + i))
|
||||
.isTrue();
|
||||
assertThat(enrichedOrder.getProduct().name.equals("product-" + i))
|
||||
.isTrue();
|
||||
assertThat(enrichedOrder.getCustomer().name.equals("customer-" + i)).isTrue();
|
||||
assertThat(enrichedOrder.getProduct().name.equals("product-" + i)).isTrue();
|
||||
}
|
||||
pfCustomer.destroy();
|
||||
pfProduct.destroy();
|
||||
@@ -230,49 +222,9 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
|
||||
}
|
||||
|
||||
interface CustomGlobalKTableProcessor extends KafkaStreamsProcessor {
|
||||
|
||||
@Input("input-x")
|
||||
GlobalKTable<?, ?> inputX();
|
||||
|
||||
@Input("input-y")
|
||||
GlobalKTable<?, ?> inputY();
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(CustomGlobalKTableProcessor.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class OrderEnricherApplication {
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<Long, EnrichedOrder> process(
|
||||
@Input("input") KStream<Long, Order> ordersStream,
|
||||
@Input("input-x") GlobalKTable<Long, Customer> customers,
|
||||
@Input("input-y") GlobalKTable<Long, Product> products) {
|
||||
|
||||
KStream<Long, CustomerOrder> customerOrdersStream = ordersStream.join(
|
||||
customers, (orderId, order) -> order.getCustomerId(),
|
||||
(order, customer) -> new CustomerOrder(customer, order));
|
||||
|
||||
return customerOrdersStream.join(products,
|
||||
(orderId, customerOrder) -> customerOrder.productId(),
|
||||
(customerOrder, product) -> {
|
||||
EnrichedOrder enrichedOrder = new EnrichedOrder();
|
||||
enrichedOrder.setProduct(product);
|
||||
enrichedOrder.setCustomer(customerOrder.customer);
|
||||
enrichedOrder.setOrder(customerOrder.order);
|
||||
return enrichedOrder;
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Order {
|
||||
|
||||
long customerId;
|
||||
|
||||
long productId;
|
||||
|
||||
public long getCustomerId() {
|
||||
@@ -290,7 +242,6 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
public void setProductId(long productId) {
|
||||
this.productId = productId;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Customer {
|
||||
@@ -304,7 +255,6 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
@@ -318,15 +268,12 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class EnrichedOrder {
|
||||
|
||||
Product product;
|
||||
|
||||
Customer customer;
|
||||
|
||||
Order order;
|
||||
|
||||
public Product getProduct() {
|
||||
@@ -352,13 +299,10 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
public void setOrder(Order order) {
|
||||
this.order = order;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class CustomerOrder {
|
||||
|
||||
private final Customer customer;
|
||||
|
||||
private final Order order;
|
||||
|
||||
CustomerOrder(final Customer customer, final Order order) {
|
||||
@@ -369,23 +313,10 @@ public class StreamToGlobalKTableJoinIntegrationTests {
|
||||
long productId() {
|
||||
return order.getProductId();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static class OrderSerde extends JsonSerde<Order> {
|
||||
|
||||
}
|
||||
|
||||
public static class CustomerSerde extends JsonSerde<Customer> {
|
||||
|
||||
}
|
||||
|
||||
public static class ProductSerde extends JsonSerde<Product> {
|
||||
|
||||
}
|
||||
|
||||
public static class EnrichedOrderSerde extends JsonSerde<EnrichedOrder> {
|
||||
|
||||
}
|
||||
|
||||
public static class OrderSerde extends JsonSerde<Order> {}
|
||||
public static class CustomerSerde extends JsonSerde<Customer> {}
|
||||
public static class ProductSerde extends JsonSerde<Product> {}
|
||||
public static class EnrichedOrderSerde extends JsonSerde<EnrichedOrder> {}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -65,28 +65,47 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class StreamToTableJoinIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"output-topic-1", "output-topic-2");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "output-topic-1", "output-topic-2");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessorX.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class CountClicksPerRegionApplication {
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<String, Long> process(@Input("input") KStream<String, Long> userClicksStream,
|
||||
@Input("input-x") KTable<String, String> userRegionsTable) {
|
||||
|
||||
return userClicksStream
|
||||
.leftJoin(userRegionsTable, (clicks, region) -> new RegionWithClicks(region == null ? "UNKNOWN" : region, clicks),
|
||||
Joined.with(Serdes.String(), Serdes.Long(), null))
|
||||
.map((user, regionWithClicks) -> new KeyValue<>(regionWithClicks.getRegion(), regionWithClicks.getClicks()))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.Long()))
|
||||
.reduce((firstClicks, secondClicks) -> firstClicks + secondClicks)
|
||||
.toStream();
|
||||
}
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorX extends KafkaStreamsProcessor {
|
||||
|
||||
@Input("input-x")
|
||||
KTable<?, ?> inputX();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStreamToTable() throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
CountClicksPerRegionApplication.class);
|
||||
SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-1",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-1", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
|
||||
StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");
|
||||
|
||||
@@ -98,47 +117,35 @@ public class StreamToTableJoinIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.input.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.input-x.consumer.useNativeDecoding=true",
|
||||
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.valueSerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.keySerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.keySerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.valueSerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=StreamToTableJoinIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString())) {
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=StreamToTableJoinIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
|
||||
// Input 1: Region per user (multiple records allowed per user).
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(new KeyValue<>(
|
||||
"alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"), new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"), new KeyValue<>("alice",
|
||||
"europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"), new KeyValue<>("fang", "asia"));
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(
|
||||
new KeyValue<>("alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"),
|
||||
new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"),
|
||||
new KeyValue<>("alice", "europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"),
|
||||
new KeyValue<>("fang", "asia")
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils
|
||||
.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
|
||||
StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
StringSerializer.class);
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(
|
||||
senderProps1);
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(senderProps1);
|
||||
KafkaTemplate<String, String> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic("user-regions-1");
|
||||
|
||||
@@ -148,19 +155,21 @@ public class StreamToTableJoinIntegrationTests {
|
||||
|
||||
// Input 2: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks = Arrays.asList(
|
||||
new KeyValue<>("alice", 13L), new KeyValue<>("bob", 4L),
|
||||
new KeyValue<>("chao", 25L), new KeyValue<>("bob", 19L),
|
||||
new KeyValue<>("dave", 56L), new KeyValue<>("eve", 78L),
|
||||
new KeyValue<>("alice", 40L), new KeyValue<>("fang", 99L));
|
||||
new KeyValue<>("alice", 13L),
|
||||
new KeyValue<>("bob", 4L),
|
||||
new KeyValue<>("chao", 25L),
|
||||
new KeyValue<>("bob", 19L),
|
||||
new KeyValue<>("dave", 56L),
|
||||
new KeyValue<>("eve", 78L),
|
||||
new KeyValue<>("alice", 40L),
|
||||
new KeyValue<>("fang", 99L)
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
|
||||
StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
LongSerializer.class);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<String, Long> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("user-clicks-1");
|
||||
|
||||
@@ -169,24 +178,22 @@ public class StreamToTableJoinIntegrationTests {
|
||||
}
|
||||
|
||||
List<KeyValue<String, Long>> expectedClicksPerRegion = Arrays.asList(
|
||||
new KeyValue<>("americas", 101L), new KeyValue<>("europe", 109L),
|
||||
new KeyValue<>("asia", 124L));
|
||||
new KeyValue<>("americas", 101L),
|
||||
new KeyValue<>("europe", 109L),
|
||||
new KeyValue<>("asia", 124L)
|
||||
);
|
||||
|
||||
// Verify that we receive the expected data
|
||||
//Verify that we receive the expected data
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<String, Long>> actualClicksPerRegion = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils
|
||||
.getRecords(consumer);
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<String, Long> record : records) {
|
||||
actualClicksPerRegion
|
||||
.add(new KeyValue<>(record.key(), record.value()));
|
||||
actualClicksPerRegion.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
}
|
||||
while (count < expectedClicksPerRegion.size()
|
||||
&& (System.currentTimeMillis() - start) < 30000);
|
||||
} while (count < expectedClicksPerRegion.size() && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count == expectedClicksPerRegion.size()).isTrue();
|
||||
assertThat(actualClicksPerRegion).hasSameElementsAs(expectedClicksPerRegion);
|
||||
@@ -197,22 +204,16 @@ public class StreamToTableJoinIntegrationTests {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGlobalStartOffsetWithLatestAndIndividualBindingWthEarliest()
|
||||
throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
CountClicksPerRegionApplication.class);
|
||||
public void testGlobalStartOffsetWithLatestAndIndividualBindingWthEarliest() throws Exception {
|
||||
SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
Consumer<String, Long> consumer;
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
|
||||
StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
|
||||
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
|
||||
DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-2");
|
||||
|
||||
@@ -220,27 +221,30 @@ public class StreamToTableJoinIntegrationTests {
|
||||
// binding (which is set to earliest below).
|
||||
// Input 1: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks = Arrays.asList(
|
||||
new KeyValue<>("alice", 100L), new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L), new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L), new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L), new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L), new KeyValue<>("alice", 100L));
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L),
|
||||
new KeyValue<>("alice", 100L)
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
|
||||
StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
LongSerializer.class);
|
||||
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
DefaultKafkaProducerFactory<String, Long> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<String, Long> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("user-clicks-2");
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
// Thread.sleep(10000L);
|
||||
//Thread.sleep(10000L);
|
||||
try (ConfigurableApplicationContext ignored = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=user-clicks-2",
|
||||
@@ -251,47 +255,36 @@ public class StreamToTableJoinIntegrationTests {
|
||||
"--spring.cloud.stream.bindings.output.producer.useNativeEncoding=true",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.auto.offset.reset=latest",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.startOffset=earliest",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.valueSerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.keySerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.valueSerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.keySerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.inputX.consumer.valueSerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.keySerde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.output.producer.valueSerde=org.apache.kafka.common.serialization.Serdes$LongSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.application-id=helloxyz-foobar",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString())) {
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString())) {
|
||||
Thread.sleep(1000L);
|
||||
|
||||
// Input 2: Region per user (multiple records allowed per user).
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(new KeyValue<>(
|
||||
"alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"), new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"), new KeyValue<>("alice",
|
||||
"europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"), new KeyValue<>("fang", "asia"));
|
||||
List<KeyValue<String, String>> userRegions = Arrays.asList(
|
||||
new KeyValue<>("alice", "asia"), /* Alice lived in Asia originally... */
|
||||
new KeyValue<>("bob", "americas"),
|
||||
new KeyValue<>("chao", "asia"),
|
||||
new KeyValue<>("dave", "europe"),
|
||||
new KeyValue<>("alice", "europe"), /* ...but moved to Europe some time later. */
|
||||
new KeyValue<>("eve", "americas"),
|
||||
new KeyValue<>("fang", "asia")
|
||||
);
|
||||
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils
|
||||
.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
|
||||
StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
|
||||
StringSerializer.class);
|
||||
Map<String, Object> senderProps1 = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
senderProps1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
senderProps1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(
|
||||
senderProps1);
|
||||
DefaultKafkaProducerFactory<String, String> pf1 = new DefaultKafkaProducerFactory<>(senderProps1);
|
||||
KafkaTemplate<String, String> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic("user-regions-2");
|
||||
|
||||
@@ -299,41 +292,45 @@ public class StreamToTableJoinIntegrationTests {
|
||||
template1.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Input 1: Clicks per user (multiple records allowed per user).
|
||||
List<KeyValue<String, Long>> userClicks1 = Arrays.asList(
|
||||
new KeyValue<>("bob", 4L), new KeyValue<>("chao", 25L),
|
||||
new KeyValue<>("bob", 19L), new KeyValue<>("dave", 56L),
|
||||
new KeyValue<>("eve", 78L), new KeyValue<>("fang", 99L));
|
||||
new KeyValue<>("bob", 4L),
|
||||
new KeyValue<>("chao", 25L),
|
||||
new KeyValue<>("bob", 19L),
|
||||
new KeyValue<>("dave", 56L),
|
||||
new KeyValue<>("eve", 78L),
|
||||
new KeyValue<>("fang", 99L)
|
||||
);
|
||||
|
||||
for (KeyValue<String, Long> keyValue : userClicks1) {
|
||||
template.sendDefault(keyValue.key, keyValue.value);
|
||||
}
|
||||
|
||||
List<KeyValue<String, Long>> expectedClicksPerRegion = Arrays.asList(
|
||||
new KeyValue<>("americas", 101L), new KeyValue<>("europe", 56L),
|
||||
new KeyValue<>("asia", 124L),
|
||||
// 1000 alice entries which were there in the topic before the
|
||||
// consumer started.
|
||||
// Since we set the startOffset to earliest for the topic, it will
|
||||
// read them,
|
||||
// but the join fails to associate with a valid region, thus UNKNOWN.
|
||||
new KeyValue<>("UNKNOWN", 1000L));
|
||||
|
||||
// Verify that we receive the expected data
|
||||
|
||||
List<KeyValue<String, Long>> expectedClicksPerRegion = Arrays.asList(
|
||||
new KeyValue<>("americas", 101L),
|
||||
new KeyValue<>("europe", 56L),
|
||||
new KeyValue<>("asia", 124L),
|
||||
//1000 alice entries which were there in the topic before the consumer started.
|
||||
//Since we set the startOffset to earliest for the topic, it will read them,
|
||||
//but the join fails to associate with a valid region, thus UNKNOWN.
|
||||
new KeyValue<>("UNKNOWN", 1000L)
|
||||
);
|
||||
|
||||
//Verify that we receive the expected data
|
||||
int count = 0;
|
||||
long start = System.currentTimeMillis();
|
||||
List<KeyValue<String, Long>> actualClicksPerRegion = new ArrayList<>();
|
||||
do {
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils
|
||||
.getRecords(consumer);
|
||||
ConsumerRecords<String, Long> records = KafkaTestUtils.getRecords(consumer);
|
||||
count = count + records.count();
|
||||
for (ConsumerRecord<String, Long> record : records) {
|
||||
actualClicksPerRegion
|
||||
.add(new KeyValue<>(record.key(), record.value()));
|
||||
actualClicksPerRegion.add(new KeyValue<>(record.key(), record.value()));
|
||||
}
|
||||
}
|
||||
while (count < expectedClicksPerRegion.size()
|
||||
&& (System.currentTimeMillis() - start) < 30000);
|
||||
} while (count < expectedClicksPerRegion.size() && (System.currentTimeMillis() - start) < 30000);
|
||||
|
||||
assertThat(count).isEqualTo(expectedClicksPerRegion.size());
|
||||
assertThat(actualClicksPerRegion).hasSameElementsAs(expectedClicksPerRegion);
|
||||
@@ -343,76 +340,12 @@ public class StreamToTableJoinIntegrationTests {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTrivialSingleKTableInputAsNonDeclarative() {
|
||||
SpringApplication app = new SpringApplication(
|
||||
TrivialKTableApp.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
app.run("--server.port=0",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input-y.consumer.application-id=" +
|
||||
"testTrivialSingleKTableInputAsNonDeclarative");
|
||||
//All we are verifying is that this application didn't throw any errors.
|
||||
//See this issue: https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/issues/536
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessorX.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
public static class CountClicksPerRegionApplication {
|
||||
|
||||
@StreamListener
|
||||
@SendTo("output")
|
||||
public KStream<String, Long> process(
|
||||
@Input("input") KStream<String, Long> userClicksStream,
|
||||
@Input("input-x") KTable<String, String> userRegionsTable) {
|
||||
|
||||
return userClicksStream
|
||||
.leftJoin(userRegionsTable,
|
||||
(clicks, region) -> new RegionWithClicks(
|
||||
region == null ? "UNKNOWN" : region, clicks),
|
||||
Joined.with(Serdes.String(), Serdes.Long(), null))
|
||||
.map((user, regionWithClicks) -> new KeyValue<>(
|
||||
regionWithClicks.getRegion(), regionWithClicks.getClicks()))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.Long()))
|
||||
.reduce((firstClicks, secondClicks) -> firstClicks + secondClicks)
|
||||
.toStream();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessorY.class)
|
||||
@EnableAutoConfiguration
|
||||
public static class TrivialKTableApp {
|
||||
|
||||
@StreamListener("input-y")
|
||||
public void process(KTable<String, String> inputTable) {
|
||||
inputTable.toStream().foreach((key, value) -> System.out.println("key : value " + key + " : " + value));
|
||||
}
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorX extends KafkaStreamsProcessor {
|
||||
|
||||
@Input("input-x")
|
||||
KTable<?, ?> inputX();
|
||||
|
||||
}
|
||||
|
||||
interface KafkaStreamsProcessorY {
|
||||
|
||||
@Input("input-y")
|
||||
KTable<?, ?> inputY();
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Tuple for a region and its associated number of clicks.
|
||||
*/
|
||||
private static final class RegionWithClicks {
|
||||
|
||||
private final String region;
|
||||
|
||||
private final long clicks;
|
||||
|
||||
RegionWithClicks(String region, long clicks) {
|
||||
@@ -435,5 +368,4 @@ public class StreamToTableJoinIntegrationTests {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -62,21 +62,17 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class WordCountMultipleBranchesIntegrationTests {
|
||||
|
||||
@ClassRule
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true,
|
||||
"counts", "foo", "bar");
|
||||
public static EmbeddedKafkaRule embeddedKafkaRule = new EmbeddedKafkaRule(1, true, "counts","foo","bar");
|
||||
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule
|
||||
.getEmbeddedKafka();
|
||||
private static EmbeddedKafkaBroker embeddedKafka = embeddedKafkaRule.getEmbeddedKafka();
|
||||
|
||||
private static Consumer<String, String> consumer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx",
|
||||
"false", embeddedKafka);
|
||||
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx", "false", embeddedKafka);
|
||||
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
|
||||
consumerProps);
|
||||
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
|
||||
consumer = cf.createConsumer();
|
||||
embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "foo", "bar");
|
||||
}
|
||||
@@ -86,66 +82,6 @@ public class WordCountMultipleBranchesIntegrationTests {
|
||||
consumer.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithStringInputAndPojoOuput() throws Exception {
|
||||
SpringApplication app = new SpringApplication(
|
||||
WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output1.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output1.contentType=application/json",
|
||||
"--spring.cloud.stream.bindings.output2.destination=foo",
|
||||
"--spring.cloud.stream.bindings.output2.contentType=application/json",
|
||||
"--spring.cloud.stream.bindings.output3.destination=bar",
|
||||
"--spring.cloud.stream.bindings.output3.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
|
||||
+ "=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId"
|
||||
+ "=WordCountMultipleBranchesIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers="
|
||||
+ embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes="
|
||||
+ embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
receiveAndValidate(context);
|
||||
}
|
||||
finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context)
|
||||
throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
|
||||
senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("english");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
|
||||
"counts");
|
||||
assertThat(cr.value().contains("\"word\":\"english\",\"count\":1")).isTrue();
|
||||
|
||||
template.sendDefault("french");
|
||||
template.sendDefault("french");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "foo");
|
||||
assertThat(cr.value().contains("\"word\":\"french\",\"count\":2")).isTrue();
|
||||
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "bar");
|
||||
assertThat(cr.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
|
||||
}
|
||||
|
||||
@EnableBinding(KStreamProcessorX.class)
|
||||
@EnableAutoConfiguration
|
||||
@EnableConfigurationProperties(KafkaStreamsApplicationSupportProperties.class)
|
||||
@@ -155,26 +91,23 @@ public class WordCountMultipleBranchesIntegrationTests {
|
||||
private TimeWindows timeWindows;
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo({ "output1", "output2", "output3" })
|
||||
@SendTo({"output1","output2","output3"})
|
||||
@SuppressWarnings("unchecked")
|
||||
public KStream<?, WordCount>[] process(KStream<Object, String> input) {
|
||||
|
||||
Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
|
||||
Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
|
||||
Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
|
||||
Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");
|
||||
|
||||
return input
|
||||
.flatMapValues(
|
||||
value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.groupBy((key, value) -> value).windowedBy(timeWindows)
|
||||
.count(Materialized.as("WordCounts-multi")).toStream()
|
||||
.map((key, value) -> new KeyValue<>(null,
|
||||
new WordCount(key.key(), value,
|
||||
new Date(key.window().start()),
|
||||
new Date(key.window().end()))))
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.groupBy((key, value) -> value)
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("WordCounts-multi"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))))
|
||||
.branch(isEnglish, isFrench, isSpanish);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface KStreamProcessorX {
|
||||
@@ -190,7 +123,56 @@ public class WordCountMultipleBranchesIntegrationTests {
|
||||
|
||||
@Output("output3")
|
||||
KStream<?, ?> output3();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKstreamWordCountWithStringInputAndPojoOuput() throws Exception {
|
||||
SpringApplication app = new SpringApplication(WordCountProcessorApplication.class);
|
||||
app.setWebApplicationType(WebApplicationType.NONE);
|
||||
|
||||
ConfigurableApplicationContext context = app.run("--server.port=0",
|
||||
"--spring.jmx.enabled=false",
|
||||
"--spring.cloud.stream.bindings.input.destination=words",
|
||||
"--spring.cloud.stream.bindings.output1.destination=counts",
|
||||
"--spring.cloud.stream.bindings.output1.contentType=application/json",
|
||||
"--spring.cloud.stream.bindings.output2.destination=foo",
|
||||
"--spring.cloud.stream.bindings.output2.contentType=application/json",
|
||||
"--spring.cloud.stream.bindings.output3.destination=bar",
|
||||
"--spring.cloud.stream.bindings.output3.contentType=application/json",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde=org.apache.kafka.common.serialization.Serdes$StringSerde",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
|
||||
"--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
|
||||
"--spring.cloud.stream.kafka.streams.bindings.input.consumer.applicationId=WordCountMultipleBranchesIntegrationTests-abc",
|
||||
"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString(),
|
||||
"--spring.cloud.stream.kafka.streams.binder.zkNodes=" + embeddedKafka.getZookeeperConnectionString());
|
||||
try {
|
||||
receiveAndValidate(context);
|
||||
} finally {
|
||||
context.close();
|
||||
}
|
||||
}
|
||||
|
||||
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
|
||||
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
|
||||
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
|
||||
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic("words");
|
||||
template.sendDefault("english");
|
||||
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
|
||||
assertThat(cr.value().contains("\"word\":\"english\",\"count\":1")).isTrue();
|
||||
|
||||
template.sendDefault("french");
|
||||
template.sendDefault("french");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "foo");
|
||||
assertThat(cr.value().contains("\"word\":\"french\",\"count\":2")).isTrue();
|
||||
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
template.sendDefault("spanish");
|
||||
cr = KafkaTestUtils.getSingleRecord(consumer, "bar");
|
||||
assertThat(cr.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
@@ -241,7 +223,6 @@ public class WordCountMultipleBranchesIntegrationTests {
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -29,13 +29,13 @@ import org.springframework.messaging.support.MessageBuilder;
|
||||
/**
|
||||
* Custom avro serializer intended to be used for testing only.
|
||||
*
|
||||
* @param <S> Target type to serialize
|
||||
* @author Soby Chacko
|
||||
*
|
||||
* @param <S> Target type to serialize
|
||||
*/
|
||||
public class TestAvroSerializer<S> implements Serializer<S> {
|
||||
|
||||
public TestAvroSerializer() {
|
||||
}
|
||||
public TestAvroSerializer() {}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> configs, boolean isKey) {
|
||||
@@ -49,14 +49,13 @@ public class TestAvroSerializer<S> implements Serializer<S> {
|
||||
Map<String, Object> headers = new HashMap<>(message.getHeaders());
|
||||
headers.put(MessageHeaders.CONTENT_TYPE, "application/avro");
|
||||
MessageHeaders messageHeaders = new MessageHeaders(headers);
|
||||
final Object payload = avroSchemaMessageConverter
|
||||
.toMessage(message.getPayload(), messageHeaders).getPayload();
|
||||
return (byte[]) payload;
|
||||
final Object payload = avroSchemaMessageConverter.toMessage(message.getPayload(),
|
||||
messageHeaders).getPayload();
|
||||
return (byte[])payload;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -42,7 +42,7 @@ public class CompositeNonNativeSerdeTest {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositeNonNativeSerdeUsingAvroContentType() {
|
||||
public void testCompositeNonNativeSerdeUsingAvroContentType(){
|
||||
Random random = new Random();
|
||||
Sensor sensor = new Sensor();
|
||||
sensor.setId(UUID.randomUUID().toString() + "-v1");
|
||||
@@ -52,22 +52,18 @@ public class CompositeNonNativeSerdeTest {
|
||||
|
||||
List<MessageConverter> messageConverters = new ArrayList<>();
|
||||
messageConverters.add(new AvroSchemaMessageConverter());
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory = new CompositeMessageConverterFactory(
|
||||
messageConverters, new ObjectMapper());
|
||||
CompositeNonNativeSerde compositeNonNativeSerde = new CompositeNonNativeSerde(
|
||||
compositeMessageConverterFactory);
|
||||
CompositeMessageConverterFactory compositeMessageConverterFactory =
|
||||
new CompositeMessageConverterFactory(messageConverters, new ObjectMapper());
|
||||
CompositeNonNativeSerde compositeNonNativeSerde = new CompositeNonNativeSerde(compositeMessageConverterFactory);
|
||||
|
||||
Map<String, Object> configs = new HashMap<>();
|
||||
configs.put("valueClass", Sensor.class);
|
||||
configs.put("contentType", "application/avro");
|
||||
compositeNonNativeSerde.configure(configs, false);
|
||||
final byte[] serialized = compositeNonNativeSerde.serializer().serialize(null,
|
||||
sensor);
|
||||
final byte[] serialized = compositeNonNativeSerde.serializer().serialize(null, sensor);
|
||||
|
||||
final Object deserialized = compositeNonNativeSerde.deserializer()
|
||||
.deserialize(null, serialized);
|
||||
final Object deserialized = compositeNonNativeSerde.deserializer().deserialize(null, serialized);
|
||||
|
||||
assertThat(deserialized).isEqualTo(sensor);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
|
||||
@@ -10,7 +10,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-parent</artifactId>
|
||||
<version>2.2.0.M1</version>
|
||||
<version>2.1.0.RC2</version>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
|
||||
@@ -1,387 +0,0 @@
|
||||
/*
|
||||
* Copyright 2017-2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.cloud.stream.binder.kafka;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.DeserializationContext;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.deser.std.StdNodeBasedDeserializer;
|
||||
import com.fasterxml.jackson.databind.module.SimpleModule;
|
||||
import com.fasterxml.jackson.databind.type.TypeFactory;
|
||||
import org.apache.kafka.common.header.Header;
|
||||
import org.apache.kafka.common.header.Headers;
|
||||
import org.apache.kafka.common.header.internals.RecordHeader;
|
||||
|
||||
import org.springframework.kafka.support.AbstractKafkaHeaderMapper;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.messaging.MessageHeaders;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.MimeType;
|
||||
|
||||
/**
|
||||
* Default header mapper for Apache Kafka. Most headers in {@link KafkaHeaders} are not
|
||||
* mapped on outbound messages. The exceptions are correlation and reply headers for
|
||||
* request/reply messaging. Header types are added to a special header
|
||||
* {@link #JSON_TYPES}.
|
||||
*
|
||||
* @author Gary Russell
|
||||
* @author Artem Bilan
|
||||
* @since 2.0
|
||||
* @deprecated will be removed in the next point release after 2.1.0. See issue
|
||||
* https://github.com/spring-cloud/spring-cloud-stream-binder-kafka/issues/509
|
||||
*/
|
||||
public class BinderHeaderMapper extends AbstractKafkaHeaderMapper {
|
||||
|
||||
private static final List<String> DEFAULT_TRUSTED_PACKAGES = Arrays
|
||||
.asList("java.util", "java.lang", "org.springframework.util");
|
||||
|
||||
private static final List<String> DEFAULT_TO_STRING_CLASSES = Arrays.asList(
|
||||
"org.springframework.util.MimeType", "org.springframework.http.MediaType");
|
||||
|
||||
/**
|
||||
* Header name for java types of other headers.
|
||||
*/
|
||||
public static final String JSON_TYPES = "spring_json_header_types";
|
||||
|
||||
private final ObjectMapper objectMapper;
|
||||
|
||||
private final Set<String> trustedPackages = new LinkedHashSet<>(
|
||||
DEFAULT_TRUSTED_PACKAGES);
|
||||
|
||||
private final Set<String> toStringClasses = new LinkedHashSet<>(
|
||||
DEFAULT_TO_STRING_CLASSES);
|
||||
|
||||
/**
|
||||
* Construct an instance with the default object mapper and default header patterns
|
||||
* for outbound headers; all inbound headers are mapped. The default pattern list is
|
||||
* {@code "!id", "!timestamp" and "*"}. In addition, most of the headers in
|
||||
* {@link KafkaHeaders} are never mapped as headers since they represent data in
|
||||
* consumer/producer records.
|
||||
* @see #BinderHeaderMapper(ObjectMapper)
|
||||
*/
|
||||
public BinderHeaderMapper() {
|
||||
this(new ObjectMapper());
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an instance with the provided object mapper and default header patterns
|
||||
* for outbound headers; all inbound headers are mapped. The patterns are applied in
|
||||
* order, stopping on the first match (positive or negative). Patterns are negated by
|
||||
* preceding them with "!". The default pattern list is
|
||||
* {@code "!id", "!timestamp" and "*"}. In addition, most of the headers in
|
||||
* {@link KafkaHeaders} are never mapped as headers since they represent data in
|
||||
* consumer/producer records.
|
||||
* @param objectMapper the object mapper.
|
||||
* @see org.springframework.util.PatternMatchUtils#simpleMatch(String, String)
|
||||
*/
|
||||
public BinderHeaderMapper(ObjectMapper objectMapper) {
|
||||
this(objectMapper, "!" + MessageHeaders.ID, "!" + MessageHeaders.TIMESTAMP, "*");
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an instance with a default object mapper and the provided header patterns
|
||||
* for outbound headers; all inbound headers are mapped. The patterns are applied in
|
||||
* order, stopping on the first match (positive or negative). Patterns are negated by
|
||||
* preceding them with "!". The patterns will replace the default patterns; you
|
||||
* generally should not map the {@code "id" and "timestamp"} headers. Note: most of
|
||||
* the headers in {@link KafkaHeaders} are ever mapped as headers since they represent
|
||||
* data in consumer/producer records.
|
||||
* @param patterns the patterns.
|
||||
* @see org.springframework.util.PatternMatchUtils#simpleMatch(String, String)
|
||||
*/
|
||||
public BinderHeaderMapper(String... patterns) {
|
||||
this(new ObjectMapper(), patterns);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an instance with the provided object mapper and the provided header
|
||||
* patterns for outbound headers; all inbound headers are mapped. The patterns are
|
||||
* applied in order, stopping on the first match (positive or negative). Patterns are
|
||||
* negated by preceding them with "!". The patterns will replace the default patterns;
|
||||
* you generally should not map the {@code "id" and "timestamp"} headers. Note: most
|
||||
* of the headers in {@link KafkaHeaders} are never mapped as headers since they
|
||||
* represent data in consumer/producer records.
|
||||
* @param objectMapper the object mapper.
|
||||
* @param patterns the patterns.
|
||||
* @see org.springframework.util.PatternMatchUtils#simpleMatch(String, String)
|
||||
*/
|
||||
public BinderHeaderMapper(ObjectMapper objectMapper, String... patterns) {
|
||||
super(patterns);
|
||||
Assert.notNull(objectMapper, "'objectMapper' must not be null");
|
||||
Assert.noNullElements(patterns, "'patterns' must not have null elements");
|
||||
this.objectMapper = objectMapper;
|
||||
this.objectMapper.registerModule(new SimpleModule()
|
||||
.addDeserializer(MimeType.class, new MimeTypeJsonDeserializer()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the object mapper.
|
||||
* @return the mapper.
|
||||
*/
|
||||
protected ObjectMapper getObjectMapper() {
|
||||
return this.objectMapper;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide direct access to the trusted packages set for subclasses.
|
||||
* @return the trusted packages.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected Set<String> getTrustedPackages() {
|
||||
return this.trustedPackages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide direct access to the toString() classes by subclasses.
|
||||
* @return the toString() classes.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected Set<String> getToStringClasses() {
|
||||
return this.toStringClasses;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add packages to the trusted packages list (default {@code java.util, java.lang})
|
||||
* used when constructing objects from JSON. If any of the supplied packages is
|
||||
* {@code "*"}, all packages are trusted. If a class for a non-trusted package is
|
||||
* encountered, the header is returned to the application with value of type
|
||||
* {@link NonTrustedHeaderType}.
|
||||
* @param trustedPackages the packages to trust.
|
||||
*/
|
||||
public void addTrustedPackages(String... trustedPackages) {
|
||||
if (trustedPackages != null) {
|
||||
for (String whiteList : trustedPackages) {
|
||||
if ("*".equals(whiteList)) {
|
||||
this.trustedPackages.clear();
|
||||
break;
|
||||
}
|
||||
else {
|
||||
this.trustedPackages.add(whiteList);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add class names that the outbound mapper should perform toString() operations on
|
||||
* before mapping.
|
||||
* @param classNames the class names.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void addToStringClasses(String... classNames) {
|
||||
this.toStringClasses.addAll(Arrays.asList(classNames));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fromHeaders(MessageHeaders headers, Headers target) {
|
||||
final Map<String, String> jsonHeaders = new HashMap<>();
|
||||
headers.forEach((k, v) -> {
|
||||
if (matches(k, v)) {
|
||||
if (v instanceof byte[]) {
|
||||
target.add(new RecordHeader(k, (byte[]) v));
|
||||
}
|
||||
else {
|
||||
try {
|
||||
Object value = v;
|
||||
String className = v.getClass().getName();
|
||||
if (this.toStringClasses.contains(className)) {
|
||||
value = v.toString();
|
||||
className = "java.lang.String";
|
||||
}
|
||||
target.add(new RecordHeader(k,
|
||||
getObjectMapper().writeValueAsBytes(value)));
|
||||
jsonHeaders.put(k, className);
|
||||
}
|
||||
catch (Exception e) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Could not map " + k + " with type "
|
||||
+ v.getClass().getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
if (jsonHeaders.size() > 0) {
|
||||
try {
|
||||
target.add(new RecordHeader(JSON_TYPES,
|
||||
getObjectMapper().writeValueAsBytes(jsonHeaders)));
|
||||
}
|
||||
catch (IllegalStateException | JsonProcessingException e) {
|
||||
logger.error("Could not add json types header", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void toHeaders(Headers source, final Map<String, Object> headers) {
|
||||
final Map<String, String> jsonTypes = decodeJsonTypes(source);
|
||||
source.forEach(h -> {
|
||||
if (!(h.key().equals(JSON_TYPES))) {
|
||||
if (jsonTypes != null && jsonTypes.containsKey(h.key())) {
|
||||
Class<?> type = Object.class;
|
||||
String requestedType = jsonTypes.get(h.key());
|
||||
boolean trusted = false;
|
||||
try {
|
||||
trusted = trusted(requestedType);
|
||||
if (trusted) {
|
||||
type = ClassUtils.forName(requestedType, null);
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
logger.error("Could not load class for header: " + h.key(), e);
|
||||
}
|
||||
if (trusted) {
|
||||
try {
|
||||
headers.put(h.key(),
|
||||
getObjectMapper().readValue(h.value(), type));
|
||||
}
|
||||
catch (IOException e) {
|
||||
logger.error("Could not decode json type: "
|
||||
+ new String(h.value()) + " for key: " + h.key(), e);
|
||||
headers.put(h.key(), h.value());
|
||||
}
|
||||
}
|
||||
else {
|
||||
headers.put(h.key(),
|
||||
new NonTrustedHeaderType(h.value(), requestedType));
|
||||
}
|
||||
}
|
||||
else {
|
||||
headers.put(h.key(), h.value());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Nullable
|
||||
private Map<String, String> decodeJsonTypes(Headers source) {
|
||||
Map<String, String> types = null;
|
||||
Iterator<Header> iterator = source.iterator();
|
||||
while (iterator.hasNext()) {
|
||||
Header next = iterator.next();
|
||||
if (next.key().equals(JSON_TYPES)) {
|
||||
try {
|
||||
types = getObjectMapper().readValue(next.value(), Map.class);
|
||||
}
|
||||
catch (IOException e) {
|
||||
logger.error(
|
||||
"Could not decode json types: " + new String(next.value()),
|
||||
e);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
return types;
|
||||
}
|
||||
|
||||
protected boolean trusted(String requestedType) {
|
||||
if (!this.trustedPackages.isEmpty()) {
|
||||
int lastDot = requestedType.lastIndexOf('.');
|
||||
if (lastDot < 0) {
|
||||
return false;
|
||||
}
|
||||
String packageName = requestedType.substring(0, lastDot);
|
||||
for (String trustedPackage : this.trustedPackages) {
|
||||
if (packageName.equals(trustedPackage)
|
||||
|| packageName.startsWith(trustedPackage + ".")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* The {@link StdNodeBasedDeserializer} extension for {@link MimeType}
|
||||
* deserialization. It is presented here for backward compatibility when older
|
||||
* producers send {@link MimeType} headers as serialization version.
|
||||
*/
|
||||
private class MimeTypeJsonDeserializer extends StdNodeBasedDeserializer<MimeType> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
MimeTypeJsonDeserializer() {
|
||||
super(MimeType.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MimeType convert(JsonNode root, DeserializationContext ctxt)
|
||||
throws IOException {
|
||||
JsonNode type = root.get("type");
|
||||
JsonNode subType = root.get("subtype");
|
||||
JsonNode parameters = root.get("parameters");
|
||||
Map<String, String> params = BinderHeaderMapper.this.objectMapper
|
||||
.readValue(parameters.traverse(), TypeFactory.defaultInstance()
|
||||
.constructMapType(HashMap.class, String.class, String.class));
|
||||
return new MimeType(type.asText(), subType.asText(), params);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a header that could not be decoded due to an untrusted type.
|
||||
*/
|
||||
public static class NonTrustedHeaderType {
|
||||
|
||||
private final byte[] headerValue;
|
||||
|
||||
private final String untrustedType;
|
||||
|
||||
NonTrustedHeaderType(byte[] headerValue, String untrustedType) { // NOSONAR
|
||||
this.headerValue = headerValue; // NOSONAR
|
||||
this.untrustedType = untrustedType;
|
||||
}
|
||||
|
||||
public byte[] getHeaderValue() {
|
||||
return this.headerValue; // NOSONAR
|
||||
}
|
||||
|
||||
public String getUntrustedType() {
|
||||
return this.untrustedType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return "NonTrustedHeaderType [headerValue="
|
||||
+ new String(this.headerValue, StandardCharsets.UTF_8)
|
||||
+ ", untrustedType=" + this.untrustedType + "]";
|
||||
}
|
||||
catch (Exception e) {
|
||||
return "NonTrustedHeaderType [headerValue="
|
||||
+ Arrays.toString(this.headerValue) + ", untrustedType="
|
||||
+ this.untrustedType + "]";
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright 2017-2019 the original author or authors.
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -28,8 +28,8 @@ import org.springframework.core.env.ConfigurableEnvironment;
|
||||
import org.springframework.core.env.MapPropertySource;
|
||||
|
||||
/**
|
||||
* An {@link EnvironmentPostProcessor} that sets some common configuration properties (log
|
||||
* config etc.,) for Kafka binder.
|
||||
* An {@link EnvironmentPostProcessor} that sets some common configuration properties (log config etc.,) for Kafka
|
||||
* binder.
|
||||
*
|
||||
* @author Ilayaperumal Gopinathan
|
||||
*/
|
||||
@@ -41,42 +41,28 @@ public class KafkaBinderEnvironmentPostProcessor implements EnvironmentPostProce
|
||||
|
||||
private static final String SPRING_KAFKA_CONSUMER = SPRING_KAFKA + ".consumer";
|
||||
|
||||
private static final String SPRING_KAFKA_PRODUCER_KEY_SERIALIZER = SPRING_KAFKA_PRODUCER
|
||||
+ "." + "keySerializer";
|
||||
private static final String SPRING_KAFKA_PRODUCER_KEY_SERIALIZER = SPRING_KAFKA_PRODUCER + "." + "keySerializer";
|
||||
|
||||
private static final String SPRING_KAFKA_PRODUCER_VALUE_SERIALIZER = SPRING_KAFKA_PRODUCER
|
||||
+ "." + "valueSerializer";
|
||||
private static final String SPRING_KAFKA_PRODUCER_VALUE_SERIALIZER = SPRING_KAFKA_PRODUCER + "." + "valueSerializer";
|
||||
|
||||
private static final String SPRING_KAFKA_CONSUMER_KEY_DESERIALIZER = SPRING_KAFKA_CONSUMER
|
||||
+ "." + "keyDeserializer";
|
||||
private static final String SPRING_KAFKA_CONSUMER_KEY_DESERIALIZER = SPRING_KAFKA_CONSUMER + "." + "keyDeserializer";
|
||||
|
||||
private static final String SPRING_KAFKA_CONSUMER_VALUE_DESERIALIZER = SPRING_KAFKA_CONSUMER
|
||||
+ "." + "valueDeserializer";
|
||||
private static final String SPRING_KAFKA_CONSUMER_VALUE_DESERIALIZER = SPRING_KAFKA_CONSUMER + "." + "valueDeserializer";
|
||||
|
||||
private static final String KAFKA_BINDER_DEFAULT_PROPERTIES = "kafkaBinderDefaultProperties";
|
||||
|
||||
@Override
|
||||
public void postProcessEnvironment(ConfigurableEnvironment environment,
|
||||
SpringApplication application) {
|
||||
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) {
|
||||
if (!environment.getPropertySources().contains(KAFKA_BINDER_DEFAULT_PROPERTIES)) {
|
||||
Map<String, Object> kafkaBinderDefaultProperties = new HashMap<>();
|
||||
kafkaBinderDefaultProperties.put("logging.level.org.I0Itec.zkclient",
|
||||
"ERROR");
|
||||
kafkaBinderDefaultProperties.put("logging.level.kafka.server.KafkaConfig",
|
||||
"ERROR");
|
||||
kafkaBinderDefaultProperties
|
||||
.put("logging.level.kafka.admin.AdminClient.AdminConfig", "ERROR");
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_PRODUCER_KEY_SERIALIZER,
|
||||
ByteArraySerializer.class.getName());
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_PRODUCER_VALUE_SERIALIZER,
|
||||
ByteArraySerializer.class.getName());
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_CONSUMER_KEY_DESERIALIZER,
|
||||
ByteArrayDeserializer.class.getName());
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_CONSUMER_VALUE_DESERIALIZER,
|
||||
ByteArrayDeserializer.class.getName());
|
||||
environment.getPropertySources().addLast(new MapPropertySource(
|
||||
KAFKA_BINDER_DEFAULT_PROPERTIES, kafkaBinderDefaultProperties));
|
||||
kafkaBinderDefaultProperties.put("logging.level.org.I0Itec.zkclient", "ERROR");
|
||||
kafkaBinderDefaultProperties.put("logging.level.kafka.server.KafkaConfig", "ERROR");
|
||||
kafkaBinderDefaultProperties.put("logging.level.kafka.admin.AdminClient.AdminConfig", "ERROR");
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_PRODUCER_KEY_SERIALIZER, ByteArraySerializer.class.getName());
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_PRODUCER_VALUE_SERIALIZER, ByteArraySerializer.class.getName());
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_CONSUMER_KEY_DESERIALIZER, ByteArrayDeserializer.class.getName());
|
||||
kafkaBinderDefaultProperties.put(SPRING_KAFKA_CONSUMER_VALUE_DESERIALIZER, ByteArrayDeserializer.class.getName());
|
||||
environment.getPropertySources().addLast(new MapPropertySource(KAFKA_BINDER_DEFAULT_PROPERTIES, kafkaBinderDefaultProperties));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -56,14 +56,14 @@ public class KafkaBinderHealthIndicator implements HealthIndicator {
|
||||
|
||||
private Consumer<?, ?> metadataConsumer;
|
||||
|
||||
public KafkaBinderHealthIndicator(KafkaMessageChannelBinder binder,
|
||||
ConsumerFactory<?, ?> consumerFactory) {
|
||||
public KafkaBinderHealthIndicator(KafkaMessageChannelBinder binder, ConsumerFactory<?, ?> consumerFactory) {
|
||||
this.binder = binder;
|
||||
this.consumerFactory = consumerFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the timeout in seconds to retrieve health information.
|
||||
*
|
||||
* @param timeout the timeout - default 60.
|
||||
*/
|
||||
public void setTimeout(int timeout) {
|
||||
@@ -80,16 +80,16 @@ public class KafkaBinderHealthIndicator implements HealthIndicator {
|
||||
catch (InterruptedException ex) {
|
||||
Thread.currentThread().interrupt();
|
||||
return Health.down()
|
||||
.withDetail("Interrupted while waiting for partition information in",
|
||||
this.timeout + " seconds")
|
||||
.withDetail("Interrupted while waiting for partition information in", this.timeout + " seconds")
|
||||
.build();
|
||||
}
|
||||
catch (ExecutionException ex) {
|
||||
return Health.down(ex).build();
|
||||
}
|
||||
catch (TimeoutException ex) {
|
||||
return Health.down().withDetail("Failed to retrieve partition information in",
|
||||
this.timeout + " seconds").build();
|
||||
return Health.down()
|
||||
.withDetail("Failed to retrieve partition information in", this.timeout + " seconds")
|
||||
.build();
|
||||
}
|
||||
finally {
|
||||
exec.shutdownNow();
|
||||
@@ -107,23 +107,21 @@ public class KafkaBinderHealthIndicator implements HealthIndicator {
|
||||
}
|
||||
synchronized (this.metadataConsumer) {
|
||||
Set<String> downMessages = new HashSet<>();
|
||||
final Map<String, KafkaMessageChannelBinder.TopicInformation> topicsInUse = KafkaBinderHealthIndicator.this.binder
|
||||
.getTopicsInUse();
|
||||
final Map<String, KafkaMessageChannelBinder.TopicInformation> topicsInUse =
|
||||
KafkaBinderHealthIndicator.this.binder.getTopicsInUse();
|
||||
if (topicsInUse.isEmpty()) {
|
||||
return Health.down().withDetail("No topic information available",
|
||||
"Kafka broker is not reachable").build();
|
||||
return Health.down()
|
||||
.withDetail("No topic information available", "Kafka broker is not reachable")
|
||||
.build();
|
||||
}
|
||||
else {
|
||||
for (String topic : topicsInUse.keySet()) {
|
||||
KafkaMessageChannelBinder.TopicInformation topicInformation = topicsInUse
|
||||
.get(topic);
|
||||
KafkaMessageChannelBinder.TopicInformation topicInformation = topicsInUse.get(topic);
|
||||
if (!topicInformation.isTopicPattern()) {
|
||||
List<PartitionInfo> partitionInfos = this.metadataConsumer
|
||||
.partitionsFor(topic);
|
||||
List<PartitionInfo> partitionInfos = this.metadataConsumer.partitionsFor(topic);
|
||||
for (PartitionInfo partitionInfo : partitionInfos) {
|
||||
if (topicInformation.getPartitionInfos()
|
||||
.contains(partitionInfo)
|
||||
&& partitionInfo.leader().id() == -1) {
|
||||
.contains(partitionInfo) && partitionInfo.leader().id() == -1) {
|
||||
downMessages.add(partitionInfo.toString());
|
||||
}
|
||||
}
|
||||
@@ -135,9 +133,8 @@ public class KafkaBinderHealthIndicator implements HealthIndicator {
|
||||
}
|
||||
else {
|
||||
return Health.down()
|
||||
.withDetail("Following partitions in use have no leaders: ",
|
||||
downMessages.toString())
|
||||
.build();
|
||||
.withDetail("Following partitions in use have no leaders: ", downMessages.toString())
|
||||
.build();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -59,8 +59,7 @@ import org.springframework.util.ObjectUtils;
|
||||
* @author Thomas Cheyney
|
||||
* @author Gary Russell
|
||||
*/
|
||||
public class KafkaBinderMetrics
|
||||
implements MeterBinder, ApplicationListener<BindingCreatedEvent> {
|
||||
public class KafkaBinderMetrics implements MeterBinder, ApplicationListener<BindingCreatedEvent> {
|
||||
|
||||
private static final int DEFAULT_TIMEOUT = 60;
|
||||
|
||||
@@ -82,8 +81,7 @@ public class KafkaBinderMetrics
|
||||
|
||||
public KafkaBinderMetrics(KafkaMessageChannelBinder binder,
|
||||
KafkaBinderConfigurationProperties binderConfigurationProperties,
|
||||
ConsumerFactory<?, ?> defaultConsumerFactory,
|
||||
@Nullable MeterRegistry meterRegistry) {
|
||||
ConsumerFactory<?, ?> defaultConsumerFactory, @Nullable MeterRegistry meterRegistry) {
|
||||
|
||||
this.binder = binder;
|
||||
this.binderConfigurationProperties = binderConfigurationProperties;
|
||||
@@ -104,8 +102,8 @@ public class KafkaBinderMetrics
|
||||
|
||||
@Override
|
||||
public void bindTo(MeterRegistry registry) {
|
||||
for (Map.Entry<String, KafkaMessageChannelBinder.TopicInformation> topicInfo : this.binder
|
||||
.getTopicsInUse().entrySet()) {
|
||||
for (Map.Entry<String, KafkaMessageChannelBinder.TopicInformation> topicInfo : this.binder.getTopicsInUse()
|
||||
.entrySet()) {
|
||||
|
||||
if (!topicInfo.getValue().isConsumerTopic()) {
|
||||
continue;
|
||||
@@ -115,7 +113,8 @@ public class KafkaBinderMetrics
|
||||
String group = topicInfo.getValue().getConsumerGroup();
|
||||
|
||||
Gauge.builder(METRIC_NAME, this,
|
||||
(o) -> computeUnconsumedMessages(topic, group)).tag("group", group)
|
||||
(o) -> computeUnconsumedMessages(topic, group))
|
||||
.tag("group", group)
|
||||
.tag("topic", topic)
|
||||
.description("Unconsumed messages for a particular group and topic")
|
||||
.register(registry);
|
||||
@@ -132,21 +131,16 @@ public class KafkaBinderMetrics
|
||||
group,
|
||||
(g) -> createConsumerFactory().createConsumer(g, "monitoring"));
|
||||
synchronized (metadataConsumer) {
|
||||
List<PartitionInfo> partitionInfos = metadataConsumer
|
||||
.partitionsFor(topic);
|
||||
List<PartitionInfo> partitionInfos = metadataConsumer.partitionsFor(topic);
|
||||
List<TopicPartition> topicPartitions = new LinkedList<>();
|
||||
for (PartitionInfo partitionInfo : partitionInfos) {
|
||||
topicPartitions.add(new TopicPartition(partitionInfo.topic(),
|
||||
partitionInfo.partition()));
|
||||
topicPartitions.add(new TopicPartition(partitionInfo.topic(), partitionInfo.partition()));
|
||||
}
|
||||
|
||||
Map<TopicPartition, Long> endOffsets = metadataConsumer
|
||||
.endOffsets(topicPartitions);
|
||||
Map<TopicPartition, Long> endOffsets = metadataConsumer.endOffsets(topicPartitions);
|
||||
|
||||
for (Map.Entry<TopicPartition, Long> endOffset : endOffsets
|
||||
.entrySet()) {
|
||||
OffsetAndMetadata current = metadataConsumer
|
||||
.committed(endOffset.getKey());
|
||||
for (Map.Entry<TopicPartition, Long> endOffset : endOffsets.entrySet()) {
|
||||
OffsetAndMetadata current = metadataConsumer.committed(endOffset.getKey());
|
||||
lag += endOffset.getValue();
|
||||
if (current != null) {
|
||||
lag -= current.offset();
|
||||
@@ -179,22 +173,17 @@ public class KafkaBinderMetrics
|
||||
synchronized (this) {
|
||||
if (this.defaultConsumerFactory == null) {
|
||||
Map<String, Object> props = new HashMap<>();
|
||||
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
|
||||
ByteArrayDeserializer.class);
|
||||
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
|
||||
ByteArrayDeserializer.class);
|
||||
Map<String, Object> mergedConfig = this.binderConfigurationProperties
|
||||
.mergedConsumerConfiguration();
|
||||
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
|
||||
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
|
||||
Map<String, Object> mergedConfig = this.binderConfigurationProperties.mergedConsumerConfiguration();
|
||||
if (!ObjectUtils.isEmpty(mergedConfig)) {
|
||||
props.putAll(mergedConfig);
|
||||
}
|
||||
if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
|
||||
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
|
||||
this.binderConfigurationProperties
|
||||
.getKafkaConnectionString());
|
||||
this.binderConfigurationProperties.getKafkaConnectionString());
|
||||
}
|
||||
this.defaultConsumerFactory = new DefaultKafkaConsumerFactory<>(
|
||||
props);
|
||||
this.defaultConsumerFactory = new DefaultKafkaConsumerFactory<>(props);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -205,8 +194,7 @@ public class KafkaBinderMetrics
|
||||
@Override
|
||||
public void onApplicationEvent(BindingCreatedEvent event) {
|
||||
if (this.meterRegistry != null) {
|
||||
// meters are idempotent when called with the same arguments so safe to call
|
||||
// it multiple times
|
||||
// meters are idempotent when called with the same arguments so safe to call it multiple times
|
||||
this.bindTo(this.meterRegistry);
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user