Compare commits
55 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c9c596002e | |
| | 47900bd265 | |
| | 5148dabe5a | |
| | 5f9e842b83 | |
| | 15d1b51aa2 | |
| | a2f642ebe4 | |
| | 477d712214 | |
| | d1169eeef7 | |
| | e3916f9024 | |
| | 7569127dea | |
| | 7c3b457f62 | |
| | 575c19fe2e | |
| | 0f269b3835 | |
| | ecd7d98a21 | |
| | 3e4a53a308 | |
| | e94de0d535 | |
| | b2cecb08bc | |
| | 6777b5860c | |
| | 5ea1f4ce1a | |
| | 1b5161bb8d | |
| | 687b4eb9bd | |
| | 836090f8cc | |
| | 75f7ba5593 | |
| | 9094502215 | |
| | 19b467d8cb | |
| | 4aaa488ffc | |
| | ff14259c22 | |
| | 14382ec7ba | |
| | 3c23785b47 | |
| | 86cb46baf2 | |
| | 7048a01b94 | |
| | 3b221a3a30 | |
| | 2ef859912a | |
| | c3ba94ec12 | |
| | e062d0f52e | |
| | 8c9e3973ac | |
| | fa5223408d | |
| | 29d69a5e5f | |
| | 1204d0fcb4 | |
| | 16beaa71e9 | |
| | a2fea3a188 | |
| | 30a0c1d734 | |
| | 4861ae9225 | |
| | b29fc5491a | |
| | 6fec783413 | |
| | cb96b31009 | |
| | d7ce9cb43d | |
| | d637b73fc7 | |
| | a1f78b74bd | |
| | aa17d3c50b | |
| | 00664fe229 | |
| | e41b7e1db7 | |
| | d55da10ffc | |
| | 32b6cfe93e | |
| | 0b00a2361d | |
2 .gitignore (vendored)
@@ -13,11 +13,13 @@ _site/
.settings
.springBeans
.DS_Store
*/.idea
*.sw*
*.iml
*.ipr
*.iws
.idea/*
*/.idea
.factorypath
spring-xd-samples/*/xd
dump.rdb
@@ -21,7 +21,7 @@
<repository>
    <id>spring-snapshots</id>
    <name>Spring Snapshots</name>
    <url>http://repo.spring.io/libs-snapshot-local</url>
    <url>https://repo.spring.io/libs-snapshot-local</url>
    <snapshots>
        <enabled>true</enabled>
    </snapshots>
@@ -29,7 +29,7 @@
<repository>
    <id>spring-milestones</id>
    <name>Spring Milestones</name>
    <url>http://repo.spring.io/libs-milestone-local</url>
    <url>https://repo.spring.io/libs-milestone-local</url>
    <snapshots>
        <enabled>false</enabled>
    </snapshots>
@@ -37,7 +37,7 @@
<repository>
    <id>spring-releases</id>
    <name>Spring Releases</name>
    <url>http://repo.spring.io/release</url>
    <url>https://repo.spring.io/release</url>
    <snapshots>
        <enabled>false</enabled>
    </snapshots>
@@ -47,7 +47,7 @@
<pluginRepository>
    <id>spring-snapshots</id>
    <name>Spring Snapshots</name>
    <url>http://repo.spring.io/libs-snapshot-local</url>
    <url>https://repo.spring.io/libs-snapshot-local</url>
    <snapshots>
        <enabled>true</enabled>
    </snapshots>
@@ -55,7 +55,7 @@
<pluginRepository>
    <id>spring-milestones</id>
    <name>Spring Milestones</name>
    <url>http://repo.spring.io/libs-milestone-local</url>
    <url>https://repo.spring.io/libs-milestone-local</url>
    <snapshots>
        <enabled>false</enabled>
    </snapshots>
4 LICENSE
@@ -1,6 +1,6 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
https://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

@@ -192,7 +192,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0
https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
72 README.adoc
@@ -1,26 +1,70 @@
== Samples
== Spring Cloud Stream Sample Applications

There are several samples, all running on the RabbitMQ transport (so you need RabbitMQ running locally to test them).
This repository contains a collection of applications written using Spring Cloud Stream. All the applications are self-contained.
They can be run against either Kafka or RabbitMQ middleware technologies.
You have the option of running the samples against local or Docker containerized versions of Kafka and Rabbit.
For convenience, `docker-compose.yml` files are provided as part of each application wherever applicable.
For this reason, Docker Compose is required, and it is recommended to use the https://docs.docker.com/compose/install/[latest version].
These compose files bring up the middleware (Kafka or Rabbit) and other necessary components for running each app.
If you bring up Kafka or RabbitMQ in Docker containers, please make sure that you bring them down while in the same sample directory.
You can read the README that is part of each sample and follow the instructions to run them.

To build the samples do:
You can build all of the samples by going to the root of the repository and running `./mvnw clean package`.
However, the recommended approach is to pick the sample you are interested in, go to that particular app, and follow the instructions in the README for that app.

```
./mvnw clean build
```
=== Following is the list of various sample applications provided

NOTE: The main set of samples are "vanilla" in the sense that they are not deployable as XD modules by the current generation (1.x) of XD. You can still interact with an XD system using the appropriate naming convention for input and output channel names (`<stream>.<index>` format).
==== Source samples

* `source` is a Java config version of the classic "timer" module from Spring XD. It has a "fixedDelay" option (in milliseconds) for the period between emitting messages.
* Sample JDBC source using MySQL - MariaDB variant - (Kafka and Rabbit)

* `sink` is a Java config version of the classic "log" module from Spring XD. It has no options (but some could easily be added), and just logs incoming messages at INFO level.
* Source with dynamic destinations (Kafka and Rabbit)

* `transform` is a simple pass through logging transformer (just logs the incoming message and passes it on).
==== Sink samples

* `double` is a combination of 2 modules defined locally (a source and a sink, so the whole app is self contained).
* Simple JDBC sink using MariaDB (Kafka and Rabbit)

* `multibinder` is a sample application that shows how an application could use multiple binders. In this case, the processor's input/output channels connect to different brokers using their own binder configurations.
==== Processor samples

* `multibinder-differentsystems` shows how an application could use same binder implementation but different configurations for its channels. In this case, a processor's input/output channels connect to same binder implementation but with two separate broker configurations.
* Basic StreamListener sample (Kafka and Rabbit)
* Transformer sample (Kafka and Rabbit)
* Reactive processor sample (Kafka and Rabbit)
* Sensor average calculation using reactive patterns (Kafka and Rabbit)

If you run the source and the sink and point them at the same redis instance (e.g. do nothing to get the one on localhost, or the one they are both bound to as a service on Cloud Foundry) then they will form a "stream" and start talking to each other. All the samples have friendly JMX and Actuator endpoints for inspecting what is going on in the system.
==== Multi IO sample

* Sample with multiple input/output bindings (Kafka and Rabbit)

==== Multi Binder samples

* Multi binder - Input with Kafka and output with Rabbit
* Multi binder - Same binder type but different clusters (Kafka only, but can be extended for Rabbit as well)

==== Kinesis

* Kinesis produce consume sample

==== Kafka Streams samples

A collection of applications demonstrating stream processing with the Spring Cloud Stream support for Kafka Streams binding.

* Kafka Streams word count
* Kafka Streams branching
* Kafka Streams DLQ
* Kafka Streams aggregation
* Kafka Streams Interactive query basic
* Kafka Streams Interactive query advanced
* Kafka Streams product tracker
* Kafka Streams KTable join
* Kafka Streams and normal Kafka binder together

==== Testing samples

* Sample with embedded Kafka
* General testing patterns in Spring Cloud Stream

==== Samples Acceptance Tests

This module is strictly used as an end-to-end acceptance test framework for the samples in this repo.
By default, the tests are not run as part of a normal build.
Please see the README in the acceptance test module for more details.
@@ -1,34 +0,0 @@
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package config.processor;

import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.messaging.Processor;
import org.springframework.integration.annotation.Transformer;
import org.springframework.messaging.Message;

/**
 * @author Marius Bogoevici
 */
@EnableBinding(Processor.class)
public class ProcessorModuleDefinition {

    @Transformer(inputChannel = Processor.INPUT, outputChannel = Processor.OUTPUT)
    public Message<?> transform(Message<?> inbound) {
        return inbound;
    }
}
@@ -1,39 +0,0 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package config.sink;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.messaging.Sink;
import org.springframework.integration.annotation.ServiceActivator;

/**
 * @author Dave Syer
 * @author Marius Bogoevici
 */
@EnableBinding(Sink.class)
public class SinkModuleDefinition {

    private static Logger logger = LoggerFactory.getLogger(SinkModuleDefinition.class);

    @ServiceActivator(inputChannel=Sink.INPUT)
    public void loggerSink(Object payload) {
        logger.info("Received: " + payload);
    }

}
@@ -1 +0,0 @@
fixedDelay: 1000
@@ -1,6 +0,0 @@
spring:
  cloud:
    stream:
      bindings:
        input: testtock

@@ -1,7 +0,0 @@
fixedDelay: 5000
spring:
  cloud:
    stream:
      bindings:
        output: testtock
24 kafka-streams-samples/kafka-streams-aggregate/.gitignore (vendored, new file)
@@ -0,0 +1,24 @@
target/
!.mvn/wrapper/maven-wrapper.jar

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
nbproject/private/
build/
nbbuild/
dist/
nbdist/
.nb-gradle/
BIN kafka-streams-samples/kafka-streams-aggregate/.mvn/wrapper/maven-wrapper.jar (vendored, new file): Binary file not shown.
1 kafka-streams-samples/kafka-streams-aggregate/.mvn/wrapper/maven-wrapper.properties (vendored, new file)
@@ -0,0 +1 @@
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
20 kafka-streams-samples/kafka-streams-aggregate/README.adoc (new file)
@@ -0,0 +1,20 @@
== What is this app?

This is an example of a Spring Cloud Stream processor using Kafka Streams aggregation support.

The application simply aggregates a string for a particular key.

=== Running the app:

Go to the root of the repository and do:

`docker-compose up -d`

`./mvnw clean package`

`java -jar target/kafka-streams-aggregate-0.0.1-SNAPSHOT.jar`

Run the stand-alone `Producers` application a few times to generate some data.

Then go to the URL: http://localhost:8080/events
@@ -0,0 +1,18 @@
version: '3'
services:
  kafka:
    image: wurstmeister/kafka
    ports:
      - "9092:9092"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=127.0.0.1
      - KAFKA_ADVERTISED_PORT=9092
      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
    depends_on:
      - zookeeper
  zookeeper:
    image: wurstmeister/zookeeper
    ports:
      - "2181:2181"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=zookeeper
225
kafka-streams-samples/kafka-streams-aggregate/mvnw
vendored
Executable file
225
kafka-streams-samples/kafka-streams-aggregate/mvnw
vendored
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven2 Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For Migwn, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
# TODO classpath?
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
143
kafka-streams-samples/kafka-streams-aggregate/mvnw.cmd
vendored
Normal file
143
kafka-streams-samples/kafka-streams-aggregate/mvnw.cmd
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM https://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
42 kafka-streams-samples/kafka-streams-aggregate/pom.xml (new file)
@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <artifactId>kafka-streams-aggregate</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>

    <name>kafka-streams-aggregate</name>
    <description>Demo project for Spring Boot</description>

    <parent>
        <groupId>spring.cloud.stream.samples</groupId>
        <artifactId>spring-cloud-stream-samples-parent</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <relativePath>../..</relativePath>
    </parent>

    <dependencies>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>
@@ -1,11 +1,11 @@
/*
 * Copyright 2015 the original author or authors.
 * Copyright 2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -14,20 +14,30 @@
 * limitations under the License.
 */

package demo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;
package kafka.streams.table.join;

/**
 * @author Ilayaperumal Gopinathan
 * @author Soby Chacko
 */
@SpringBootApplication
public class RxJavaApplication {
public class DomainEvent {

    public static void main(String[] args) {
        SpringApplication.run(RxJavaApplication.class, args);
    String eventType;

    String boardUuid;

    public String getEventType() {
        return eventType;
    }

    public void setEventType(String eventType) {
        this.eventType = eventType;
    }

    public String getBoardUuid() {
        return boardUuid;
    }

    public void setBoardUuid(String boardUuid) {
        this.boardUuid = boardUuid;
    }
}
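The aggregation code and the producer below both serialize `DomainEvent` with a Jackson-backed `JsonSerde`. As a rough illustration of the resulting wire format (this snippet and its class name are not part of this changeset), one event would serialize roughly as follows:

```
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical helper, not part of the sample: shows the JSON shape that a
// Jackson-based serializer produces for a DomainEvent via its getters/setters.
public class DomainEventJsonSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        DomainEvent event = new DomainEvent();
        event.setBoardUuid("12345");
        event.setEventType("thisisanevent");

        // Prints something like: {"eventType":"thisisanevent","boardUuid":"12345"}
        // (property order may vary)
        System.out.println(mapper.writeValueAsString(event));
    }
}
```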
@@ -0,0 +1,88 @@
/*
 * Copyright 2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kafka.streams.table.join;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Serialized;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.QueryableStoreRegistry;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@SpringBootApplication
public class KafkaStreamsAggregateSample {

    @Autowired
    private QueryableStoreRegistry queryableStoreRegistry;

    public static void main(String[] args) {
        SpringApplication.run(KafkaStreamsAggregateSample.class, args);
    }

    @EnableBinding(KafkaStreamsProcessorX.class)
    public static class KafkaStreamsAggregateSampleApplication {

        @StreamListener("input")
        public void process(KStream<Object, DomainEvent> input) {
            ObjectMapper mapper = new ObjectMapper();
            Serde<DomainEvent> domainEventSerde = new JsonSerde<>(DomainEvent.class, mapper);

            input
                .groupBy(
                    (s, domainEvent) -> domainEvent.boardUuid,
                    Serialized.with(null, domainEventSerde))
                .aggregate(
                    String::new,
                    (s, domainEvent, board) -> board.concat(domainEvent.eventType),
                    Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("test-events-snapshots")
                        .withKeySerde(Serdes.String())
                        .withValueSerde(Serdes.String())
                );
        }
    }

    @RestController
    public class FooController {

        @RequestMapping("/events")
        public String events() {

            final ReadOnlyKeyValueStore<String, String> topFiveStore =
                queryableStoreRegistry.getQueryableStoreType("test-events-snapshots", QueryableStoreTypes.<String, String>keyValueStore());
            return topFiveStore.get("12345");
        }
    }

    interface KafkaStreamsProcessorX {

        @Input("input")
        KStream<?, ?> input();
    }
}
@@ -0,0 +1,60 @@
/*
 * Copyright 2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kafka.streams.table.join;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.serializer.JsonSerde;

import java.util.HashMap;
import java.util.Map;

/**
 * @author Soby Chacko
 */
public class Producers {

    public static void main(String... args) {

        ObjectMapper mapper = new ObjectMapper();
        Serde<DomainEvent> domainEventSerde = new JsonSerde<>(DomainEvent.class, mapper);

        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, domainEventSerde.serializer().getClass());

        DomainEvent ddEvent = new DomainEvent();
        ddEvent.setBoardUuid("12345");
        ddEvent.setEventType("thisisanevent");

        DefaultKafkaProducerFactory<String, DomainEvent> pf = new DefaultKafkaProducerFactory<>(props);
        KafkaTemplate<String, DomainEvent> template = new KafkaTemplate<>(pf, true);
        template.setDefaultTopic("foobar");

        template.sendDefault("", ddEvent);
    }
}
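Once the sample application is running and `Producers` has been executed a few times, the aggregated string can be fetched from the `/events` endpoint mentioned in the README. A minimal client sketch (not part of this changeset; it assumes the app is listening on localhost:8080) could look like this:

```
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Hypothetical client, not part of the sample: reads the aggregate that the
// FooController above exposes for the board UUID "12345".
public class EventsClient {

    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/events");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
            // After running Producers twice, this prints the concatenated event
            // types, e.g. "thisisaneventthisisanevent".
            System.out.println(reader.readLine());
        }
    }
}
```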
@@ -0,0 +1,11 @@
spring.cloud.stream.bindings.input:
  destination: foobar
  consumer:
    headerMode: raw
spring.cloud.stream.kafka.streams.binder:
  brokers: localhost #192.168.99.100
  zkNodes: localhost #192.168.99.100
  configuration:
    default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
    default.value.serde: org.apache.kafka.common.serialization.Serdes$BytesSerde
    commit.interval.ms: 1000
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
        </encoder>
    </appender>
    <root level="INFO">
        <appender-ref ref="stdout"/>
    </root>
    <logger name="org.apache.kafka.streams.processor.internals" level="WARN"/>
</configuration>
@@ -0,0 +1,18 @@
package kafka.streams.table.join;

import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class KafkaStreamsAggregateSampleTests {

    @Test
    @Ignore
    public void contextLoads() {
    }

}
24 kafka-streams-samples/kafka-streams-branching/.gitignore (vendored, new file)
@@ -0,0 +1,24 @@
target/
!.mvn/wrapper/maven-wrapper.jar

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
nbproject/private/
build/
nbbuild/
dist/
nbdist/
.nb-gradle/
BIN kafka-streams-samples/kafka-streams-branching/.mvn/wrapper/maven-wrapper.jar (vendored, new file): Binary file not shown.
1 kafka-streams-samples/kafka-streams-branching/.mvn/wrapper/maven-wrapper.properties (vendored, new file)
@@ -0,0 +1 @@
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
39 kafka-streams-samples/kafka-streams-branching/README.adoc (new file)
@@ -0,0 +1,39 @@
== What is this app?

This is an example of a Spring Cloud Stream processor using Kafka Streams branching support.

The example is based on the word count application from the https://github.com/confluentinc/examples/blob/3.2.x/kafka-streams/src/main/java/io/confluent/examples/streams/WordCountLambdaExample.java[reference documentation].
It uses a single input and 3 output destinations.
In essence, the application receives text messages from an input topic, filters them by language (English, French, Spanish, ignoring the rest), computes word occurrence counts over a configurable time window, and reports those counts to the output topics (see the sketch after this README).
This sample uses lambda expressions and thus requires Java 8+.

By default, native decoding and encoding are disabled, which means that any deserialization on the inbound side and serialization on the outbound side is performed by the binder using the configured content types.

=== Running the app:

Go to the root of the repository and do:

`docker-compose up -d`

`./mvnw clean package`

`java -jar target/kafka-streams-branching-0.0.1-SNAPSHOT.jar --spring.cloud.stream.kafka.streams.timeWindow.length=60000`

Issue the following commands:

`docker exec -it kafka-branch /opt/kafka/bin/kafka-console-producer.sh --broker-list 127.0.0.1:9092 --topic words`

On another terminal:

`docker exec -it kafka-branch /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic english-counts`

On another terminal:

`docker exec -it kafka-branch /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic french-counts`

On another terminal:

`docker exec -it kafka-branch /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic spanish-counts`

Enter text ("English", "French", "Spanish" - case doesn't matter) in the console producer and watch the output in the respective console consumer.
The word "english" goes to topic english-counts, "french" goes to topic french-counts, and "spanish" goes to spanish-counts.
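The branching class added by this changeset is only partially visible at the end of this comparison, so here is a generic, minimal sketch (with assumed binding names and a simplified word filter, not the sample's actual class) of how a Kafka Streams branching processor of this kind is typically wired with Spring Cloud Stream:

```
package sketch;

import java.util.Arrays;

import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.TimeWindows;

import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

// Generic illustration only: counts words in a time window and routes the
// counts for "english", "french" and "spanish" to three separate outputs.
@EnableBinding(BranchingSketch.KStreamProcessorWithBranches.class)
public class BranchingSketch {

    interface KStreamProcessorWithBranches {

        @Input("input")
        KStream<?, ?> input();

        @Output("english")
        KStream<?, ?> english();

        @Output("french")
        KStream<?, ?> french();

        @Output("spanish")
        KStream<?, ?> spanish();
    }

    @StreamListener("input")
    @SendTo({ "english", "french", "spanish" })
    public KStream<String, Long>[] process(KStream<Object, String> input) {
        return input
                // split each message into lower-cased words
                .flatMapValues(text -> Arrays.asList(text.toLowerCase().split("\\W+")))
                // re-key by the word itself so the count is per word
                .map((key, word) -> new KeyValue<>(word, word))
                .groupByKey()
                // the window length would normally come from configuration
                .windowedBy(TimeWindows.of(60_000))
                .count()
                .toStream()
                .map((windowedWord, count) -> new KeyValue<>(windowedWord.key(), count))
                // one branch per language keyword; everything else is dropped
                .branch(
                        (word, count) -> "english".equals(word),
                        (word, count) -> "french".equals(word),
                        (word, count) -> "spanish".equals(word));
    }
}
```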
@@ -0,0 +1,19 @@
version: '3'
services:
  kafka:
    image: wurstmeister/kafka
    container_name: kafka-branch
    ports:
      - "9092:9092"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=127.0.0.1
      - KAFKA_ADVERTISED_PORT=9092
      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
    depends_on:
      - zookeeper
  zookeeper:
    image: wurstmeister/zookeeper
    ports:
      - "2181:2181"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=zookeeper
225
kafka-streams-samples/kafka-streams-branching/mvnw
vendored
Executable file
225
kafka-streams-samples/kafka-streams-branching/mvnw
vendored
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven2 Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For Migwn, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
# TODO classpath?
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
143
kafka-streams-samples/kafka-streams-branching/mvnw.cmd
vendored
Normal file
143
kafka-streams-samples/kafka-streams-branching/mvnw.cmd
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM https://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
40
kafka-streams-samples/kafka-streams-branching/pom.xml
Normal file
@@ -0,0 +1,40 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>kafka-streams-branching</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<name>kafka-streams-branching</name>
|
||||
<description>Demo project for Spring Boot</description>
|
||||
|
||||
<parent>
|
||||
<groupId>spring.cloud.stream.samples</groupId>
|
||||
<artifactId>spring-cloud-stream-samples-parent</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<relativePath>../..</relativePath>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
@@ -0,0 +1,133 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.branching;
|
||||
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.kstream.Predicate;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.Output;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
|
||||
@SpringBootApplication
|
||||
public class KafkaStreamsBranchingSample {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(KafkaStreamsBranchingSample.class, args);
|
||||
}
|
||||
|
||||
@EnableBinding(KStreamProcessorX.class)
|
||||
public static class WordCountProcessorApplication {
|
||||
|
||||
@Autowired
|
||||
private TimeWindows timeWindows;
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo({"output1","output2","output3"})
|
||||
@SuppressWarnings("unchecked")
|
||||
public KStream<?, WordCount>[] process(KStream<Object, String> input) {
|
||||
|
||||
Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
|
||||
Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
|
||||
Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");
|
||||
|
||||
return input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.groupBy((key, value) -> value)
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("WordCounts-1"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))))
|
||||
.branch(isEnglish, isFrench, isSpanish);
|
||||
}
|
||||
}
|
||||
|
||||
interface KStreamProcessorX {
|
||||
|
||||
@Input("input")
|
||||
KStream<?, ?> input();
|
||||
|
||||
@Output("output1")
|
||||
KStream<?, ?> output1();
|
||||
|
||||
@Output("output2")
|
||||
KStream<?, ?> output2();
|
||||
|
||||
@Output("output3")
|
||||
KStream<?, ?> output3();
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
|
||||
private String word;
|
||||
|
||||
private long count;
|
||||
|
||||
private Date start;
|
||||
|
||||
private Date end;
|
||||
|
||||
WordCount(String word, long count, Date start, Date end) {
|
||||
this.word = word;
|
||||
this.count = count;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public String getWord() {
|
||||
return word;
|
||||
}
|
||||
|
||||
public void setWord(String word) {
|
||||
this.word = word;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public Date getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public void setStart(Date start) {
|
||||
this.start = start;
|
||||
}
|
||||
|
||||
public Date getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
spring.cloud.stream.bindings.output1.contentType: application/json
|
||||
spring.cloud.stream.bindings.output2.contentType: application/json
|
||||
spring.cloud.stream.bindings.output3.contentType: application/json
|
||||
spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms: 1000
|
||||
spring.cloud.stream.kafka.streams.binder.configuration:
|
||||
default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
spring.cloud.stream.bindings.output1:
|
||||
destination: english-counts
|
||||
producer:
|
||||
headerMode: raw
|
||||
spring.cloud.stream.bindings.output2:
|
||||
destination: french-counts
|
||||
producer:
|
||||
headerMode: raw
|
||||
spring.cloud.stream.bindings.output3:
|
||||
destination: spanish-counts
|
||||
producer:
|
||||
headerMode: raw
|
||||
spring.cloud.stream.bindings.input:
|
||||
destination: words
|
||||
group: group1
|
||||
consumer:
|
||||
headerMode: raw
|
||||
spring.cloud.stream.kafka.streams.binder:
|
||||
brokers: localhost #192.168.99.100 #localhost
|
||||
zkNodes: localhost #192.168.99.100 #localhost
|
||||
@@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration>
|
||||
<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
<root level="INFO">
|
||||
<appender-ref ref="stdout"/>
|
||||
</root>
|
||||
<logger name="org.apache.kafka.streams.processor.internals" level="WARN"/>
|
||||
</configuration>
|
||||
@@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.branching;
|
||||
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
|
||||
@RunWith(SpringRunner.class)
|
||||
@SpringBootTest
|
||||
public class KafkaStreamsBranchingSampleTests {
|
||||
|
||||
@Test
|
||||
@Ignore
|
||||
public void contextLoads() {
|
||||
}
|
||||
|
||||
}
|
||||
24
kafka-streams-samples/kafka-streams-dlq-sample/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
target/
|
||||
!.mvn/wrapper/maven-wrapper.jar
|
||||
|
||||
### STS ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
|
||||
### NetBeans ###
|
||||
nbproject/private/
|
||||
build/
|
||||
nbbuild/
|
||||
dist/
|
||||
nbdist/
|
||||
.nb-gradle/
|
||||
BIN
kafka-streams-samples/kafka-streams-dlq-sample/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
Binary file not shown.
1
kafka-streams-samples/kafka-streams-dlq-sample/.mvn/wrapper/maven-wrapper.properties
vendored
Normal file
@@ -0,0 +1 @@
|
||||
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
|
||||
41
kafka-streams-samples/kafka-streams-dlq-sample/README.adoc
Normal file
@@ -0,0 +1,41 @@
|
||||
== What is this app?
|
||||
|
||||
This is an example of a Spring Cloud Stream processor using Kafka Streams support.
|
||||
|
||||
This is a demonstration of deserialization errors and DLQ support in the Kafka Streams binder.
|
||||
|
||||
The example is based on the word count application from the https://github.com/confluentinc/examples/blob/3.2.x/kafka-streams/src/main/java/io/confluent/examples/streams/WordCountLambdaExample.java[reference documentation].
|
||||
It uses a single input and a single output.
|
||||
In essence, the application receives text messages from an input topic, computes word occurrence counts within a configurable time window, and reports the results in an output topic.
|
||||
This sample uses lambda expressions and thus requires Java 8+.
|
||||
|
||||
=== Running the app:
|
||||
|
||||
`docker-compose up -d`
|
||||
|
||||
Go to the root of the repository and do: `./mvnw clean package`
|
||||
|
||||
`java -jar target/kafka-streams-dlq-sample-0.0.1-SNAPSHOT.jar`
|
||||
|
||||
The default application.yml file demonstrates native decoding by Kafka.
|
||||
The default value serde is set to IntegerSerde in order to force deserialization errors.
|
||||
|
||||
Issue the following commands:
|
||||
|
||||
`docker exec -it kafka-dlq /opt/kafka/bin/kafka-console-producer.sh --broker-list 127.0.0.1:9092 --topic words`
|
||||
|
||||
On another terminal:
|
||||
|
||||
`docker exec -it kafka-dlq /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic counts`
|
||||
|
||||
On another terminal:
|
||||
|
||||
`docker exec -it kafka-dlq /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic words-count-dlq`
|
||||
|
||||
On the console producer, enter some text data.
|
||||
You will see that the messages produce deserialization errors and end up in the DLQ topic - words-count-dlq.
|
||||
You will not see any messages arriving at the regular destination, counts.
|
||||
|
||||
There is another yaml file provided (by-framework-decoding.yml).
|
||||
Use that as application.yml to see how it works when the deserialization is done by the framework.
|
||||
In this case as well, the failed messages appear in the DLQ topic.
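
For reference, the DLQ behaviour described above comes down to two Kafka Streams binder properties in this sample's application.yml (a minimal sketch; the full file is part of this change set): `spring.cloud.stream.kafka.streams.binder.serdeError: sendToDlq` routes records that fail deserialization to a DLQ instead of failing the application, and `spring.cloud.stream.kafka.streams.bindings.input.consumer.dlqName: words-count-dlq` names the DLQ topic used for the input binding.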
|
||||
@@ -0,0 +1,19 @@
|
||||
version: '3'
|
||||
services:
|
||||
kafka:
|
||||
image: wurstmeister/kafka
|
||||
container_name: kafka-dlq
|
||||
ports:
|
||||
- "9092:9092"
|
||||
environment:
|
||||
- KAFKA_ADVERTISED_HOST_NAME=127.0.0.1
|
||||
- KAFKA_ADVERTISED_PORT=9092
|
||||
- KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
|
||||
depends_on:
|
||||
- zookeeper
|
||||
zookeeper:
|
||||
image: wurstmeister/zookeeper
|
||||
ports:
|
||||
- "2181:2181"
|
||||
environment:
|
||||
- KAFKA_ADVERTISED_HOST_NAME=zookeeper
|
||||
225
kafka-streams-samples/kafka-streams-dlq-sample/mvnw
vendored
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven2 Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For MinGW, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
# TODO classpath?
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
143
kafka-streams-samples/kafka-streams-dlq-sample/mvnw.cmd
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM https://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
42
kafka-streams-samples/kafka-streams-dlq-sample/pom.xml
Normal file
@@ -0,0 +1,42 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>kafka-streams-dlq-sample</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<name>kafka-streams-dlq-sample</name>
|
||||
<description>Demo project for Spring Boot</description>
|
||||
|
||||
<parent>
|
||||
<groupId>spring.cloud.stream.samples</groupId>
|
||||
<artifactId>spring-cloud-stream-samples-parent</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<relativePath>../..</relativePath>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
@@ -0,0 +1,110 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.dlq.sample;
|
||||
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
|
||||
@SpringBootApplication
|
||||
public class KafkaStreamsDlqSample {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(KafkaStreamsDlqSample.class, args);
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
public static class WordCountProcessorApplication {
|
||||
|
||||
@Autowired
|
||||
private TimeWindows timeWindows;
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<?, WordCount> process(KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serdes.String(), Serdes.String())
|
||||
.count(timeWindows, "WordCounts-1")
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
|
||||
}
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
|
||||
private String word;
|
||||
|
||||
private long count;
|
||||
|
||||
private Date start;
|
||||
|
||||
private Date end;
|
||||
|
||||
WordCount(String word, long count, Date start, Date end) {
|
||||
this.word = word;
|
||||
this.count = count;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public String getWord() {
|
||||
return word;
|
||||
}
|
||||
|
||||
public void setWord(String word) {
|
||||
this.word = word;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public Date getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public void setStart(Date start) {
|
||||
this.start = start;
|
||||
}
|
||||
|
||||
public Date getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
spring.cloud.stream.bindings.output.contentType: application/json
|
||||
spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms: 1000
|
||||
spring.cloud.stream.kafka.streams.binder.configuration:
|
||||
default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
default.value.serde: org.apache.kafka.common.serialization.Serdes$IntegerSerde
|
||||
application.id: default
|
||||
spring.cloud.stream.bindings.output:
|
||||
destination: counts
|
||||
producer:
|
||||
headerMode: raw
|
||||
#useNativeEncoding: true
|
||||
spring.cloud.stream.bindings.input:
|
||||
destination: words
|
||||
group: group1
|
||||
consumer:
|
||||
headerMode: raw
|
||||
useNativeDecoding: true
|
||||
spring.cloud.stream.kafka.streams.bindings.input.consumer.dlqName: words-count-dlq
|
||||
spring.cloud.stream.kafka.streams.binder:
|
||||
brokers: localhost #192.168.99.100
|
||||
zkNodes: localhost #192.168.99.100
|
||||
serdeError: sendToDlq
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,25 @@
|
||||
spring.cloud.stream.bindings.output.contentType: application/json
|
||||
spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms: 1000
|
||||
spring.cloud.stream.kafka.streams.binder.configuration:
|
||||
default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
application.id: default
|
||||
spring.cloud.stream.bindings.output:
|
||||
destination: counts
|
||||
producer:
|
||||
headerMode: raw
|
||||
#useNativeEncoding: true
|
||||
spring.cloud.stream.bindings.input:
|
||||
contentType: foo/bar
|
||||
destination: words
|
||||
group: group1
|
||||
consumer:
|
||||
headerMode: raw
|
||||
useNativeDecoding: false
|
||||
spring.cloud.stream.kafka.streams.bindings.input.consumer.dlqName: words-count-dlq
|
||||
spring.cloud.stream.kafka.streams.binder:
|
||||
brokers: localhost #192.168.99.100
|
||||
zkNodes: localhost #192.168.99.100
|
||||
serdeError: sendToDlq
|
||||
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration>
|
||||
<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
<root level="INFO">
|
||||
<appender-ref ref="stdout"/>
|
||||
</root>
|
||||
<logger name="org.apache.kafka.streams.processor.internals" level="WARN"/>
|
||||
</configuration>
|
||||
@@ -0,0 +1,18 @@
|
||||
package kafka.streams.dlq.sample;
|
||||
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
|
||||
@RunWith(SpringRunner.class)
|
||||
@SpringBootTest
|
||||
public class KafkaStreamsDlqExampleTests {
|
||||
|
||||
@Test
|
||||
@Ignore
|
||||
public void contextLoads() {
|
||||
}
|
||||
|
||||
}
|
||||
24
kafka-streams-samples/kafka-streams-interactive-query-advanced/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
target/
|
||||
!.mvn/wrapper/maven-wrapper.jar
|
||||
|
||||
### STS ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
|
||||
### NetBeans ###
|
||||
nbproject/private/
|
||||
build/
|
||||
nbbuild/
|
||||
dist/
|
||||
nbdist/
|
||||
.nb-gradle/
|
||||
BIN
kafka-streams-samples/kafka-streams-interactive-query-advanced/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
Binary file not shown.
@@ -0,0 +1 @@
|
||||
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
|
||||
@@ -0,0 +1,26 @@
|
||||
== What is this app?
|
||||
|
||||
This is an example of a Spring Cloud Stream processor using Kafka Streams support.
|
||||
|
||||
This example is a Spring Cloud Stream adaptation of this Kafka Streams sample: https://github.com/confluentinc/kafka-streams-examples/tree/4.0.0-post/src/main/java/io/confluent/examples/streams/interactivequeries/kafkamusic
|
||||
|
||||
This sample demonstrates the concept of interactive queries in Kafka Streams.
|
||||
There is a REST service provided as part of the application that can be used to query the store interactively.
|
||||
|
||||
=== Running the app:
|
||||
|
||||
1. `docker-compose up -d`
|
||||
|
||||
2. Start the Confluent Schema Registry. The following command is based on the Confluent Platform.
|
||||
|
||||
`./bin/schema-registry-start ./etc/schema-registry/schema-registry.properties`
|
||||
|
||||
3. Go to the root of the repository and do: `./mvnw clean package`
|
||||
|
||||
4. `java -jar target/kafka-streams-interactive-query-advanced-0.0.1-SNAPSHOT.jar`
|
||||
|
||||
5. Run the stand-alone `Producers` application to generate data and start the processing.
|
||||
Keep it running for a while.
|
||||
|
||||
6. Go to the URL: http://localhost:8080/charts/top-five?genre=Punk
|
||||
Keep refreshing the URL and you will see the song play count information change.
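
If you prefer the command line, the same interactive query can be exercised with curl against the endpoint above (an illustrative one-liner; host and port are this sample's defaults): `curl "http://localhost:8080/charts/top-five?genre=Punk"`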
|
||||
@@ -0,0 +1,19 @@
|
||||
version: '3'
|
||||
services:
|
||||
kafka:
|
||||
image: wurstmeister/kafka
|
||||
container_name: kafka-iq-advanced
|
||||
ports:
|
||||
- "9092:9092"
|
||||
environment:
|
||||
- KAFKA_ADVERTISED_HOST_NAME=127.0.0.1
|
||||
- KAFKA_ADVERTISED_PORT=9092
|
||||
- KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
|
||||
depends_on:
|
||||
- zookeeper
|
||||
zookeeper:
|
||||
image: wurstmeister/zookeeper
|
||||
ports:
|
||||
- "2181:2181"
|
||||
environment:
|
||||
- KAFKA_ADVERTISED_HOST_NAME=zookeeper
|
||||
225
kafka-streams-samples/kafka-streams-interactive-query-advanced/mvnw
vendored
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven2 Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For MinGW, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
# TODO classpath?
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
143
kafka-streams-samples/kafka-streams-interactive-query-advanced/mvnw.cmd
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM https://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
@@ -0,0 +1,95 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>kafka-streams-interactive-query-advanced</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<name>kafka-streams-interactive-query-advanced</name>
|
||||
<description>Spring Cloud Stream sample for KStream interactive queries</description>
|
||||
|
||||
<parent>
|
||||
<groupId>spring.cloud.stream.samples</groupId>
|
||||
<artifactId>spring-cloud-stream-samples-parent</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<relativePath>../..</relativePath>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
<confluent.version>4.0.0</confluent.version>
|
||||
<avro.version>1.8.2</avro.version>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>io.confluent</groupId>
|
||||
<artifactId>kafka-streams-avro-serde</artifactId>
|
||||
<version>${confluent.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.confluent</groupId>
|
||||
<artifactId>kafka-avro-serializer</artifactId>
|
||||
<version>${confluent.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.confluent</groupId>
|
||||
<artifactId>kafka-schema-registry-client</artifactId>
|
||||
<version>${confluent.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.avro</groupId>
|
||||
<artifactId>avro-maven-plugin</artifactId>
|
||||
<version>${avro.version}</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>generate-sources</phase>
|
||||
<goals>
|
||||
<goal>schema</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<sourceDirectory>src/main/resources/avro/kafka/streams/interactive/query</sourceDirectory>
|
||||
<outputDirectory>${project.build.directory}/generated-sources</outputDirectory>
|
||||
<stringType>String</stringType>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
</build>
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>confluent</id>
|
||||
<url>https://packages.confluent.io/maven/</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
|
||||
|
||||
</project>
|
||||
@@ -0,0 +1,324 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.interactive.query;
|
||||
|
||||
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
|
||||
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
|
||||
import kafka.streams.interactive.query.avro.PlayEvent;
|
||||
import kafka.streams.interactive.query.avro.Song;
|
||||
import kafka.streams.interactive.query.avro.SongPlayCount;
|
||||
import org.apache.kafka.common.serialization.Deserializer;
|
||||
import org.apache.kafka.common.serialization.Serde;
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.common.serialization.Serializer;
|
||||
import org.apache.kafka.common.utils.Bytes;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.*;
|
||||
import org.apache.kafka.streams.state.KeyValueStore;
|
||||
import org.apache.kafka.streams.state.QueryableStoreTypes;
|
||||
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.QueryableStoreRegistry;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
|
||||
@SpringBootApplication
|
||||
public class KafkaStreamsInteractiveQuerySample {
|
||||
static final String PLAY_EVENTS = "play-events";
|
||||
static final String SONG_FEED = "song-feed";
|
||||
static final String TOP_FIVE_KEY = "all";
|
||||
static final String TOP_FIVE_SONGS_STORE = "top-five-songs";
|
||||
static final String ALL_SONGS = "all-songs";
|
||||
|
||||
@Autowired
|
||||
private QueryableStoreRegistry queryableStoreRegistry;
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(KafkaStreamsInteractiveQuerySample.class, args);
|
||||
}
|
||||
|
||||
@EnableBinding(KStreamProcessorX.class)
|
||||
public static class KStreamMusicSampleApplication {
|
||||
|
||||
private static final Long MIN_CHARTABLE_DURATION = 30 * 1000L;
|
||||
private static final String SONG_PLAY_COUNT_STORE = "song-play-count";
|
||||
|
||||
static final String TOP_FIVE_SONGS_BY_GENRE_STORE = "top-five-songs-by-genre";
|
||||
static final String TOP_FIVE_KEY = "all";
|
||||
|
||||
@StreamListener
|
||||
public void process(@Input("input") KStream<String, PlayEvent> playEvents,
|
||||
@Input("inputX") KTable<Long, Song> songTable) {
|
||||
|
||||
// create and configure the SpecificAvroSerdes required in this example
|
||||
final Map<String, String> serdeConfig = Collections.singletonMap(
|
||||
AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
|
||||
|
||||
final SpecificAvroSerde<PlayEvent> playEventSerde = new SpecificAvroSerde<>();
|
||||
playEventSerde.configure(serdeConfig, false);
|
||||
|
||||
final SpecificAvroSerde<Song> keySongSerde = new SpecificAvroSerde<>();
|
||||
keySongSerde.configure(serdeConfig, true);
|
||||
|
||||
final SpecificAvroSerde<Song> valueSongSerde = new SpecificAvroSerde<>();
|
||||
valueSongSerde.configure(serdeConfig, false);
|
||||
|
||||
final SpecificAvroSerde<SongPlayCount> songPlayCountSerde = new SpecificAvroSerde<>();
|
||||
songPlayCountSerde.configure(serdeConfig, false);
|
||||
|
||||
// Accept play events that have a duration >= the minimum
|
||||
final KStream<Long, PlayEvent> playsBySongId =
|
||||
playEvents.filter((region, event) -> event.getDuration() >= MIN_CHARTABLE_DURATION)
|
||||
// repartition based on song id
|
||||
.map((key, value) -> KeyValue.pair(value.getSongId(), value));
|
||||
|
||||
// join the plays with song as we will use it later for charting
|
||||
final KStream<Long, Song> songPlays = playsBySongId.leftJoin(songTable,
|
||||
(value1, song) -> song,
|
||||
Joined.with(Serdes.Long(), playEventSerde, valueSongSerde));
|
||||
|
||||
// create a state store to track song play counts
|
||||
final KTable<Song, Long> songPlayCounts = songPlays.groupBy((songId, song) -> song,
|
||||
Serialized.with(keySongSerde, valueSongSerde))
|
||||
.count(Materialized.<Song, Long, KeyValueStore<Bytes, byte[]>>as(SONG_PLAY_COUNT_STORE)
|
||||
.withKeySerde(valueSongSerde)
|
||||
.withValueSerde(Serdes.Long()));
|
||||
|
||||
final TopFiveSerde topFiveSerde = new TopFiveSerde();
|
||||
|
||||
// Compute the top five charts for each genre. The results of this computation will continuously update the state
|
||||
// store "top-five-songs-by-genre", and this state store can then be queried interactively via a REST API (cf.
|
||||
// MusicPlaysRestService) for the latest charts per genre.
|
||||
songPlayCounts.groupBy((song, plays) ->
|
||||
KeyValue.pair(song.getGenre().toLowerCase(),
|
||||
new SongPlayCount(song.getId(), plays)),
|
||||
Serialized.with(Serdes.String(), songPlayCountSerde))
|
||||
// aggregate into a TopFiveSongs instance that will keep track
|
||||
// of the current top five for each genre. The data will be available in the
|
||||
// top-five-songs-genre store
|
||||
.aggregate(TopFiveSongs::new,
|
||||
(aggKey, value, aggregate) -> {
|
||||
aggregate.add(value);
|
||||
return aggregate;
|
||||
},
|
||||
(aggKey, value, aggregate) -> {
|
||||
aggregate.remove(value);
|
||||
return aggregate;
|
||||
},
|
||||
Materialized.<String, TopFiveSongs, KeyValueStore<Bytes, byte[]>>as(TOP_FIVE_SONGS_BY_GENRE_STORE)
|
||||
.withKeySerde(Serdes.String())
|
||||
.withValueSerde(topFiveSerde)
|
||||
);
|
||||
|
||||
// Compute the top five chart. The results of this computation will continuously update the state
|
||||
// store "top-five-songs", and this state store can then be queried interactively via a REST API (cf.
|
||||
// MusicPlaysRestService) for the latest charts per genre.
|
||||
songPlayCounts.groupBy((song, plays) ->
|
||||
KeyValue.pair(TOP_FIVE_KEY,
|
||||
new SongPlayCount(song.getId(), plays)),
|
||||
Serialized.with(Serdes.String(), songPlayCountSerde))
|
||||
.aggregate(TopFiveSongs::new,
|
||||
(aggKey, value, aggregate) -> {
|
||||
aggregate.add(value);
|
||||
return aggregate;
|
||||
},
|
||||
(aggKey, value, aggregate) -> {
|
||||
aggregate.remove(value);
|
||||
return aggregate;
|
||||
},
|
||||
Materialized.<String, TopFiveSongs, KeyValueStore<Bytes, byte[]>>as(TOP_FIVE_SONGS_STORE)
|
||||
.withKeySerde(Serdes.String())
|
||||
.withValueSerde(topFiveSerde)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
interface KStreamProcessorX {
|
||||
|
||||
@Input("input")
|
||||
KStream<?, ?> input();
|
||||
|
||||
@Input("inputX")
|
||||
KTable<?, ?> inputX();
|
||||
}
|
||||
|
||||
@RestController
|
||||
public class FooController {
|
||||
|
||||
@RequestMapping("/charts/top-five")
|
||||
public List<SongPlayCountBean> greeting(@RequestParam(value="genre") String genre) {
|
||||
return topFiveSongs(KafkaStreamsInteractiveQuerySample.TOP_FIVE_KEY, KafkaStreamsInteractiveQuerySample.TOP_FIVE_SONGS_STORE);
|
||||
}
|
||||
|
||||
private List<SongPlayCountBean> topFiveSongs(final String key,
|
||||
final String storeName) {
|
||||
|
||||
final ReadOnlyKeyValueStore<String, TopFiveSongs> topFiveStore =
|
||||
queryableStoreRegistry.getQueryableStoreType(storeName, QueryableStoreTypes.<String, TopFiveSongs>keyValueStore());
|
||||
|
||||
// Get the value from the store
|
||||
final TopFiveSongs value = topFiveStore.get(key);
|
||||
if (value == null) {
|
||||
throw new IllegalArgumentException(String.format("Unable to find value in %s for key %s", storeName, key));
|
||||
}
|
||||
final List<SongPlayCountBean> results = new ArrayList<>();
|
||||
value.forEach(songPlayCount -> {
|
||||
final ReadOnlyKeyValueStore<Long, Song> songStore =
|
||||
queryableStoreRegistry.getQueryableStoreType(KafkaStreamsInteractiveQuerySample.ALL_SONGS, QueryableStoreTypes.<Long, Song>keyValueStore());
|
||||
|
||||
final Song song = songStore.get(songPlayCount.getSongId());
|
||||
results.add(new SongPlayCountBean(song.getArtist(),song.getAlbum(), song.getName(),
|
||||
songPlayCount.getPlays()));
|
||||
});
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Serde for TopFiveSongs
|
||||
*/
|
||||
private static class TopFiveSerde implements Serde<TopFiveSongs> {
|
||||
|
||||
@Override
|
||||
public void configure(final Map<String, ?> map, final boolean b) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Serializer<TopFiveSongs> serializer() {
|
||||
return new Serializer<TopFiveSongs>() {
|
||||
@Override
|
||||
public void configure(final Map<String, ?> map, final boolean b) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] serialize(final String s, final TopFiveSongs topFiveSongs) {
|
||||
final ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
final DataOutputStream
|
||||
dataOutputStream =
|
||||
new DataOutputStream(out);
|
||||
try {
|
||||
for (SongPlayCount songPlayCount : topFiveSongs) {
|
||||
dataOutputStream.writeLong(songPlayCount.getSongId());
|
||||
dataOutputStream.writeLong(songPlayCount.getPlays());
|
||||
}
|
||||
dataOutputStream.flush();
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return out.toByteArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public Deserializer<TopFiveSongs> deserializer() {
|
||||
return new Deserializer<TopFiveSongs>() {
|
||||
@Override
|
||||
public void configure(final Map<String, ?> map, final boolean b) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public TopFiveSongs deserialize(final String s, final byte[] bytes) {
|
||||
if (bytes == null || bytes.length == 0) {
|
||||
return null;
|
||||
}
|
||||
final TopFiveSongs result = new TopFiveSongs();
|
||||
|
||||
final DataInputStream
|
||||
dataInputStream =
|
||||
new DataInputStream(new ByteArrayInputStream(bytes));
|
||||
|
||||
try {
|
||||
while(dataInputStream.available() > 0) {
|
||||
result.add(new SongPlayCount(dataInputStream.readLong(),
|
||||
dataInputStream.readLong()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Used in aggregations to keep track of the Top five songs
|
||||
*/
|
||||
static class TopFiveSongs implements Iterable<SongPlayCount> {
|
||||
private final Map<Long, SongPlayCount> currentSongs = new HashMap<>();
|
||||
private final TreeSet<SongPlayCount> topFive = new TreeSet<>((o1, o2) -> {
|
||||
final int result = o2.getPlays().compareTo(o1.getPlays());
|
||||
if (result != 0) {
|
||||
return result;
|
||||
}
|
||||
return o1.getSongId().compareTo(o2.getSongId());
|
||||
});
|
||||
|
||||
public void add(final SongPlayCount songPlayCount) {
|
||||
if(currentSongs.containsKey(songPlayCount.getSongId())) {
|
||||
topFive.remove(currentSongs.remove(songPlayCount.getSongId()));
|
||||
}
|
||||
topFive.add(songPlayCount);
|
||||
currentSongs.put(songPlayCount.getSongId(), songPlayCount);
|
||||
if (topFive.size() > 5) {
|
||||
final SongPlayCount last = topFive.last();
|
||||
currentSongs.remove(last.getSongId());
|
||||
topFive.remove(last);
|
||||
}
|
||||
}
|
||||
|
||||
void remove(final SongPlayCount value) {
|
||||
topFive.remove(value);
|
||||
currentSongs.remove(value.getSongId());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Iterator<SongPlayCount> iterator() {
|
||||
return topFive.iterator();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,154 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.interactive.query;
|
||||
|
||||
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
|
||||
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
|
||||
import kafka.streams.interactive.query.avro.PlayEvent;
|
||||
import kafka.streams.interactive.query.avro.Song;
|
||||
import org.apache.kafka.clients.producer.ProducerConfig;
|
||||
import org.apache.kafka.common.serialization.LongSerializer;
|
||||
import org.apache.kafka.common.serialization.StringSerializer;
|
||||
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
public class Producers {
|
||||
|
||||
public static void main(String... args) throws Exception {
|
||||
|
||||
final Map<String, String> serdeConfig = Collections.singletonMap(
|
||||
AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
|
||||
final SpecificAvroSerializer<PlayEvent> playEventSerializer = new SpecificAvroSerializer<>();
|
||||
playEventSerializer.configure(serdeConfig, false);
|
||||
final SpecificAvroSerializer<Song> songSerializer = new SpecificAvroSerializer<>();
|
||||
songSerializer.configure(serdeConfig, false);
|
||||
|
||||
final List<Song> songs = Arrays.asList(new Song(1L,
|
||||
"Fresh Fruit For Rotting Vegetables",
|
||||
"Dead Kennedys",
|
||||
"Chemical Warfare",
|
||||
"Punk"),
|
||||
new Song(2L,
|
||||
"We Are the League",
|
||||
"Anti-Nowhere League",
|
||||
"Animal",
|
||||
"Punk"),
|
||||
new Song(3L,
|
||||
"Live In A Dive",
|
||||
"Subhumans",
|
||||
"All Gone Dead",
|
||||
"Punk"),
|
||||
new Song(4L,
|
||||
"PSI",
|
||||
"Wheres The Pope?",
|
||||
"Fear Of God",
|
||||
"Punk"),
|
||||
new Song(5L,
|
||||
"Totally Exploited",
|
||||
"The Exploited",
|
||||
"Punks Not Dead",
|
||||
"Punk"),
|
||||
new Song(6L,
|
||||
"The Audacity Of Hype",
|
||||
"Jello Biafra And The Guantanamo School Of "
|
||||
+ "Medicine",
|
||||
"Three Strikes",
|
||||
"Punk"),
|
||||
new Song(7L,
|
||||
"Licensed to Ill",
|
||||
"The Beastie Boys",
|
||||
"Fight For Your Right",
|
||||
"Hip Hop"),
|
||||
new Song(8L,
|
||||
"De La Soul Is Dead",
|
||||
"De La Soul",
|
||||
"Oodles Of O's",
|
||||
"Hip Hop"),
|
||||
new Song(9L,
|
||||
"Straight Outta Compton",
|
||||
"N.W.A",
|
||||
"Gangsta Gangsta",
|
||||
"Hip Hop"),
|
||||
new Song(10L,
|
||||
"Fear Of A Black Planet",
|
||||
"Public Enemy",
|
||||
"911 Is A Joke",
|
||||
"Hip Hop"),
|
||||
new Song(11L,
|
||||
"Curtain Call - The Hits",
|
||||
"Eminem",
|
||||
"Fack",
|
||||
"Hip Hop"),
|
||||
new Song(12L,
|
||||
"The Calling",
|
||||
"Hilltop Hoods",
|
||||
"The Calling",
|
||||
"Hip Hop")
|
||||
|
||||
);
|
||||
|
||||
Map<String, Object> props = new HashMap<>();
|
||||
props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
|
||||
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
|
||||
props.put(ProducerConfig.RETRIES_CONFIG, 0);
|
||||
props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
|
||||
props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
|
||||
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
|
||||
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
|
||||
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, playEventSerializer.getClass());
|
||||
|
||||
|
||||
Map<String, Object> props1 = new HashMap<>(props);
|
||||
props1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
|
||||
props1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, songSerializer.getClass());
|
||||
|
||||
DefaultKafkaProducerFactory<Long, Song> pf1 = new DefaultKafkaProducerFactory<>(props1);
|
||||
KafkaTemplate<Long, Song> template1 = new KafkaTemplate<>(pf1, true);
|
||||
template1.setDefaultTopic(KafkaStreamsInteractiveQuerySample.SONG_FEED);
|
||||
|
||||
songs.forEach(song -> {
|
||||
System.out.println("Writing song information for '" + song.getName() + "' to input topic " +
|
||||
KafkaStreamsInteractiveQuerySample.SONG_FEED);
|
||||
template1.sendDefault(song.getId(), song);
|
||||
});
|
||||
|
||||
|
||||
DefaultKafkaProducerFactory<String, PlayEvent> pf = new DefaultKafkaProducerFactory<>(props);
|
||||
KafkaTemplate<String, PlayEvent> template = new KafkaTemplate<>(pf, true);
|
||||
template.setDefaultTopic(KafkaStreamsInteractiveQuerySample.PLAY_EVENTS);
|
||||
|
||||
|
||||
final long duration = 60 * 1000L;
|
||||
final Random random = new Random();
|
||||
|
||||
// send a play event every 100 milliseconds
|
||||
while (true) {
|
||||
final Song song = songs.get(random.nextInt(songs.size()));
|
||||
System.out.println("Writing play event for song " + song.getName() + " to input topic " +
|
||||
KafkaStreamsInteractiveQuerySample.PLAY_EVENTS);
|
||||
template.sendDefault("uk", new PlayEvent(song.getId(), duration));
|
||||
|
||||
Thread.sleep(100L);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,103 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.interactive.query;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* @author Soby Chacko
|
||||
*/
|
||||
public class SongPlayCountBean {
|
||||
|
||||
private String artist;
|
||||
private String album;
|
||||
private String name;
|
||||
private Long plays;
|
||||
|
||||
public SongPlayCountBean() {}
|
||||
|
||||
public SongPlayCountBean(final String artist, final String album, final String name, final Long
|
||||
plays) {
|
||||
|
||||
this.artist = artist;
|
||||
this.album = album;
|
||||
this.name = name;
|
||||
this.plays = plays;
|
||||
}
|
||||
|
||||
public String getArtist() {
|
||||
return artist;
|
||||
}
|
||||
|
||||
public void setArtist(final String artist) {
|
||||
this.artist = artist;
|
||||
}
|
||||
|
||||
public String getAlbum() {
|
||||
return album;
|
||||
}
|
||||
|
||||
public void setAlbum(final String album) {
|
||||
this.album = album;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(final String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public Long getPlays() {
|
||||
return plays;
|
||||
}
|
||||
|
||||
public void setPlays(final Long plays) {
|
||||
this.plays = plays;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "SongPlayCountBean{" +
|
||||
"artist='" + artist + '\'' +
|
||||
", album='" + album + '\'' +
|
||||
", name='" + name + '\'' +
|
||||
", plays=" + plays +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final SongPlayCountBean that = (SongPlayCountBean) o;
|
||||
return Objects.equals(artist, that.artist) &&
|
||||
Objects.equals(album, that.album) &&
|
||||
Objects.equals(name, that.name) &&
|
||||
Objects.equals(plays, that.plays);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(artist, album, name, plays);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
spring.cloud.stream.bindings.input:
  destination: play-events
  consumer:
    useNativeDecoding: true
    headerMode: raw
spring.cloud.stream.bindings.inputX:
  destination: song-feed
  consumer:
    useNativeDecoding: true
    headerMode: raw
spring.cloud.stream.kafka.streams.bindings.input:
  consumer:
    keySerde: org.apache.kafka.common.serialization.Serdes$StringSerde
    valueSerde: io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde
spring.cloud.stream.kafka.streams.bindings.inputX:
  consumer:
    keySerde: org.apache.kafka.common.serialization.Serdes$LongSerde
    valueSerde: io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde
    materializedAs: all-songs
spring.cloud.stream.kafka.streams.binder:
  brokers: localhost #192.168.99.100
  zkNodes: localhost #192.168.99.100
  configuration:
    schema.registry.url: http://localhost:8081
    commit.interval.ms: 1000
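The `materializedAs: all-songs` setting above materializes the `inputX` KTable as a queryable state store. As a minimal sketch of how that store is then read (it mirrors the `QueryableStoreRegistry` usage in the sample's REST controller; the `SongLookup` class and `findSong` method are hypothetical helpers, not part of the sample sources):

```
package kafka.streams.interactive.query;

import kafka.streams.interactive.query.avro.Song;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.binder.kafka.streams.QueryableStoreRegistry;
import org.springframework.stereotype.Component;

// Sketch only (hypothetical helper): reads the "all-songs" store that the
// inputX binding materializes via the materializedAs property above.
@Component
public class SongLookup {

    @Autowired
    private QueryableStoreRegistry queryableStoreRegistry;

    public Song findSong(Long songId) {
        ReadOnlyKeyValueStore<Long, Song> allSongs =
                queryableStoreRegistry.getQueryableStoreType("all-songs",
                        QueryableStoreTypes.<Long, Song>keyValueStore());
        return allSongs.get(songId);
    }
}
```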
@@ -0,0 +1,8 @@
{"namespace": "kafka.streams.interactive.query.avro",
 "type": "record",
 "name": "PlayEvent",
 "fields": [
     {"name": "song_id", "type": "long"},
     {"name": "duration", "type": "long"}
 ]
}
@@ -0,0 +1,11 @@
{"namespace": "kafka.streams.interactive.query.avro",
 "type": "record",
 "name": "Song",
 "fields": [
     {"name": "id", "type": "long"},
     {"name": "album", "type": "string"},
     {"name": "artist", "type": "string"},
     {"name": "name", "type": "string"},
     {"name": "genre", "type": "string"}
 ]
}
@@ -0,0 +1,8 @@
{"namespace": "kafka.streams.interactive.query.avro",
 "type": "record",
 "name": "SongPlayCount",
 "fields": [
     {"name": "song_id", "type": "long"},
     {"name": "plays", "type": "long"}
 ]
}
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
        </encoder>
    </appender>
    <root level="INFO">
        <appender-ref ref="stdout"/>
    </root>
    <logger name="org.apache.kafka.streams.processor.internals" level="WARN"/>
</configuration>
@@ -0,0 +1,18 @@
package kafka.streams.interactive.query;

import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class KafkaStreamsInteractiveQueryTests {

    @Test
    @Ignore
    public void contextLoads() {
    }

}
BIN
kafka-streams-samples/kafka-streams-interactive-query-basic/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
Binary file not shown.
@@ -0,0 +1 @@
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
@@ -0,0 +1,35 @@
== What is this app?

This is an example of a Spring Cloud Stream processor using Kafka Streams support.

The example is based on a contrived use case of tracking products by interactively querying their status.
The application accepts product IDs and tracks how many times each has been seen so far by interactively querying the underlying state store.
This sample uses lambda expressions and thus requires Java 8+.

=== Running the app:

Go to the root of the repository and do:

`docker-compose up -d`

`./mvnw clean package`

`java -jar target/kafka-streams-interactive-query-basic-0.0.1-SNAPSHOT.jar --app.product.tracker.productIds=123,124,125`

The above command will track products with IDs 123, 124, and 125 and print their counts seen so far every 30 seconds.

Issue the following command:

`docker exec -it kafka-iq-basic /opt/kafka/bin/kafka-console-producer.sh --broker-list 127.0.0.1:9092 --topic products`

Enter the following in the console producer (one line at a time) and watch the output on the console (or in your IDE) where the application is running.

```
{"id":"123"}
{"id":"124"}
{"id":"125"}
{"id":"123"}
{"id":"123"}
{"id":"123"}
```
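Under the hood, the 30-second printout comes from an interactive query against the Kafka Streams state store that backs the count. A minimal sketch of that query, assuming the store name `prod-id-count-store` used by the sample's processor; the `ProductCountLookup` class and `currentCount` method are hypothetical helpers, not part of the sample sources:

```
package kafka.streams.product.tracker;

import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.binder.kafka.streams.QueryableStoreRegistry;
import org.springframework.stereotype.Component;

// Sketch only (hypothetical helper): reads the running count for a single product id
// from the "prod-id-count-store" state store that the processor materializes.
@Component
public class ProductCountLookup {

    @Autowired
    private QueryableStoreRegistry queryableStoreRegistry;

    public Long currentCount(Integer productId) {
        ReadOnlyKeyValueStore<Object, Object> counts =
                queryableStoreRegistry.getQueryableStoreType("prod-id-count-store",
                        QueryableStoreTypes.keyValueStore());
        return (Long) counts.get(productId);
    }
}
```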
@@ -0,0 +1,19 @@
version: '3'
services:
  kafka:
    image: wurstmeister/kafka
    container_name: kafka-iq-basic
    ports:
      - "9092:9092"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=127.0.0.1
      - KAFKA_ADVERTISED_PORT=9092
      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
    depends_on:
      - zookeeper
  zookeeper:
    image: wurstmeister/zookeeper
    ports:
      - "2181:2181"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=zookeeper
225
kafka-streams-samples/kafka-streams-interactive-query-basic/mvnw
vendored
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven2 Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For Migwn, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
# TODO classpath?
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <artifactId>kafka-streams-interactive-query-basic</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>

    <name>kafka-streams-interactive-query-basic</name>
    <description>Spring Cloud Stream sample for KStream interactive queries</description>

    <parent>
        <groupId>spring.cloud.stream.samples</groupId>
        <artifactId>spring-cloud-stream-samples-parent</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <relativePath>../..</relativePath>
    </parent>

    <dependencies>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>
@@ -0,0 +1,122 @@
/*
 * Copyright 2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kafka.streams.product.tracker;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.binder.kafka.streams.QueryableStoreRegistry;
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
import org.springframework.kafka.support.serializer.JsonSerde;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.util.StringUtils;

import java.util.Set;
import java.util.stream.Collectors;

@SpringBootApplication
public class KafkaStreamsInteractiveQueryApplication {

    public static void main(String[] args) {
        SpringApplication.run(KafkaStreamsInteractiveQueryApplication.class, args);
    }

    @EnableBinding(KafkaStreamsProcessor.class)
    @EnableConfigurationProperties(ProductTrackerProperties.class)
    @EnableScheduling
    public static class InteractiveProductCountApplication {

        private static final String STORE_NAME = "prod-id-count-store";

        @Autowired
        private QueryableStoreRegistry queryableStoreRegistry;

        @Autowired
        ProductTrackerProperties productTrackerProperties;

        ReadOnlyKeyValueStore<Object, Object> keyValueStore;

        @StreamListener("input")
        @SendTo("output")
        public KStream<Integer, Long> process(KStream<Object, Product> input) {

            return input
                    .filter((key, product) -> productIds().contains(product.getId()))
                    .map((key, value) -> new KeyValue<>(value.id, value))
                    .groupByKey(new Serdes.IntegerSerde(), new JsonSerde<>(Product.class))
                    .count(STORE_NAME)
                    .toStream();
        }

        private Set<Integer> productIds() {
            return StringUtils.commaDelimitedListToSet(productTrackerProperties.getProductIds())
                    .stream().map(Integer::parseInt).collect(Collectors.toSet());
        }

        @Scheduled(fixedRate = 30000, initialDelay = 5000)
        public void printProductCounts() {
            if (keyValueStore == null) {
                keyValueStore = queryableStoreRegistry.getQueryableStoreType(STORE_NAME, QueryableStoreTypes.keyValueStore());
            }

            for (Integer id : productIds()) {
                System.out.println("Product ID: " + id + " Count: " + keyValueStore.get(id));
            }
        }

    }

    @ConfigurationProperties(prefix = "app.product.tracker")
    static class ProductTrackerProperties {

        private String productIds;

        public String getProductIds() {
            return productIds;
        }

        public void setProductIds(String productIds) {
            this.productIds = productIds;
        }

    }

    static class Product {

        Integer id;

        public Integer getId() {
            return id;
        }

        public void setId(Integer id) {
            this.id = id;
        }
    }
}
@@ -0,0 +1,21 @@
spring.cloud.stream.bindings.output.contentType: application/json
spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms: 1000
spring.cloud.stream.kafka.streams:
  binder.configuration:
    key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
    value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
  bindings.output.producer:
    keySerde: org.apache.kafka.common.serialization.Serdes$IntegerSerde
    valueSerde: org.apache.kafka.common.serialization.Serdes$LongSerde
spring.cloud.stream.bindings.output:
  destination: product-counts
  producer:
    headerMode: raw
    useNativeEncoding: true
spring.cloud.stream.bindings.input:
  destination: products
  consumer:
    headerMode: raw
spring.cloud.stream.kafka.streams.binder:
  brokers: localhost #192.168.99.100
  zkNodes: localhost #192.168.99.100
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
        </encoder>
    </appender>
    <root level="INFO">
        <appender-ref ref="stdout"/>
    </root>
    <logger name="org.apache.kafka.streams.processor.internals" level="WARN"/>
</configuration>
24
kafka-streams-samples/kafka-streams-message-channel/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
target/
!.mvn/wrapper/maven-wrapper.jar

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
nbproject/private/
build/
nbbuild/
dist/
nbdist/
.nb-gradle/
BIN
kafka-streams-samples/kafka-streams-message-channel/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
Binary file not shown.
1
kafka-streams-samples/kafka-streams-message-channel/.mvn/wrapper/maven-wrapper.properties
vendored
Normal file
@@ -0,0 +1 @@
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
@@ -0,0 +1,39 @@
== What is this app?

This is an example of a Spring Cloud Stream processor using Kafka Streams support.

The example is based on the word count application from the https://github.com/confluentinc/examples/blob/3.2.x/kafka-streams/src/main/java/io/confluent/examples/streams/WordCountLambdaExample.java[reference documentation].
The application has two inputs: one bound as a KStream and another bound as a regular message channel.

=== Running the app:

Go to the root of the repository and do:

`docker-compose up -d`

`./mvnw clean package`

`java -jar target/kafka-streams-message-channel-0.0.1-SNAPSHOT.jar --spring.cloud.stream.kafka.streams.timeWindow.length=60000`

Issue the following commands:

`docker exec -it kafka-mc /opt/kafka/bin/kafka-console-producer.sh --broker-list 127.0.0.1:9092 --topic words`

On another terminal:

`docker exec -it kafka-mc /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic counts`

Enter some text in the console producer and watch the output in the console consumer.

Also watch the console for logging statements from the regular sink StreamListener method.

The default time window is 5 seconds; you can change it with the following property:

`spring.cloud.stream.kafka.streams.timeWindow.length` (value is expressed in milliseconds)

To switch to a hopping window, also set `spring.cloud.stream.kafka.streams.timeWindow.advanceBy` (value in milliseconds).
This creates overlapping hopping windows; how much they overlap depends on the value you provide.

Here is an example with two overlapping windows (a window length of 10 seconds and a hop (advance) of 5 seconds):

`java -jar target/kafka-streams-message-channel-0.0.1-SNAPSHOT.jar --spring.cloud.stream.kafka.streams.timeWindow.length=10000 --spring.cloud.stream.kafka.streams.timeWindow.advanceBy=5000`
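As a rough sketch of the shape of processor this sample describes (the actual sources are not shown in this hunk, so the class name, the binding names `input`, `output`, and `sink`, and the hard-coded 5-second window are illustrative assumptions; the real sample derives its window from the `timeWindow.length`/`timeWindow.advanceBy` properties above):

```
package kafka.streams.message.channel; // hypothetical package, for illustration only

import java.util.Arrays;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Serialized;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

public class WordCountProcessorSketch {

    // Kafka Streams side: split lines into words, group by word, count per time window.
    // TimeWindows.of(5000) stands in for the window the binder builds from the properties.
    @StreamListener("input")
    @SendTo("output")
    public KStream<?, String> process(KStream<Object, String> input) {
        return input
                .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
                .map((key, word) -> new KeyValue<>(word, word))
                .groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
                .windowedBy(TimeWindows.of(5000))
                .count()
                .toStream()
                .map((windowedWord, count) -> new KeyValue<>(null, windowedWord.key() + ": " + count));
    }

    // Message-channel side: an ordinary StreamListener sink that just logs what arrives.
    @StreamListener("sink")
    public void logCounts(String count) {
        System.out.println("received: " + count);
    }
}
```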
@@ -0,0 +1,19 @@
version: '3'
services:
  kafka:
    image: wurstmeister/kafka
    container_name: kafka-mc
    ports:
      - "9092:9092"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=127.0.0.1
      - KAFKA_ADVERTISED_PORT=9092
      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
    depends_on:
      - zookeeper
  zookeeper:
    image: wurstmeister/zookeeper
    ports:
      - "2181:2181"
    environment:
      - KAFKA_ADVERTISED_HOST_NAME=zookeeper
225
kafka-streams-samples/kafka-streams-message-channel/mvnw
vendored
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven2 Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For Migwn, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
# TODO classpath?
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
143
kafka-streams-samples/kafka-streams-message-channel/mvnw.cmd
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM https://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Maven2 Start Up Batch script
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM M2_HOME - location of maven2's installed home dir
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
|
||||
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
|
||||
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
|
||||
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
46
kafka-streams-samples/kafka-streams-message-channel/pom.xml
Normal file
46
kafka-streams-samples/kafka-streams-message-channel/pom.xml
Normal file
@@ -0,0 +1,46 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>kafka-streams-message-channel</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<name>kafka-streams-message-channel</name>
|
||||
<description>Demo project for Spring Boot</description>
|
||||
|
||||
<parent>
|
||||
<groupId>spring.cloud.stream.samples</groupId>
|
||||
<artifactId>spring-cloud-stream-samples-parent</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<relativePath>../..</relativePath>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
@@ -0,0 +1,137 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.message.channel;
|
||||
|
||||
import org.apache.kafka.common.serialization.Serdes;
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.Materialized;
|
||||
import org.apache.kafka.streams.kstream.Serialized;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.Input;
|
||||
import org.springframework.cloud.stream.annotation.Output;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.messaging.SubscribableChannel;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
|
||||
@SpringBootApplication
|
||||
public class KafkaStreamsWordCountApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(KafkaStreamsWordCountApplication.class, args);
|
||||
}
|
||||
|
||||
@EnableBinding(MultipleProcessor.class)
|
||||
public static class WordCountProcessorApplication {
|
||||
|
||||
@Autowired
|
||||
private TimeWindows timeWindows;
|
||||
|
||||
@StreamListener("binding2")
|
||||
@SendTo("singleOutput")
|
||||
public KStream<?, WordCount> process(KStream<Object, String> input) {
|
||||
|
||||
return input
|
||||
.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
|
||||
.windowedBy(timeWindows)
|
||||
.count(Materialized.as("WordCounts-1"))
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
|
||||
}
|
||||
|
||||
@StreamListener("binding1")
|
||||
public void sink(String input) {
|
||||
System.out.println("FOOBAR -- " + input);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface MultipleProcessor {
|
||||
|
||||
String BINDING_1 = "binding1";
|
||||
String BINDING_2 = "binding2";
|
||||
String OUTPUT = "singleOutput";
|
||||
|
||||
@Input(BINDING_1)
|
||||
SubscribableChannel binding1();
|
||||
|
||||
@Input(BINDING_2)
|
||||
KStream<?, ?> binding2();
|
||||
|
||||
@Output(OUTPUT)
|
||||
KStream<?, ?> singleOutput();
|
||||
}
|
||||
|
||||
static class WordCount {
|
||||
|
||||
private String word;
|
||||
|
||||
private long count;
|
||||
|
||||
private Date start;
|
||||
|
||||
private Date end;
|
||||
|
||||
WordCount(String word, long count, Date start, Date end) {
|
||||
this.word = word;
|
||||
this.count = count;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
public String getWord() {
|
||||
return word;
|
||||
}
|
||||
|
||||
public void setWord(String word) {
|
||||
this.word = word;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public Date getStart() {
|
||||
return start;
|
||||
}
|
||||
|
||||
public void setStart(Date start) {
|
||||
this.start = start;
|
||||
}
|
||||
|
||||
public Date getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
public void setEnd(Date end) {
|
||||
this.end = end;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
spring.cloud.stream.bindings.singleOutput.contentType: application/json
|
||||
spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms: 1000
|
||||
spring.cloud.stream.kafka.streams.binder.configuration:
|
||||
default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
spring.cloud.stream.bindings.singleOutput:
|
||||
destination: counts
|
||||
producer:
|
||||
headerMode: raw
|
||||
#useNativeEncoding: true
|
||||
spring.cloud.stream.bindings.binding2:
|
||||
destination: words
|
||||
consumer:
|
||||
headerMode: raw
|
||||
spring.cloud.stream.bindings.binding1:
|
||||
destination: words
|
||||
consumer:
|
||||
headerMode: raw
|
||||
spring.cloud.stream.kafka.streams.binder:
|
||||
brokers: localhost #192.168.99.100
|
||||
zkNodes: localhost #192.168.99.100
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration>
|
||||
<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
<root level="INFO">
|
||||
<appender-ref ref="stdout"/>
|
||||
</root>
|
||||
<logger name="org.apache.kafka.streams.processor.internals" level="WARN"/>
|
||||
</configuration>
|
||||
@@ -0,0 +1,18 @@
|
||||
package kafka.streams.message.channel;
|
||||
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
|
||||
@RunWith(SpringRunner.class)
|
||||
@SpringBootTest
|
||||
public class KafkaStreamsWordCountApplicationTests {
|
||||
|
||||
@Test
|
||||
@Ignore
|
||||
public void contextLoads() {
|
||||
}
|
||||
|
||||
}
|
||||
BIN
kafka-streams-samples/kafka-streams-product-tracker/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
BIN
kafka-streams-samples/kafka-streams-product-tracker/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
Binary file not shown.
1
kafka-streams-samples/kafka-streams-product-tracker/.mvn/wrapper/maven-wrapper.properties
vendored
Normal file
1
kafka-streams-samples/kafka-streams-product-tracker/.mvn/wrapper/maven-wrapper.properties
vendored
Normal file
@@ -0,0 +1 @@
|
||||
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
|
||||
@@ -0,0 +1,47 @@
|
||||
== What is this app?

This is an example of a Spring Cloud Stream processor using Kafka Streams support.

The example is based on a contrived use case of tracking products.
Although contrived, this type of use case is quite common in the industry, where organizations want to track statistics about certain entities over a time window.
In essence, the application receives product information from an input topic, counts the products of interest within a configurable time window, and reports the counts to an output topic.
This sample uses lambda expressions and thus requires Java 8+.
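
For orientation, the core of the processor (shown in full later in this diff) is a windowed count over the incoming `KStream`. The following is a condensed excerpt of that class with comments added; it assumes the surrounding members defined in the full source (`timeWindows`, `productIds()`, and the `Product`/`ProductStatus` types):

```java
@StreamListener("input")
@SendTo("output")
public KStream<Integer, ProductStatus> process(KStream<Object, Product> input) {
    return input
            // keep only the products listed in app.product.tracker.productIds
            .filter((key, product) -> productIds().contains(product.getId()))
            // re-key by the product itself so counts are tracked per product
            .map((key, value) -> new KeyValue<>(value, value))
            .groupByKey(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class))
            // count per (product, time window); window length/advance come from configuration
            .count(timeWindows, "product-counts")
            .toStream()
            // emit one ProductStatus per product and window, keyed by product id
            .map((key, value) -> new KeyValue<>(key.key().getId(),
                    new ProductStatus(key.key().getId(), value,
                            Instant.ofEpochMilli(key.window().start()).atZone(ZoneId.systemDefault()).toLocalTime(),
                            Instant.ofEpochMilli(key.window().end()).atZone(ZoneId.systemDefault()).toLocalTime())));
}
```
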
=== Running the app:

Go to the root of the repository and do:

`docker-compose up -d`

`./mvnw clean package`

`java -jar target/kafka-streams-product-tracker-0.0.1-SNAPSHOT.jar --app.product.tracker.productIds=123,124,125 --spring.cloud.stream.kafka.streams.timeWindow.length=60000 --spring.cloud.stream.kafka.streams.timeWindow.advanceBy=30000`

The above command tracks products with IDs 123, 124, and 125, reporting every 30 seconds the counts from the last minute.
In other words, every 30 seconds a new one-minute window is started.

Issue the following commands:

`docker exec -it kafka-prod-tracker /opt/kafka/bin/kafka-console-producer.sh --broker-list 127.0.0.1:9092 --topic products`

On another terminal:

`docker exec -it kafka-prod-tracker /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --key-deserializer org.apache.kafka.common.serialization.IntegerDeserializer --property print.key=true --topic product-counts`

Enter the following in the console producer (one line at a time) and watch the output on the console consumer:

```
{"id":"123"}
{"id":"124"}
{"id":"125"}
{"id":"123"}
{"id":"123"}
{"id":"123"}
```

The default time window is configured for 30 seconds; you can change it with the following property:

`spring.cloud.stream.kafka.streams.timeWindow.length` (value is expressed in milliseconds)

To switch to a hopping window, set `spring.cloud.stream.kafka.streams.timeWindow.advanceBy` (value in milliseconds).
This creates overlapping hopping windows whose overlap depends on the value you provide.
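
The sample's processor autowires a `TimeWindows` bean configured through these properties. A minimal sketch of the equivalent window definition, assuming the Kafka Streams 1.x-era API used elsewhere in this diff (`TimeWindows.of(long)` / `advanceBy(long)`); the class and method names here are illustrative only:

```java
import org.apache.kafka.streams.kstream.TimeWindows;

class TimeWindowExample {

    // Equivalent of --spring.cloud.stream.kafka.streams.timeWindow.length=60000
    // and --spring.cloud.stream.kafka.streams.timeWindow.advanceBy=30000:
    // a 60-second window, with a new window starting every 30 seconds.
    static TimeWindows hoppingWindow() {
        return TimeWindows.of(60_000L).advanceBy(30_000L);
    }
}
```
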
@@ -0,0 +1,19 @@
|
||||
version: '3'
|
||||
services:
|
||||
kafka:
|
||||
image: wurstmeister/kafka
|
||||
container_name: kafka-prod-tracker
|
||||
ports:
|
||||
- "9092:9092"
|
||||
environment:
|
||||
- KAFKA_ADVERTISED_HOST_NAME=127.0.0.1
|
||||
- KAFKA_ADVERTISED_PORT=9092
|
||||
- KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
|
||||
depends_on:
|
||||
- zookeeper
|
||||
zookeeper:
|
||||
image: wurstmeister/zookeeper
|
||||
ports:
|
||||
- "2181:2181"
|
||||
environment:
|
||||
- KAFKA_ADVERTISED_HOST_NAME=zookeeper
|
||||
225
kafka-streams-samples/kafka-streams-product-tracker/mvnw
vendored
Executable file
225
kafka-streams-samples/kafka-streams-product-tracker/mvnw
vendored
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven2 Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For MinGW, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
# TODO classpath?
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
42
kafka-streams-samples/kafka-streams-product-tracker/pom.xml
Normal file
42
kafka-streams-samples/kafka-streams-product-tracker/pom.xml
Normal file
@@ -0,0 +1,42 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>kafka-streams-product-tracker</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<name>kafka-streams-product-tracker</name>
|
||||
<description>Demo project for Spring Boot</description>
|
||||
|
||||
<parent>
|
||||
<groupId>spring.cloud.stream.samples</groupId>
|
||||
<artifactId>spring-cloud-stream-samples-parent</artifactId>
|
||||
<version>0.0.1-SNAPSHOT</version>
|
||||
<relativePath>../..</relativePath>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-stream-binder-kafka-streams</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
@@ -0,0 +1,152 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package kafka.streams.product.tracker;
|
||||
|
||||
import org.apache.kafka.streams.KeyValue;
|
||||
import org.apache.kafka.streams.kstream.KStream;
|
||||
import org.apache.kafka.streams.kstream.TimeWindows;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.stream.annotation.EnableBinding;
|
||||
import org.springframework.cloud.stream.annotation.StreamListener;
|
||||
import org.springframework.cloud.stream.binder.kafka.streams.annotations.KafkaStreamsProcessor;
|
||||
import org.springframework.kafka.support.serializer.JsonSerde;
|
||||
import org.springframework.messaging.handler.annotation.SendTo;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.LocalTime;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@SpringBootApplication
|
||||
public class KafkaStreamsProductTrackerApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(KafkaStreamsProductTrackerApplication.class, args);
|
||||
}
|
||||
|
||||
@EnableBinding(KafkaStreamsProcessor.class)
|
||||
@EnableConfigurationProperties(ProductTrackerProperties.class)
|
||||
public static class ProductCountApplication {
|
||||
|
||||
@Autowired
|
||||
ProductTrackerProperties productTrackerProperties;
|
||||
|
||||
@Autowired
|
||||
TimeWindows timeWindows;
|
||||
|
||||
@StreamListener("input")
|
||||
@SendTo("output")
|
||||
public KStream<Integer, ProductStatus> process(KStream<Object, Product> input) {
|
||||
return input
|
||||
.filter((key, product) -> productIds().contains(product.getId()))
|
||||
.map((key, value) -> new KeyValue<>(value, value))
|
||||
.groupByKey(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class))
|
||||
.count(timeWindows, "product-counts")
|
||||
.toStream()
|
||||
.map((key, value) -> new KeyValue<>(key.key().id, new ProductStatus(key.key().id,
|
||||
value, Instant.ofEpochMilli(key.window().start()).atZone(ZoneId.systemDefault()).toLocalTime(),
|
||||
Instant.ofEpochMilli(key.window().end()).atZone(ZoneId.systemDefault()).toLocalTime())));
|
||||
}
|
||||
|
||||
private Set<Integer> productIds() {
|
||||
return StringUtils.commaDelimitedListToSet(productTrackerProperties.getProductIds())
|
||||
.stream().map(Integer::parseInt).collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ConfigurationProperties(prefix = "app.product.tracker")
|
||||
static class ProductTrackerProperties {
|
||||
|
||||
private String productIds;
|
||||
|
||||
public String getProductIds() {
|
||||
return productIds;
|
||||
}
|
||||
|
||||
public void setProductIds(String productIds) {
|
||||
this.productIds = productIds;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class Product {
|
||||
|
||||
Integer id;
|
||||
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
||||
|
||||
static class ProductStatus {
|
||||
private Integer id;
|
||||
private long count;
|
||||
private LocalTime windowStart;
|
||||
private LocalTime windowEnd;
|
||||
|
||||
public ProductStatus(Integer id, long count, LocalTime windowStart, LocalTime windowEnd) {
|
||||
this.id = id;
|
||||
this.count = count;
|
||||
this.windowStart = windowStart;
|
||||
this.windowEnd = windowEnd;
|
||||
}
|
||||
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(long count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public LocalTime getWindowStart() {
|
||||
return windowStart;
|
||||
}
|
||||
|
||||
public void setWindowStart(LocalTime windowStart) {
|
||||
this.windowStart = windowStart;
|
||||
}
|
||||
|
||||
public LocalTime getWindowEnd() {
|
||||
return windowEnd;
|
||||
}
|
||||
|
||||
public void setWindowEnd(LocalTime windowEnd) {
|
||||
this.windowEnd = windowEnd;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
spring.cloud.stream.bindings.output.contentType: application/json
|
||||
spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms: 1000
|
||||
spring.cloud.stream.kafka.streams:
|
||||
binder.configuration:
|
||||
key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
|
||||
bindings.output.producer:
|
||||
keySerde: org.apache.kafka.common.serialization.Serdes$IntegerSerde
|
||||
spring.cloud.stream.bindings.output:
|
||||
destination: product-counts
|
||||
producer:
|
||||
headerMode: raw
|
||||
#useNativeEncoding: true
|
||||
spring.cloud.stream.bindings.input:
|
||||
destination: products
|
||||
consumer:
|
||||
headerMode: raw
|
||||
spring.cloud.stream.kafka.streams.binder:
|
||||
brokers: localhost #192.168.99.100
|
||||
zkNodes: localhost #192.168.99.100
|
||||
@@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration>
|
||||
<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d{ISO8601} %5p %t %c{2}:%L - %m%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
<root level="INFO">
|
||||
<appender-ref ref="stdout"/>
|
||||
</root>
|
||||
<logger name="org.apache.kafka.streams.processor.internals" level="WARN"/>
|
||||
</configuration>
|
||||
24
kafka-streams-samples/kafka-streams-table-join/.gitignore
vendored
Normal file
24
kafka-streams-samples/kafka-streams-table-join/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
target/
|
||||
!.mvn/wrapper/maven-wrapper.jar
|
||||
|
||||
### STS ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
|
||||
### NetBeans ###
|
||||
nbproject/private/
|
||||
build/
|
||||
nbbuild/
|
||||
dist/
|
||||
nbdist/
|
||||
.nb-gradle/
|
||||
BIN
kafka-streams-samples/kafka-streams-table-join/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
BIN
kafka-streams-samples/kafka-streams-table-join/.mvn/wrapper/maven-wrapper.jar
vendored
Normal file
Binary file not shown.
1
kafka-streams-samples/kafka-streams-table-join/.mvn/wrapper/maven-wrapper.properties
vendored
Normal file
1
kafka-streams-samples/kafka-streams-table-join/.mvn/wrapper/maven-wrapper.properties
vendored
Normal file
@@ -0,0 +1 @@
|
||||
distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
|
||||
Some files were not shown because too many files have changed in this diff