diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..48070b1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +**/*.iml +**/target +.idea \ No newline at end of file diff --git a/README.md b/README.md index e6f652d..86a1c1f 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,48 @@ -# spring-kafka-event-sourcing-sampler -Showcases how to build a small Event-sourced application using Spring Boot, Spring Kafka, Apache Avro and Apache Kafka +# Event Sourcing using Spring Kafka + +This repository contains a sample application that demonstrates how to implement an Event-sourced system using the CQRS architectural style. The solution uses Apache Kafka, which we easily integrate into a Spring Boot based application using Spring Kafka, Apache Avro for event serialization and deserialization and uses an in-memory H2 database that contributes to the query side of our CQRS-based system. The application itself is minimal and implements a subset of David Allen's Getting Things Done time management method. + +The code presented in this repository is the joint work of [Boris Fresow](mailto:bfresow@gmail.com) and [Markus Günther](mailto:markus.guenther@gmail.com) as part of an article series on **Building Event-based applications with Spring Kafka** for the German [JavaMagazin](https://jaxenter.de/magazine/java-magazin). + +## Prerequisites + +Running the showcase requires a working installation of Apache ZooKeeper and Apache Kafka. We provide `Dockerfile`s for both of them to get you started easily. Please make sure that [Docker](https://docs.docker.com/engine/installation/) as well as [Docker Compose](https://docs.docker.com/compose/install/) are installed on your system. 
+ +### Versions + +| Application | Version | Docker Image | +| ------------------- | --------- | ----------------------- | +| Apache Kafka | 0.11.0.0 | getting-things-done/kafka | +| Apache ZooKeeper | 3.4.8-1 | getting-things-done/zookeeper | + +### Building and Running the Containers + +Before you execute the code samples, make sure that you have a working environment running. If you have not done it already, use the script ```docker/build-images``` to create Docker images for all required applications. After a couple of minutes, you should be ready to go. + +Once the images have been successfully built, you can start the resp. containers using the provided ```docker-compose``` script. Simply issue + +```bash +$ docker-compose up +``` + +for starting Apache Kafka, Apache Zookeeper and Yahoo Kafka Manager. Stopping the containers is best done using a separate terminal and issuing the following commands. + +```bash +$ docker-compose stop +$ docker-compose rm +``` + +The final ```rm``` operation deletes the containers and thus clears all state so you can start over with a clean installation. + +For simplicity, we restrict the Kafka cluster to a single Kafka broker. However, scaling to more Kafka brokers is easily done via `docker-compose`. You will have to provide a sensible value for `KAFKA_ADVERTISED_HOST_NAME` (other than `localhost`) for this to work, though. + +```bash +$ docker-compose scale kafka=3 # scales up to 3 Kafka brokers +$ docker-compose scale kafka=1 # scales down to 1 Kafka broker after the previous upscale +``` + +After changing the number of Kafka brokers, give the cluster some time so that all brokers can finish their cluster-join procedure. This should complete in a couple of seconds and you can inspect the output of the resp. Docker containers just to be sure that everything is fine. Kafka Manager should also reflect the change in the number of Kafka brokers after they successfully joined the cluster. 
+ +## License + +This work is released under the terms of the MIT license. diff --git a/docker/build-images b/docker/build-images new file mode 100755 index 0000000..f2013c3 --- /dev/null +++ b/docker/build-images @@ -0,0 +1,8 @@ +#!/bin/bash + +docker build -t getting-things-done/kafka kafka/ +docker build -t getting-things-done/zookeeper zookeeper +docker build -t getting-things-done/api-gateway ../gtd-api-gateway +docker build -t getting-things-done/discovery-service ../gtd-discovery-service +docker build -t getting-things-done/gtd-command-side ../gtd-command-side +docker build -t getting-things-done/gtd-query-side ../gtd-query-side diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 0000000..b005440 --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,56 @@ +version: '2' +services: + zookeeper: + image: getting-things-done/zookeeper + ports: + - "2181:2181" + kafka: + image: getting-things-done/kafka + links: + - zookeeper + environment: + KAFKA_ADVERTISED_PORT: 9092 + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_DEFAULT_REPLICATION_FACTOR: 1 + KAFKA_NUM_PARTITIONS: 5 + KAFKA_CREATE_TOPICS: "topic-getting-things-done:5:1" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + discovery: + image: getting-things-done/discovery-service + ports: + - "8761:8761" + environment: + eureka.instance.hostname: discovery + gateway: + image: getting-things-done/api-gateway + links: + - discovery + - gtd-command-side + - gtd-query-side + ports: + - "8765:8765" + environment: + EUREKA_CLIENT_SERVICEURL_DEFAULTZONE: http://discovery:8761/eureka + gtd-command-side: + image: getting-things-done/gtd-command-side + links: + - discovery + ports: + - "8090:8090" + environment: + SERVER_PORT: 8090 + SPRING_KAFKA_BOOTSTRAP_SERVERS: kafka:9092 + SPRING_KAFKA_PRODUCER_BOOTSTRAP_SERVERS: kafka:9092 + EUREKA_CLIENT_SERVICEURL_DEFAULTZONE: http://discovery:8761/eureka/ + gtd-query-side: + image: 
getting-things-done/gtd-query-side + links: + - discovery + ports: + - "8089:8089" + environment: + SERVER_PORT: 8089 + SPRING_KAFKA_BOOTSTRAP_SERVERS: kafka:9092 + EUREKA_CLIENT_SERVICEURL_DEFAULTZONE: http://discovery:8761/eureka/ diff --git a/docker/kafka/Dockerfile b/docker/kafka/Dockerfile new file mode 100644 index 0000000..cd4d320 --- /dev/null +++ b/docker/kafka/Dockerfile @@ -0,0 +1,32 @@ +FROM phusion/baseimage:latest + +MAINTAINER Markus Günther + +ENV DEBIAN_FRONTEND noninteractive +ENV SCALA_VERSION 2.11 +ENV KAFKA_VERSION 0.11.0.0 +ENV KAFKA_HOME /opt/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION" + +# Install Oracle Java 8, some utilities and Kafka +RUN \ + echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \ + add-apt-repository -y ppa:webupd8team/java && \ + apt-get update && \ + apt-get install -y oracle-java8-installer && \ + apt-get install -y wget supervisor dnsutils curl jq coreutils docker net-tools && \ + rm -rf /var/lib/apt/lists/* && \ + rm -rf /var/cache/oracle-jdk8-installer && \ + apt-get clean && \ + wget -q http://apache.mirrors.spacedump.net/kafka/"$KAFKA_VERSION"/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz -O /tmp/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz && \ + tar xfz /tmp/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz -C /opt && \ + rm /tmp/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz + +VOLUME ["/kafka"] + +ADD kafka-start /usr/bin/kafka-start +ADD kafka-create-topics /usr/bin/kafka-create-topics +ADD supervisord.conf /etc/supervisor/conf.d/ + +EXPOSE 9092 + +CMD ["/sbin/my_init", "kafka-start"] \ No newline at end of file diff --git a/docker/kafka/kafka-create-topics b/docker/kafka/kafka-create-topics new file mode 100755 index 0000000..fd36454 --- /dev/null +++ b/docker/kafka/kafka-create-topics @@ -0,0 +1,36 @@ +#!/bin/bash + + +if [[ -z "$START_TIMEOUT" ]]; then + START_TIMEOUT=600 +fi + +start_timeout_exceeded=false +count=0 +step=10 +while netstat -lnt | awk '$4 ~ 
/:'$KAFKA_PORT'$/ {exit 1}'; do + echo "waiting for kafka to be ready" + sleep $step; + count=$(expr $count + $step) + if [ $count -gt $START_TIMEOUT ]; then + start_timeout_exceeded=true + break + fi +done + +if $start_timeout_exceeded; then + echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)" + exit 1 +fi + +if [[ -n $KAFKA_CREATE_TOPICS ]]; then + IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do + echo "creating topics: $topicToCreate" + IFS=':' read -a topicConfig <<< "$topicToCreate" + if [ ${topicConfig[3]} ]; then + JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --config cleanup.policy="${topicConfig[3]}" --if-not-exists + else + JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --if-not-exists + fi + done +fi \ No newline at end of file diff --git a/docker/kafka/kafka-start b/docker/kafka/kafka-start new file mode 100755 index 0000000..4de0e63 --- /dev/null +++ b/docker/kafka/kafka-start @@ -0,0 +1,138 @@ +#!/bin/bash + +if [[ -z "$KAFKA_PORT" ]]; then + export KAFKA_PORT=9092 +fi + +kafka-create-topics & + +if [[ -z "$KAFKA_ADVERTISED_PORT" && \ + -z "$KAFKA_LISTENERS" && \ + -z "$KAFKA_ADVERTISED_LISTENERS" && \ + -S /var/run/docker.sock ]]; then + export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g") +fi +if [[ -z "$KAFKA_BROKER_ID" ]]; then + if [[ -n "$BROKER_ID_COMMAND" ]]; then + export KAFKA_BROKER_ID=$(eval $BROKER_ID_COMMAND) + else + # By default auto allocate broker ID + export KAFKA_BROKER_ID=-1 + fi +fi +if [[ -z "$KAFKA_LOG_DIRS" ]]; then + export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME" +fi +if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then + export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed
-e 's|.*tcp://||' | paste -sd ,) +fi + +if [[ -n "$KAFKA_HEAP_OPTS" ]]; then + sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh + unset KAFKA_HEAP_OPTS +fi + +if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then + export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND) +fi + +if [[ -n "$KAFKA_LISTENER_SECURITY_PROTOCOL_MAP" ]]; then + if [[ -n "$KAFKA_ADVERTISED_PORT" && -n "$KAFKA_ADVERTISED_PROTOCOL_NAME" ]]; then + export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_PROTOCOL_NAME}://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT}" + export KAFKA_LISTENERS="$KAFKA_ADVERTISED_PROTOCOL_NAME://:$KAFKA_ADVERTISED_PORT" + fi + + if [[ -z "$KAFKA_PROTOCOL_NAME" ]]; then + export KAFKA_PROTOCOL_NAME="${KAFKA_ADVERTISED_PROTOCOL_NAME}" + fi + + if [[ -n "$KAFKA_PORT" && -n "$KAFKA_PROTOCOL_NAME" ]]; then + export ADD_LISTENER="${KAFKA_PROTOCOL_NAME}://${KAFKA_HOST_NAME-}:${KAFKA_PORT}" + fi + + if [[ -z "$KAFKA_INTER_BROKER_LISTENER_NAME" ]]; then + export KAFKA_INTER_BROKER_LISTENER_NAME=$KAFKA_PROTOCOL_NAME + fi +else + #DEFAULT LISTENERS + export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT-$KAFKA_PORT}" + export KAFKA_LISTENERS="PLAINTEXT://${KAFKA_HOST_NAME-}:${KAFKA_PORT-9092}" +fi + +if [[ -n "$ADD_LISTENER" && -n "$KAFKA_LISTENERS" ]]; then + export KAFKA_LISTENERS="${KAFKA_LISTENERS},${ADD_LISTENER}" +fi + +if [[ -n "$ADD_LISTENER" && -z "$KAFKA_LISTENERS" ]]; then + export KAFKA_LISTENERS="${ADD_LISTENER}" +fi + +if [[ -n "$ADD_LISTENER" && -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then + export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${ADD_LISTENER}" +fi + +if [[ -n "$ADD_LISTENER" && -z "$KAFKA_ADVERTISED_LISTENERS" ]]; then + export KAFKA_ADVERTISED_LISTENERS="${ADD_LISTENER}" +fi + +if [[ -n "$KAFKA_INTER_BROKER_LISTENER_NAME" && ! 
"$KAFKA_INTER_BROKER_LISTENER_NAME"X = "$KAFKA_PROTOCOL_NAME"X ]]; then + if [[ -n "$KAFKA_INTER_BROKER_PORT" ]]; then + export KAFKA_INTER_BROKER_PORT=$(( $KAFKA_PORT + 1 )) + fi + export INTER_BROKER_LISTENER="${KAFKA_INTER_BROKER_LISTENER_NAME}://:${KAFKA_INTER_BROKER_PORT}" + export KAFKA_LISTENERS="${KAFKA_LISTENERS},${INTER_BROKER_LISTENER}" + export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${INTER_BROKER_LISTENER}" + unset KAFKA_INTER_BROKER_PORT + unset KAFKA_SECURITY_INTER_BROKER_PROTOCOL + unset INTER_BROKER_LISTENER +fi + +if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then + export KAFKA_BROKER_RACK=$(eval $RACK_COMMAND) +fi + +#Issue newline to config file in case there is not one already +echo -e "\n" >> $KAFKA_HOME/config/server.properties + +unset KAFKA_CREATE_TOPICS +unset KAFKA_ADVERTISED_PROTOCOL_NAME +unset KAFKA_PROTOCOL_NAME + +if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then + unset KAFKA_ADVERTISED_PORT + unset KAFKA_ADVERTISED_HOST_NAME +fi + +if [[ -n "$KAFKA_LISTENERS" ]]; then + unset KAFKA_PORT + unset KAFKA_HOST_NAME +fi + +for VAR in `env` +do + if [[ $VAR =~ ^KAFKA_ && ! 
$VAR =~ ^KAFKA_HOME ]]; then + kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .` + env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"` + if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then + sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char + else + echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties + fi + fi + + if [[ $VAR =~ ^LOG4J_ ]]; then + log4j_name=`echo "$VAR" | sed -r "s/(LOG4J_.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .` + log4j_env=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"` + if egrep -q "(^|^#)$log4j_name=" $KAFKA_HOME/config/log4j.properties; then + sed -r -i "s@(^|^#)($log4j_name)=(.*)@\2=${!log4j_env}@g" $KAFKA_HOME/config/log4j.properties #note that no config values may contain an '@' char + else + echo "$log4j_name=${!log4j_env}" >> $KAFKA_HOME/config/log4j.properties + fi + fi +done + +if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then + eval $CUSTOM_INIT_SCRIPT +fi + +exec $KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties \ No newline at end of file diff --git a/docker/kafka/supervisord.conf b/docker/kafka/supervisord.conf new file mode 100644 index 0000000..66c8d18 --- /dev/null +++ b/docker/kafka/supervisord.conf @@ -0,0 +1,7 @@ +[supervisord] +nodaemon=true + +[program:kafka] +command=/usr/bin/kafka-start +autostart=true +autorestart=true \ No newline at end of file diff --git a/docker/zookeeper/Dockerfile b/docker/zookeeper/Dockerfile new file mode 100644 index 0000000..8d9d386 --- /dev/null +++ b/docker/zookeeper/Dockerfile @@ -0,0 +1,20 @@ +FROM phusion/baseimage:latest + +MAINTAINER Markus Guenther + +ENV ZK_VERSION 3.4.8-1 +ENV ZK_HOME /usr/share/zookeeper + +RUN apt-get update && \ + apt-get install -y zookeeper=$ZK_VERSION supervisor dnsutils && \ + rm -rf /var/lib/apt/lists/* && \ + apt-get clean && \ + apt-cache policy zookeeper + +VOLUME 
["/zookeeper"] + +ADD supervisord.conf /etc/supervisor/conf.d/ + +EXPOSE 2181 2888 3888 + +CMD ["/sbin/my_init", "supervisord"] \ No newline at end of file diff --git a/docker/zookeeper/supervisord.conf b/docker/zookeeper/supervisord.conf new file mode 100644 index 0000000..1c4e634 --- /dev/null +++ b/docker/zookeeper/supervisord.conf @@ -0,0 +1,7 @@ +[supervisord] +nodaemon=true + +[program:zookeeper] +command=/usr/share/zookeeper/bin/zkServer.sh start-foreground +autostart=true +autorestart=true \ No newline at end of file diff --git a/gtd-api-gateway/Dockerfile b/gtd-api-gateway/Dockerfile new file mode 100644 index 0000000..d5618ba --- /dev/null +++ b/gtd-api-gateway/Dockerfile @@ -0,0 +1,4 @@ +FROM ewolff/docker-java +COPY target/gtd-api-gateway-0.1.0-SNAPSHOT.jar . +CMD /usr/bin/java -Xmx256m -Xms256m -jar gtd-api-gateway-0.1.0-SNAPSHOT.jar +EXPOSE 8765 \ No newline at end of file diff --git a/gtd-api-gateway/pom.xml b/gtd-api-gateway/pom.xml new file mode 100644 index 0000000..81d672d --- /dev/null +++ b/gtd-api-gateway/pom.xml @@ -0,0 +1,50 @@ + + + + 4.0.0 + + + net.mguenther.gtd + gtd-parent + 0.1.0-SNAPSHOT + + + gtd-api-gateway + 0.1.0-SNAPSHOT + {GTD] API Gateway + https://github.com/mguenther/spring-kafka-event-sourcing-sampler + + + UTF-8 + UTF-8 + 1.8 + + + + + org.springframework.cloud + spring-cloud-starter-zuul + + + org.springframework.cloud + spring-cloud-starter-eureka + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/gtd-api-gateway/src/main/java/net/mguenther/gtd/GtdApiGatewayApplication.java b/gtd-api-gateway/src/main/java/net/mguenther/gtd/GtdApiGatewayApplication.java new file mode 100644 index 0000000..a1a858c --- /dev/null +++ b/gtd-api-gateway/src/main/java/net/mguenther/gtd/GtdApiGatewayApplication.java @@ -0,0 +1,26 @@ +package net.mguenther.gtd; + +import org.springframework.boot.autoconfigure.SpringBootApplication; +import 
org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.cloud.netflix.eureka.EnableEurekaClient; +import org.springframework.cloud.netflix.zuul.EnableZuulProxy; +import org.springframework.context.annotation.Bean; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@SpringBootApplication +@EnableZuulProxy +@EnableEurekaClient +public class GtdApiGatewayApplication { + + public static void main(String[] args) { + new SpringApplicationBuilder(GtdApiGatewayApplication.class).web(true).run(args); + } + + @Bean + public OnMethodFilter onMethodZuulFilter() { + return new OnMethodFilter(); + } +} diff --git a/gtd-api-gateway/src/main/java/net/mguenther/gtd/OnMethodFilter.java b/gtd-api-gateway/src/main/java/net/mguenther/gtd/OnMethodFilter.java new file mode 100644 index 0000000..733bfcb --- /dev/null +++ b/gtd-api-gateway/src/main/java/net/mguenther/gtd/OnMethodFilter.java @@ -0,0 +1,61 @@ +package net.mguenther.gtd; + +import com.netflix.zuul.ZuulFilter; +import com.netflix.zuul.context.RequestContext; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.netflix.zuul.filters.support.FilterConstants; + +import java.util.Arrays; +import java.util.List; +import java.util.UUID; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class OnMethodFilter extends ZuulFilter { + + private static final Logger log = LoggerFactory.getLogger(OnMethodFilter.class); + private static final List methodsForCommands = Arrays.asList("POST", "PUT", "PATCH", "DELETE"); + + @Override + public String filterType() { + return FilterConstants.PRE_TYPE; + } + + @Override + public int filterOrder() { + return FilterConstants.PRE_DECORATION_FILTER_ORDER - 1; + } + + @Override + public boolean shouldFilter() { + return true; + } + + @Override + public Object 
run() { + final RequestContext ctx = RequestContext.getCurrentContext(); + final String method = ctx.getRequest().getMethod(); + if (isCommand(ctx)) { + log.info("Resolved incoming request using method {} to service ID 'gtd-es-command-side'.", method); + ctx.set("serviceId", "gtd-es-command-side"); + ctx.setRouteHost(null); + ctx.addOriginResponseHeader("X-Zuul-ServiceId", UUID.randomUUID().toString()); + } else { + log.info("Resolved incoming request using method {} to service ID 'gtd-es-query-side'.", method); + ctx.set("serviceId", "gtd-es-query-side"); + ctx.setRouteHost(null); + ctx.addOriginResponseHeader("X-Zuul-ServiceId", UUID.randomUUID().toString()); + } + return null; + } + + private boolean isCommand(final RequestContext ctx) { + return + StringUtils.isNotEmpty(ctx.getRequest().getMethod()) && + methodsForCommands.contains(ctx.getRequest().getMethod().toUpperCase()); + } +} diff --git a/gtd-api-gateway/src/main/resources/application.properties b/gtd-api-gateway/src/main/resources/application.properties new file mode 100644 index 0000000..1e6109d --- /dev/null +++ b/gtd-api-gateway/src/main/resources/application.properties @@ -0,0 +1,19 @@ +spring.application.name=gtd-es-api-gateway + +info.component=Edge Server + +endpoints.restart.enabled=true +endpoints.shutdown.enabled=true +endpoints.health.sensitive=false + +ribbon.eureka.enabled=true + +eureka.client.serviceUrl.defaultZone=http://localhost:8761/eureka/ +eureka.client.registerWithEureka=false + +server.port=8765 + +zuul.prefix=/api +zuul.routes.gtd.path: /** + +management.security.enabled=false diff --git a/gtd-api-gateway/src/main/resources/application.yml b/gtd-api-gateway/src/main/resources/application.yml new file mode 100644 index 0000000..fcaf965 --- /dev/null +++ b/gtd-api-gateway/src/main/resources/application.yml @@ -0,0 +1,37 @@ +server: + port: 8765 + +management: + security: + enabled: false + +info: + component: Edge Server + +endpoints: + restart: + enabled: true + shutdown: + 
enabled: true + health: + sensitive: false + +ribbon: + eurekaEnabled: true + +eureka: + client: + enabled: true + healthcheck: + enabled: true + serviceUrl: + defaultZone: ${EUREKA_CLIENT_SERVICEURL_DEFAULTZONE:http://localhost:8761/eureka/} + instance: + appname: gtd-es-api-gateway + preferIpAddress: true + +zuul: + prefix: /api + routes: + gtd: + path: /** diff --git a/gtd-api-gateway/src/main/resources/bootstrap.yml b/gtd-api-gateway/src/main/resources/bootstrap.yml new file mode 100644 index 0000000..c90b069 --- /dev/null +++ b/gtd-api-gateway/src/main/resources/bootstrap.yml @@ -0,0 +1,3 @@ +spring: + application: + name: gtd-es-api-gateway diff --git a/gtd-codec/pom.xml b/gtd-codec/pom.xml new file mode 100644 index 0000000..d9a532d --- /dev/null +++ b/gtd-codec/pom.xml @@ -0,0 +1,86 @@ + + + + 4.0.0 + + + net.mguenther.gtd + gtd-parent + 0.1.0-SNAPSHOT + + + gtd-codec + 0.1.0-SNAPSHOT + [GTD] Event Serialization / Deserialization + https://github.com/mguenther/spring-kafka-event-sourcing-sampler + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + lombok.launch.AnnotationProcessorHider$AnnotationProcessor + + + 1.8 + 1.8 + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + schemas + generate-sources + + schema + protocol + idl-protocol + + + ${project.basedir}/src/main/avro/ + ${project.basedir}/src/main/java/ + String + + + + + + + + + + + net.mguenther.gtd + gtd-common + 0.1.0-SNAPSHOT + + + + javax.persistence + javax.persistence-api + 2.2 + + + + org.springframework.kafka + spring-kafka + + + + org.projectlombok + lombok + + + + org.apache.avro + avro + + + diff --git a/gtd-codec/src/main/avro/getting-things-done-v1.avsc b/gtd-codec/src/main/avro/getting-things-done-v1.avsc new file mode 100644 index 0000000..18a45ac --- /dev/null +++ b/gtd-codec/src/main/avro/getting-things-done-v1.avsc @@ -0,0 +1,74 @@ +{ + "namespace": "net.mguenther.gtd.kafka.serialization", + "type": "record", + "name": "AvroItemEvent", + "fields": [ + 
{"name": "eventId", "type": "string"}, + {"name": "timestamp", "type": "long"}, + {"name": "data", "type": [ + + { + "name": "AvroItemCreated", + "type": "record", + "fields": [ + {"name": "itemId", "type": "string"}, + {"name": "description", "type": "string"} + ] + }, + + { + "name": "AvroItemConcluded", + "type": "record", + "fields": [ + {"name": "itemId", "type": "string"} + ] + }, + + { + "name": "AvroRequiredTimeAssigned", + "type": "record", + "fields": [ + {"name": "itemId", "type": "string"}, + {"name": "requiredTime", "type": "int"} + ] + }, + + { + "name": "AvroDueDateAssigned", + "type": "record", + "fields": [ + {"name": "itemId", "type": "string"}, + {"name": "dueDate", "type": "long"} + ] + }, + + { + "name": "AvroTagAssigned", + "type": "record", + "fields": [ + {"name": "itemId", "type": "string"}, + {"name": "tag", "type": "string"} + ] + }, + + { + "name": "AvroTagRemoved", + "type": "record", + "fields": [ + {"name": "itemId", "type": "string"}, + {"name": "tag", "type": "string"} + ] + }, + + { + "name": "AvroItemMovedToList", + "type": "record", + "fields": [ + {"name": "itemId", "type": "string"}, + {"name": "list", "type": "string"} + ] + } + ] + } + ] +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroDueDateAssigned.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroDueDateAssigned.java new file mode 100644 index 0000000..62d0d9a --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroDueDateAssigned.java @@ -0,0 +1,266 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroDueDateAssigned extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -1861909984661059955L; + 
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroDueDateAssigned\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"dueDate\",\"type\":\"long\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String itemId; + @Deprecated public long dueDate; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroDueDateAssigned() {} + + /** + * All-args constructor. + * @param itemId The new value for itemId + * @param dueDate The new value for dueDate + */ + public AvroDueDateAssigned(java.lang.String itemId, java.lang.Long dueDate) { + this.itemId = itemId; + this.dueDate = dueDate; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return itemId; + case 1: return dueDate; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: itemId = (java.lang.String)value$; break; + case 1: dueDate = (java.lang.Long)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value of the 'itemId' field. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value the value to set. 
+ */ + public void setItemId(java.lang.String value) { + this.itemId = value; + } + + /** + * Gets the value of the 'dueDate' field. + * @return The value of the 'dueDate' field. + */ + public java.lang.Long getDueDate() { + return dueDate; + } + + /** + * Sets the value of the 'dueDate' field. + * @param value the value to set. + */ + public void setDueDate(java.lang.Long value) { + this.dueDate = value; + } + + /** + * Creates a new AvroDueDateAssigned RecordBuilder. + * @return A new AvroDueDateAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder(); + } + + /** + * Creates a new AvroDueDateAssigned RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroDueDateAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder(other); + } + + /** + * Creates a new AvroDueDateAssigned RecordBuilder by copying an existing AvroDueDateAssigned instance. + * @param other The existing instance to copy. + * @return A new AvroDueDateAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned other) { + return new net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder(other); + } + + /** + * RecordBuilder for AvroDueDateAssigned instances. 
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String itemId; + private long dueDate; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder other) { + super(other); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.dueDate)) { + this.dueDate = data().deepCopy(fields()[1].schema(), other.dueDate); + fieldSetFlags()[1] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroDueDateAssigned instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.dueDate)) { + this.dueDate = data().deepCopy(fields()[1].schema(), other.dueDate); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value The value of 'itemId'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder setItemId(java.lang.String value) { + validate(fields()[0], value); + this.itemId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'itemId' field has been set. + * @return True if the 'itemId' field has been set, false otherwise. 
+ */ + public boolean hasItemId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'itemId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder clearItemId() { + itemId = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'dueDate' field. + * @return The value. + */ + public java.lang.Long getDueDate() { + return dueDate; + } + + /** + * Sets the value of the 'dueDate' field. + * @param value The value of 'dueDate'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder setDueDate(long value) { + validate(fields()[1], value); + this.dueDate = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'dueDate' field has been set. + * @return True if the 'dueDate' field has been set, false otherwise. + */ + public boolean hasDueDate() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'dueDate' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroDueDateAssigned.Builder clearDueDate() { + fieldSetFlags()[1] = false; + return this; + } + + @Override + public AvroDueDateAssigned build() { + try { + AvroDueDateAssigned record = new AvroDueDateAssigned(); + record.itemId = fieldSetFlags()[0] ? this.itemId : (java.lang.String) defaultValue(fields()[0]); + record.dueDate = fieldSetFlags()[1] ? 
this.dueDate : (java.lang.Long) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemConcluded.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemConcluded.java new file mode 100644 index 0000000..45b2a1d --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemConcluded.java @@ -0,0 +1,197 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroItemConcluded extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 4912607116650177572L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroItemConcluded\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String itemId; + + /** + * Default constructor. 
Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroItemConcluded() {} + + /** + * All-args constructor. + * @param itemId The new value for itemId + */ + public AvroItemConcluded(java.lang.String itemId) { + this.itemId = itemId; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return itemId; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: itemId = (java.lang.String)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value of the 'itemId' field. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value the value to set. + */ + public void setItemId(java.lang.String value) { + this.itemId = value; + } + + /** + * Creates a new AvroItemConcluded RecordBuilder. + * @return A new AvroItemConcluded RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder(); + } + + /** + * Creates a new AvroItemConcluded RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. 
+ * @return A new AvroItemConcluded RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder(other); + } + + /** + * Creates a new AvroItemConcluded RecordBuilder by copying an existing AvroItemConcluded instance. + * @param other The existing instance to copy. + * @return A new AvroItemConcluded RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemConcluded other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder(other); + } + + /** + * RecordBuilder for AvroItemConcluded instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String itemId; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder other) { + super(other); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroItemConcluded instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemConcluded other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value. 
+ */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value The value of 'itemId'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder setItemId(java.lang.String value) { + validate(fields()[0], value); + this.itemId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'itemId' field has been set. + * @return True if the 'itemId' field has been set, false otherwise. + */ + public boolean hasItemId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'itemId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemConcluded.Builder clearItemId() { + itemId = null; + fieldSetFlags()[0] = false; + return this; + } + + @Override + public AvroItemConcluded build() { + try { + AvroItemConcluded record = new AvroItemConcluded(); + record.itemId = fieldSetFlags()[0] ? this.itemId : (java.lang.String) defaultValue(fields()[0]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemCreated.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemCreated.java new file mode 100644 index 0000000..f947558 --- /dev/null +++ 
b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemCreated.java @@ -0,0 +1,267 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroItemCreated extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -4014809175194756192L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroItemCreated\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"description\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String itemId; + @Deprecated public java.lang.String description; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroItemCreated() {} + + /** + * All-args constructor. + * @param itemId The new value for itemId + * @param description The new value for description + */ + public AvroItemCreated(java.lang.String itemId, java.lang.String description) { + this.itemId = itemId; + this.description = description; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return itemId; + case 1: return description; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. 
+ @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: itemId = (java.lang.String)value$; break; + case 1: description = (java.lang.String)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value of the 'itemId' field. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value the value to set. + */ + public void setItemId(java.lang.String value) { + this.itemId = value; + } + + /** + * Gets the value of the 'description' field. + * @return The value of the 'description' field. + */ + public java.lang.String getDescription() { + return description; + } + + /** + * Sets the value of the 'description' field. + * @param value the value to set. + */ + public void setDescription(java.lang.String value) { + this.description = value; + } + + /** + * Creates a new AvroItemCreated RecordBuilder. + * @return A new AvroItemCreated RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder(); + } + + /** + * Creates a new AvroItemCreated RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroItemCreated RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder(other); + } + + /** + * Creates a new AvroItemCreated RecordBuilder by copying an existing AvroItemCreated instance. + * @param other The existing instance to copy. 
+ * @return A new AvroItemCreated RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemCreated other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder(other); + } + + /** + * RecordBuilder for AvroItemCreated instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String itemId; + private java.lang.String description; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder other) { + super(other); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.description)) { + this.description = data().deepCopy(fields()[1].schema(), other.description); + fieldSetFlags()[1] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroItemCreated instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemCreated other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.description)) { + this.description = data().deepCopy(fields()[1].schema(), other.description); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value The value of 'itemId'. + * @return This builder. 
+ */ + public net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder setItemId(java.lang.String value) { + validate(fields()[0], value); + this.itemId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'itemId' field has been set. + * @return True if the 'itemId' field has been set, false otherwise. + */ + public boolean hasItemId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'itemId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder clearItemId() { + itemId = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'description' field. + * @return The value. + */ + public java.lang.String getDescription() { + return description; + } + + /** + * Sets the value of the 'description' field. + * @param value The value of 'description'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder setDescription(java.lang.String value) { + validate(fields()[1], value); + this.description = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'description' field has been set. + * @return True if the 'description' field has been set, false otherwise. + */ + public boolean hasDescription() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'description' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemCreated.Builder clearDescription() { + description = null; + fieldSetFlags()[1] = false; + return this; + } + + @Override + public AvroItemCreated build() { + try { + AvroItemCreated record = new AvroItemCreated(); + record.itemId = fieldSetFlags()[0] ? this.itemId : (java.lang.String) defaultValue(fields()[0]); + record.description = fieldSetFlags()[1] ? 
this.description : (java.lang.String) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemEvent.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemEvent.java new file mode 100644 index 0000000..1d45db6 --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemEvent.java @@ -0,0 +1,336 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroItemEvent extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -8507380527735012987L; + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroItemEvent\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"eventId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"timestamp\",\"type\":\"long\"},{\"name\":\"data\",\"type\":[{\"type\":\"record\",\"name\":\"AvroItemCreated\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"description\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"AvroItemConcluded\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"AvroRequiredTimeAssigned\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"requiredTime\",\"type\":\"int\"}]},{\"type\":\"record\",\"name\":\"AvroDueDateAssigned\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"dueDate\",\"type\":\"long\"}]},{\"type\":\"record\",\"name\":\"AvroTagAssigned\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tag\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"AvroTagRemoved\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tag\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"AvroItemMovedToList\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"list\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}]}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String eventId; + @Deprecated public long timestamp; + @Deprecated public 
java.lang.Object data; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroItemEvent() {} + + /** + * All-args constructor. + * @param eventId The new value for eventId + * @param timestamp The new value for timestamp + * @param data The new value for data + */ + public AvroItemEvent(java.lang.String eventId, java.lang.Long timestamp, java.lang.Object data) { + this.eventId = eventId; + this.timestamp = timestamp; + this.data = data; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return eventId; + case 1: return timestamp; + case 2: return data; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: eventId = (java.lang.String)value$; break; + case 1: timestamp = (java.lang.Long)value$; break; + case 2: data = (java.lang.Object)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'eventId' field. + * @return The value of the 'eventId' field. + */ + public java.lang.String getEventId() { + return eventId; + } + + /** + * Sets the value of the 'eventId' field. + * @param value the value to set. + */ + public void setEventId(java.lang.String value) { + this.eventId = value; + } + + /** + * Gets the value of the 'timestamp' field. + * @return The value of the 'timestamp' field. + */ + public java.lang.Long getTimestamp() { + return timestamp; + } + + /** + * Sets the value of the 'timestamp' field. + * @param value the value to set. 
+ */ + public void setTimestamp(java.lang.Long value) { + this.timestamp = value; + } + + /** + * Gets the value of the 'data' field. + * @return The value of the 'data' field. + */ + public java.lang.Object getData() { + return data; + } + + /** + * Sets the value of the 'data' field. + * @param value the value to set. + */ + public void setData(java.lang.Object value) { + this.data = value; + } + + /** + * Creates a new AvroItemEvent RecordBuilder. + * @return A new AvroItemEvent RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder(); + } + + /** + * Creates a new AvroItemEvent RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroItemEvent RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder(other); + } + + /** + * Creates a new AvroItemEvent RecordBuilder by copying an existing AvroItemEvent instance. + * @param other The existing instance to copy. + * @return A new AvroItemEvent RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemEvent other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder(other); + } + + /** + * RecordBuilder for AvroItemEvent instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String eventId; + private long timestamp; + private java.lang.Object data; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. 
+ * @param other The existing Builder to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder other) { + super(other); + if (isValidValue(fields()[0], other.eventId)) { + this.eventId = data().deepCopy(fields()[0].schema(), other.eventId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.timestamp)) { + this.timestamp = data().deepCopy(fields()[1].schema(), other.timestamp); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroItemEvent instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemEvent other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.eventId)) { + this.eventId = data().deepCopy(fields()[0].schema(), other.eventId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.timestamp)) { + this.timestamp = data().deepCopy(fields()[1].schema(), other.timestamp); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.data)) { + this.data = data().deepCopy(fields()[2].schema(), other.data); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'eventId' field. + * @return The value. + */ + public java.lang.String getEventId() { + return eventId; + } + + /** + * Sets the value of the 'eventId' field. + * @param value The value of 'eventId'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder setEventId(java.lang.String value) { + validate(fields()[0], value); + this.eventId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'eventId' field has been set. + * @return True if the 'eventId' field has been set, false otherwise. 
+ */ + public boolean hasEventId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'eventId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder clearEventId() { + eventId = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'timestamp' field. + * @return The value. + */ + public java.lang.Long getTimestamp() { + return timestamp; + } + + /** + * Sets the value of the 'timestamp' field. + * @param value The value of 'timestamp'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder setTimestamp(long value) { + validate(fields()[1], value); + this.timestamp = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'timestamp' field has been set. + * @return True if the 'timestamp' field has been set, false otherwise. + */ + public boolean hasTimestamp() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'timestamp' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder clearTimestamp() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'data' field. + * @return The value. + */ + public java.lang.Object getData() { + return data; + } + + /** + * Sets the value of the 'data' field. + * @param value The value of 'data'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder setData(java.lang.Object value) { + validate(fields()[2], value); + this.data = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'data' field has been set. + * @return True if the 'data' field has been set, false otherwise. + */ + public boolean hasData() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'data' field. + * @return This builder. 
+ */ + public net.mguenther.gtd.kafka.serialization.AvroItemEvent.Builder clearData() { + data = null; + fieldSetFlags()[2] = false; + return this; + } + + @Override + public AvroItemEvent build() { + try { + AvroItemEvent record = new AvroItemEvent(); + record.eventId = fieldSetFlags()[0] ? this.eventId : (java.lang.String) defaultValue(fields()[0]); + record.timestamp = fieldSetFlags()[1] ? this.timestamp : (java.lang.Long) defaultValue(fields()[1]); + record.data = fieldSetFlags()[2] ? this.data : (java.lang.Object) defaultValue(fields()[2]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemMovedToList.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemMovedToList.java new file mode 100644 index 0000000..ae576a5 --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroItemMovedToList.java @@ -0,0 +1,267 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroItemMovedToList extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 
-286412023824743521L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroItemMovedToList\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"list\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public java.lang.String itemId; + @Deprecated public java.lang.String list; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroItemMovedToList() {} + + /** + * All-args constructor. + * @param itemId The new value for itemId + * @param list The new value for list + */ + public AvroItemMovedToList(java.lang.String itemId, java.lang.String list) { + this.itemId = itemId; + this.list = list; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return itemId; + case 1: return list; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: itemId = (java.lang.String)value$; break; + case 1: list = (java.lang.String)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value of the 'itemId' field. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value the value to set. 
+ */ + public void setItemId(java.lang.String value) { + this.itemId = value; + } + + /** + * Gets the value of the 'list' field. + * @return The value of the 'list' field. + */ + public java.lang.String getList() { + return list; + } + + /** + * Sets the value of the 'list' field. + * @param value the value to set. + */ + public void setList(java.lang.String value) { + this.list = value; + } + + /** + * Creates a new AvroItemMovedToList RecordBuilder. + * @return A new AvroItemMovedToList RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder(); + } + + /** + * Creates a new AvroItemMovedToList RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroItemMovedToList RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder(other); + } + + /** + * Creates a new AvroItemMovedToList RecordBuilder by copying an existing AvroItemMovedToList instance. + * @param other The existing instance to copy. + * @return A new AvroItemMovedToList RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroItemMovedToList other) { + return new net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder(other); + } + + /** + * RecordBuilder for AvroItemMovedToList instances. 
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String itemId; + private java.lang.String list; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder other) { + super(other); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.list)) { + this.list = data().deepCopy(fields()[1].schema(), other.list); + fieldSetFlags()[1] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroItemMovedToList instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroItemMovedToList other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.list)) { + this.list = data().deepCopy(fields()[1].schema(), other.list); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value The value of 'itemId'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder setItemId(java.lang.String value) { + validate(fields()[0], value); + this.itemId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'itemId' field has been set. + * @return True if the 'itemId' field has been set, false otherwise. 
+ */ + public boolean hasItemId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'itemId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder clearItemId() { + itemId = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'list' field. + * @return The value. + */ + public java.lang.String getList() { + return list; + } + + /** + * Sets the value of the 'list' field. + * @param value The value of 'list'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder setList(java.lang.String value) { + validate(fields()[1], value); + this.list = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'list' field has been set. + * @return True if the 'list' field has been set, false otherwise. + */ + public boolean hasList() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'list' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroItemMovedToList.Builder clearList() { + list = null; + fieldSetFlags()[1] = false; + return this; + } + + @Override + public AvroItemMovedToList build() { + try { + AvroItemMovedToList record = new AvroItemMovedToList(); + record.itemId = fieldSetFlags()[0] ? this.itemId : (java.lang.String) defaultValue(fields()[0]); + record.list = fieldSetFlags()[1] ? 
this.list : (java.lang.String) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroRequiredTimeAssigned.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroRequiredTimeAssigned.java new file mode 100644 index 0000000..a7479b8 --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroRequiredTimeAssigned.java @@ -0,0 +1,266 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroRequiredTimeAssigned extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 5118663262735737399L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroRequiredTimeAssigned\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"requiredTime\",\"type\":\"int\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated 
public java.lang.String itemId; + @Deprecated public int requiredTime; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroRequiredTimeAssigned() {} + + /** + * All-args constructor. + * @param itemId The new value for itemId + * @param requiredTime The new value for requiredTime + */ + public AvroRequiredTimeAssigned(java.lang.String itemId, java.lang.Integer requiredTime) { + this.itemId = itemId; + this.requiredTime = requiredTime; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return itemId; + case 1: return requiredTime; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: itemId = (java.lang.String)value$; break; + case 1: requiredTime = (java.lang.Integer)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value of the 'itemId' field. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value the value to set. + */ + public void setItemId(java.lang.String value) { + this.itemId = value; + } + + /** + * Gets the value of the 'requiredTime' field. + * @return The value of the 'requiredTime' field. + */ + public java.lang.Integer getRequiredTime() { + return requiredTime; + } + + /** + * Sets the value of the 'requiredTime' field. + * @param value the value to set. 
+ */ + public void setRequiredTime(java.lang.Integer value) { + this.requiredTime = value; + } + + /** + * Creates a new AvroRequiredTimeAssigned RecordBuilder. + * @return A new AvroRequiredTimeAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder(); + } + + /** + * Creates a new AvroRequiredTimeAssigned RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroRequiredTimeAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder(other); + } + + /** + * Creates a new AvroRequiredTimeAssigned RecordBuilder by copying an existing AvroRequiredTimeAssigned instance. + * @param other The existing instance to copy. + * @return A new AvroRequiredTimeAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned other) { + return new net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder(other); + } + + /** + * RecordBuilder for AvroRequiredTimeAssigned instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String itemId; + private int requiredTime; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. 
+ */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder other) { + super(other); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.requiredTime)) { + this.requiredTime = data().deepCopy(fields()[1].schema(), other.requiredTime); + fieldSetFlags()[1] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroRequiredTimeAssigned instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.requiredTime)) { + this.requiredTime = data().deepCopy(fields()[1].schema(), other.requiredTime); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value The value of 'itemId'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder setItemId(java.lang.String value) { + validate(fields()[0], value); + this.itemId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'itemId' field has been set. + * @return True if the 'itemId' field has been set, false otherwise. + */ + public boolean hasItemId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'itemId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder clearItemId() { + itemId = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'requiredTime' field. 
+ * @return The value. + */ + public java.lang.Integer getRequiredTime() { + return requiredTime; + } + + /** + * Sets the value of the 'requiredTime' field. + * @param value The value of 'requiredTime'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder setRequiredTime(int value) { + validate(fields()[1], value); + this.requiredTime = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'requiredTime' field has been set. + * @return True if the 'requiredTime' field has been set, false otherwise. + */ + public boolean hasRequiredTime() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'requiredTime' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroRequiredTimeAssigned.Builder clearRequiredTime() { + fieldSetFlags()[1] = false; + return this; + } + + @Override + public AvroRequiredTimeAssigned build() { + try { + AvroRequiredTimeAssigned record = new AvroRequiredTimeAssigned(); + record.itemId = fieldSetFlags()[0] ? this.itemId : (java.lang.String) defaultValue(fields()[0]); + record.requiredTime = fieldSetFlags()[1] ? 
this.requiredTime : (java.lang.Integer) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroTagAssigned.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroTagAssigned.java new file mode 100644 index 0000000..cb84aa1 --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroTagAssigned.java @@ -0,0 +1,267 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroTagAssigned extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -2018178117501911402L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroTagAssigned\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tag\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated 
public java.lang.String itemId; + @Deprecated public java.lang.String tag; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroTagAssigned() {} + + /** + * All-args constructor. + * @param itemId The new value for itemId + * @param tag The new value for tag + */ + public AvroTagAssigned(java.lang.String itemId, java.lang.String tag) { + this.itemId = itemId; + this.tag = tag; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return itemId; + case 1: return tag; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: itemId = (java.lang.String)value$; break; + case 1: tag = (java.lang.String)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value of the 'itemId' field. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value the value to set. + */ + public void setItemId(java.lang.String value) { + this.itemId = value; + } + + /** + * Gets the value of the 'tag' field. + * @return The value of the 'tag' field. + */ + public java.lang.String getTag() { + return tag; + } + + /** + * Sets the value of the 'tag' field. + * @param value the value to set. + */ + public void setTag(java.lang.String value) { + this.tag = value; + } + + /** + * Creates a new AvroTagAssigned RecordBuilder. 
+ * @return A new AvroTagAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder(); + } + + /** + * Creates a new AvroTagAssigned RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroTagAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder(other); + } + + /** + * Creates a new AvroTagAssigned RecordBuilder by copying an existing AvroTagAssigned instance. + * @param other The existing instance to copy. + * @return A new AvroTagAssigned RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroTagAssigned other) { + return new net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder(other); + } + + /** + * RecordBuilder for AvroTagAssigned instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String itemId; + private java.lang.String tag; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. 
+ */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder other) { + super(other); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.tag)) { + this.tag = data().deepCopy(fields()[1].schema(), other.tag); + fieldSetFlags()[1] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroTagAssigned instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroTagAssigned other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.tag)) { + this.tag = data().deepCopy(fields()[1].schema(), other.tag); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value The value of 'itemId'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder setItemId(java.lang.String value) { + validate(fields()[0], value); + this.itemId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'itemId' field has been set. + * @return True if the 'itemId' field has been set, false otherwise. + */ + public boolean hasItemId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'itemId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder clearItemId() { + itemId = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'tag' field. + * @return The value. 
+ */ + public java.lang.String getTag() { + return tag; + } + + /** + * Sets the value of the 'tag' field. + * @param value The value of 'tag'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder setTag(java.lang.String value) { + validate(fields()[1], value); + this.tag = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'tag' field has been set. + * @return True if the 'tag' field has been set, false otherwise. + */ + public boolean hasTag() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'tag' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagAssigned.Builder clearTag() { + tag = null; + fieldSetFlags()[1] = false; + return this; + } + + @Override + public AvroTagAssigned build() { + try { + AvroTagAssigned record = new AvroTagAssigned(); + record.itemId = fieldSetFlags()[0] ? this.itemId : (java.lang.String) defaultValue(fields()[0]); + record.tag = fieldSetFlags()[1] ? 
this.tag : (java.lang.String) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroTagRemoved.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroTagRemoved.java new file mode 100644 index 0000000..2e5e13d --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/AvroTagRemoved.java @@ -0,0 +1,267 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.specific.SpecificData; + +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class AvroTagRemoved extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -5189694502141084877L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroTagRemoved\",\"namespace\":\"net.mguenther.gtd.kafka.serialization\",\"fields\":[{\"name\":\"itemId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"tag\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public 
java.lang.String itemId; + @Deprecated public java.lang.String tag; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroTagRemoved() {} + + /** + * All-args constructor. + * @param itemId The new value for itemId + * @param tag The new value for tag + */ + public AvroTagRemoved(java.lang.String itemId, java.lang.String tag) { + this.itemId = itemId; + this.tag = tag; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return itemId; + case 1: return tag; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: itemId = (java.lang.String)value$; break; + case 1: tag = (java.lang.String)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value of the 'itemId' field. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value the value to set. + */ + public void setItemId(java.lang.String value) { + this.itemId = value; + } + + /** + * Gets the value of the 'tag' field. + * @return The value of the 'tag' field. + */ + public java.lang.String getTag() { + return tag; + } + + /** + * Sets the value of the 'tag' field. + * @param value the value to set. + */ + public void setTag(java.lang.String value) { + this.tag = value; + } + + /** + * Creates a new AvroTagRemoved RecordBuilder. 
+ * @return A new AvroTagRemoved RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder newBuilder() { + return new net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder(); + } + + /** + * Creates a new AvroTagRemoved RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroTagRemoved RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder other) { + return new net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder(other); + } + + /** + * Creates a new AvroTagRemoved RecordBuilder by copying an existing AvroTagRemoved instance. + * @param other The existing instance to copy. + * @return A new AvroTagRemoved RecordBuilder + */ + public static net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder newBuilder(net.mguenther.gtd.kafka.serialization.AvroTagRemoved other) { + return new net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder(other); + } + + /** + * RecordBuilder for AvroTagRemoved instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.String itemId; + private java.lang.String tag; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. 
+ */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder other) { + super(other); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.tag)) { + this.tag = data().deepCopy(fields()[1].schema(), other.tag); + fieldSetFlags()[1] = true; + } + } + + /** + * Creates a Builder by copying an existing AvroTagRemoved instance + * @param other The existing instance to copy. + */ + private Builder(net.mguenther.gtd.kafka.serialization.AvroTagRemoved other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.itemId)) { + this.itemId = data().deepCopy(fields()[0].schema(), other.itemId); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.tag)) { + this.tag = data().deepCopy(fields()[1].schema(), other.tag); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'itemId' field. + * @return The value. + */ + public java.lang.String getItemId() { + return itemId; + } + + /** + * Sets the value of the 'itemId' field. + * @param value The value of 'itemId'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder setItemId(java.lang.String value) { + validate(fields()[0], value); + this.itemId = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'itemId' field has been set. + * @return True if the 'itemId' field has been set, false otherwise. + */ + public boolean hasItemId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'itemId' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder clearItemId() { + itemId = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'tag' field. + * @return The value. 
+ */ + public java.lang.String getTag() { + return tag; + } + + /** + * Sets the value of the 'tag' field. + * @param value The value of 'tag'. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder setTag(java.lang.String value) { + validate(fields()[1], value); + this.tag = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'tag' field has been set. + * @return True if the 'tag' field has been set, false otherwise. + */ + public boolean hasTag() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'tag' field. + * @return This builder. + */ + public net.mguenther.gtd.kafka.serialization.AvroTagRemoved.Builder clearTag() { + tag = null; + fieldSetFlags()[1] = false; + return this; + } + + @Override + public AvroTagRemoved build() { + try { + AvroTagRemoved record = new AvroTagRemoved(); + record.itemId = fieldSetFlags()[0] ? this.itemId : (java.lang.String) defaultValue(fields()[0]); + record.tag = fieldSetFlags()[1] ? 
this.tag : (java.lang.String) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + private static final org.apache.avro.io.DatumWriter + WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + private static final org.apache.avro.io.DatumReader + READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventConverter.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventConverter.java new file mode 100644 index 0000000..85a76b0 --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventConverter.java @@ -0,0 +1,163 @@ +package net.mguenther.gtd.kafka.serialization; + +import net.mguenther.gtd.domain.event.DueDateAssigned; +import net.mguenther.gtd.domain.event.ItemConcluded; +import net.mguenther.gtd.domain.event.ItemCreated; +import net.mguenther.gtd.domain.event.ItemEvent; +import net.mguenther.gtd.domain.event.ItemMovedToList; +import net.mguenther.gtd.domain.event.RequiredTimeAssigned; +import net.mguenther.gtd.domain.event.TagAssigned; +import net.mguenther.gtd.domain.event.TagRemoved; +import org.springframework.stereotype.Component; + +import java.time.Instant; +import java.util.Date; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Component +public class ItemEventConverter { + + private AvroItemEvent wrap(final ItemEvent event, final Object eventPayload) { + + return AvroItemEvent + .newBuilder() + .setEventId(event.getEventId()) + 
.setTimestamp(event.getTimestamp()) + .setData(eventPayload) + .build(); + } + + public AvroItemEvent from(final ItemEvent event) { + + if (event instanceof ItemCreated) return from((ItemCreated) event); + else if (event instanceof RequiredTimeAssigned) return from((RequiredTimeAssigned) event); + else if (event instanceof DueDateAssigned) return from((DueDateAssigned) event); + else if (event instanceof TagAssigned) return from((TagAssigned) event); + else if (event instanceof TagRemoved) return from((TagRemoved) event); + else if (event instanceof ItemMovedToList) return from((ItemMovedToList) event); + else if (event instanceof ItemConcluded) return from((ItemConcluded) event); + else throw new IllegalArgumentException("Unsupported event type " + event.getClass()); + } + + private AvroItemEvent from(final ItemCreated event) { + + final AvroItemCreated avroEvent = AvroItemCreated.newBuilder() + .setItemId(event.getItemId()) + .setDescription(event.getDescription()) + .build(); + + return wrap(event, avroEvent); + } + + private AvroItemEvent from(final RequiredTimeAssigned event) { + + final AvroRequiredTimeAssigned avroEvent = AvroRequiredTimeAssigned.newBuilder() + .setItemId(event.getItemId()) + .setRequiredTime(event.getRequiredTime()) + .build(); + + return wrap(event, avroEvent); + } + + private AvroItemEvent from(final DueDateAssigned event) { + + final AvroDueDateAssigned avroEvent = AvroDueDateAssigned.newBuilder() + .setItemId(event.getItemId()) + .setDueDate(event.getDueDate().getTime()) + .build(); + + return wrap(event, avroEvent); + } + + private AvroItemEvent from(final TagAssigned event) { + + final AvroTagAssigned avroEvent = AvroTagAssigned.newBuilder() + .setItemId(event.getItemId()) + .setTag(event.getTag()) + .build(); + + return wrap(event, avroEvent); + } + + private AvroItemEvent from(final TagRemoved event) { + + final AvroTagRemoved avroEvent = AvroTagRemoved.newBuilder() + .setItemId(event.getItemId()) + .setTag(event.getTag()) + 
.build(); + + return wrap(event, avroEvent); + } + + private AvroItemEvent from(final ItemMovedToList event) { + + final AvroItemMovedToList avroEvent = AvroItemMovedToList.newBuilder() + .setItemId(event.getItemId()) + .setList(event.getList()) + .build(); + + return wrap(event, avroEvent); + } + + private AvroItemEvent from(final ItemConcluded event) { + + final AvroItemConcluded avroEvent = AvroItemConcluded.newBuilder() + .setItemId(event.getItemId()) + .build(); + + return wrap(event, avroEvent); + } + + public ItemEvent to(final AvroItemEvent event) { + + final String eventId = String.valueOf(event.getEventId()); + final long timestamp = event.getTimestamp(); + + ItemEvent domainEvent; + + if (event.getData() instanceof AvroItemCreated) { + + final AvroItemCreated payload = (AvroItemCreated) event.getData(); + domainEvent = new ItemCreated(eventId, timestamp, payload.getItemId(), payload.getDescription()); + + } else if (event.getData() instanceof AvroItemConcluded) { + + final AvroItemConcluded payload = (AvroItemConcluded) event.getData(); + domainEvent = new ItemConcluded(eventId, timestamp, payload.getItemId()); + + } else if (event.getData() instanceof AvroRequiredTimeAssigned) { + + final AvroRequiredTimeAssigned payload = (AvroRequiredTimeAssigned) event.getData(); + domainEvent = new RequiredTimeAssigned(eventId, timestamp, payload.getItemId(), payload.getRequiredTime()); + + } else if (event.getData() instanceof AvroDueDateAssigned) { + + final AvroDueDateAssigned payload = (AvroDueDateAssigned) event.getData(); + final Date dueDate = Date.from(Instant.ofEpochMilli(payload.getDueDate())); + domainEvent = new DueDateAssigned(eventId, timestamp, payload.getItemId(), dueDate); + + } else if (event.getData() instanceof AvroTagAssigned) { + + final AvroTagAssigned payload = (AvroTagAssigned) event.getData(); + domainEvent = new TagAssigned(eventId, timestamp, payload.getItemId(), payload.getTag()); + + } else if (event.getData() instanceof AvroTagRemoved) 
{ + + final AvroTagRemoved payload = (AvroTagRemoved) event.getData(); + domainEvent = new TagRemoved(eventId, timestamp, payload.getItemId(), payload.getTag()); + + } else if (event.getData() instanceof AvroItemMovedToList) { + + final AvroItemMovedToList payload = (AvroItemMovedToList) event.getData(); + domainEvent = new ItemMovedToList(eventId, timestamp, payload.getItemId(), payload.getList()); + + } else { + throw new IllegalArgumentException("Unsupported event payload for event with ID " + eventId); + } + + return domainEvent; + } +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventDeserializer.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventDeserializer.java new file mode 100644 index 0000000..39f8781 --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventDeserializer.java @@ -0,0 +1,40 @@ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.Decoder; +import org.apache.avro.io.DecoderFactory; +import org.apache.avro.specific.SpecificDatumReader; +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.serialization.Deserializer; + +import java.io.IOException; +import java.util.Map; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class ItemEventDeserializer implements Deserializer { + + private final DatumReader itemEventReader = new SpecificDatumReader<>(AvroItemEvent.class); + + @Override + public void configure(final Map map, final boolean b) { + // nothing to do here + } + + @Override + public AvroItemEvent deserialize(final String s, final byte[] bytes) { + final Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null); + try { + return itemEventReader.read(null, decoder); + } catch (IOException e) { + throw new SerializationException("Unable to deserialize byte[] to 
ItemEvent.", e); + } + } + + @Override + public void close() { + // nothing to do here + } +} diff --git a/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventSerializer.java b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventSerializer.java new file mode 100644 index 0000000..fc4846a --- /dev/null +++ b/gtd-codec/src/main/java/net/mguenther/gtd/kafka/serialization/ItemEventSerializer.java @@ -0,0 +1,43 @@ +package net.mguenther.gtd.kafka.serialization; + +import org.apache.avro.io.BinaryEncoder; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.serialization.Serializer; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Map; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class ItemEventSerializer implements Serializer { + + private final DatumWriter eventWriter = new SpecificDatumWriter<>(AvroItemEvent.class); + + @Override + public void configure(Map map, boolean b) { + // nothing to do here + } + + @Override + public byte[] serialize(final String s, final AvroItemEvent event) { + try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { + final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null); + eventWriter.write(event, encoder); + encoder.flush(); + return out.toByteArray(); + } catch (IOException e) { + throw new SerializationException("Unable to serialize Event to byte[].", e); + } + } + + @Override + public void close() { + // nothing to do here + } +} diff --git a/gtd-command-side/Dockerfile b/gtd-command-side/Dockerfile new file mode 100644 index 0000000..0cf9bc0 --- /dev/null +++ b/gtd-command-side/Dockerfile @@ -0,0 +1,4 @@ +FROM ewolff/docker-java +COPY target/gtd-command-side-0.1.0-SNAPSHOT.jar . 
/**
 * Request payload for POST /items: carries the free-text description of the
 * to-do item that should be created.
 *
 * @author Markus Günther (markus.guenther@gmail.com)
 * @author Boris Fresow (bfresow@gmail.com)
 */
public class CreateItemRequest {

    // Free-text description of the item to create; bound from the JSON body.
    private String description;

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    @Override
    public String toString() {
        return String.format("CreateItemRequest{description='%s'}", description);
    }
}
+import net.mguenther.gtd.domain.commands.ItemCommand; +import net.mguenther.gtd.domain.commands.MoveItemToList; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@RestController +public class ItemCommandResource { + + private static final Logger log = LoggerFactory.getLogger(ItemCommandResource.class); + + private final CommandHandler commandHandler; + + @Autowired + public ItemCommandResource(final CommandHandler commandHandler) { + this.commandHandler = commandHandler; + } + + @RequestMapping(path = "/items/{itemId}", method = RequestMethod.DELETE, produces = "application/json") + public CompletableFuture> deleteItem(@PathVariable("itemId") String itemId) { + + log.info("Received a delete item request for item with ID {}.", itemId); + + return commandHandler + .onCommand(new ConcludeItem(itemId)) + .thenApply(dontCare -> ResponseEntity.accepted().build()) + .exceptionally(e -> { + log.warn("Caught an exception at the service boundary.", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build(); + }); + } + + @RequestMapping(path = "/items/{itemId}", method = RequestMethod.PUT, consumes = "application/json", produces = "application/json") + public 
CompletableFuture> updateItem(@PathVariable("itemId") String itemId, + @RequestBody UpdateItemRequest updateItem) { + + log.info("Received an update item request for item with ID {} and updated data {}.", itemId, updateItem); + + return commandHandler + .onCommand(commandsFor(itemId, updateItem).orElse(Collections.emptyList())) + .thenApply(dontCare -> ResponseEntity.accepted().build()) + .exceptionally((e -> { + log.warn("Caught an exception at the service boundary.", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build(); + })); + } + + private Optional> commandsFor(final String itemId, final UpdateItemRequest updateItem) { + final List commands = new ArrayList<>(); + if (updateItem.getAssociatedList() != null) { + commands.add(new MoveItemToList(itemId, updateItem.getAssociatedList())); + } + if (updateItem.getDueDate() != null) { + final Date dueDate = Date.from(Instant.ofEpochMilli(updateItem.getDueDate())); + commands.add(new AssignDueDate(itemId, dueDate)); + } + if (updateItem.getRequiredTime() != null) { + commands.add(new AssignRequiredTime(itemId, updateItem.getRequiredTime())); + } + if (updateItem.getTags() != null) { + commands.add(new AssignTags(itemId, updateItem.getTags())); + } + return Optional.of(Collections.unmodifiableList(commands)); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/api/ItemsCommandResource.java b/gtd-command-side/src/main/java/net/mguenther/gtd/api/ItemsCommandResource.java new file mode 100644 index 0000000..1269a1b --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/api/ItemsCommandResource.java @@ -0,0 +1,52 @@ +package net.mguenther.gtd.api; + +import net.mguenther.gtd.domain.CommandHandler; +import net.mguenther.gtd.domain.commands.CreateItem; +import net.mguenther.gtd.domain.commands.ItemCommand; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; 
+import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@RestController +public class ItemsCommandResource { + + private static final Logger log = LoggerFactory.getLogger(ItemsCommandResource.class); + + private final CommandHandler commandHandler; + + @Autowired + public ItemsCommandResource(final CommandHandler commandHandler) { + this.commandHandler = commandHandler; + } + + @RequestMapping(path = "/items", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) + public CompletableFuture> createItem(@RequestBody CreateItemRequest createItem) { + + log.info("Received a create item request with data {}.", createItem); + + return commandHandler + .onCommand(commandsFor(createItem)) + .thenApply(dontCare -> ResponseEntity.accepted().build()) + .exceptionally(e -> { + log.warn("Caught an exception at the service boundary.", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build(); + }); + } + + private ItemCommand commandsFor(final CreateItemRequest createItem) { + return new CreateItem(createItem.getDescription()); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/api/UpdateItemRequest.java b/gtd-command-side/src/main/java/net/mguenther/gtd/api/UpdateItemRequest.java new file mode 100644 index 0000000..7c4c10a --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/api/UpdateItemRequest.java @@ -0,0 +1,60 @@ +package net.mguenther.gtd.api; + +import java.util.List; + +/** + * @author Markus Günther 
(markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class UpdateItemRequest { + + private Long dueDate; + + private Integer requiredTime; + + private List tags; + + private String associatedList; + + public Long getDueDate() { + return dueDate; + } + + public void setDueDate(long dueDate) { + this.dueDate = dueDate; + } + + public Integer getRequiredTime() { + return requiredTime; + } + + public void setRequiredTime(int requiredTime) { + this.requiredTime = requiredTime; + } + + public List getTags() { + return tags; + } + + public void setTags(List tags) { + this.tags = tags; + } + + public String getAssociatedList() { + return associatedList; + } + + public void setAssociatedList(String associatedList) { + this.associatedList = associatedList; + } + + @Override + public String toString() { + return "UpdateItemRequest{" + + "dueDate=" + dueDate + + ", requiredTime=" + requiredTime + + ", tags=" + tags + + ", associatedList='" + associatedList + '\'' + + '}'; + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/domain/CommandHandler.java b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/CommandHandler.java new file mode 100644 index 0000000..6179844 --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/CommandHandler.java @@ -0,0 +1,16 @@ +package net.mguenther.gtd.domain; + +import net.mguenther.gtd.domain.commands.ItemCommand; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public interface CommandHandler { + + CompletableFuture onCommand(ItemCommand command); + CompletableFuture onCommand(List command); +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/domain/ItemEventPublisher.java b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/ItemEventPublisher.java new file mode 100644 index 0000000..c1c8413 --- /dev/null +++ 
b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/ItemEventPublisher.java @@ -0,0 +1,12 @@ +package net.mguenther.gtd.domain; + +import net.mguenther.gtd.domain.event.ItemEvent; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public interface ItemEventPublisher { + + void log(ItemEvent itemEvent); +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/domain/ItemManager.java b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/ItemManager.java new file mode 100644 index 0000000..87b245f --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/ItemManager.java @@ -0,0 +1,186 @@ +package net.mguenther.gtd.domain; + +import net.mguenther.gtd.domain.commands.AssignDueDate; +import net.mguenther.gtd.domain.commands.AssignRequiredTime; +import net.mguenther.gtd.domain.commands.AssignTags; +import net.mguenther.gtd.domain.commands.ConcludeItem; +import net.mguenther.gtd.domain.commands.CreateItem; +import net.mguenther.gtd.domain.commands.ItemCommand; +import net.mguenther.gtd.domain.commands.MoveItemToList; +import net.mguenther.gtd.domain.event.DueDateAssigned; +import net.mguenther.gtd.domain.event.ItemConcluded; +import net.mguenther.gtd.domain.event.ItemCreated; +import net.mguenther.gtd.domain.event.ItemEvent; +import net.mguenther.gtd.domain.event.ItemMovedToList; +import net.mguenther.gtd.domain.event.RequiredTimeAssigned; +import net.mguenther.gtd.domain.event.TagAssigned; +import net.mguenther.gtd.domain.event.TagRemoved; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +import java.time.Clock; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +import static java.util.Collections.emptyList; 
+import static java.util.Collections.singletonList; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Service +public class ItemManager implements CommandHandler, EventHandler { + + private static final Logger log = LoggerFactory.getLogger(ItemManager.class); + + private final Map items = new ConcurrentHashMap<>(); + + private final ItemEventPublisher publisher; + + public ItemManager(final ItemEventPublisher publisher) { + this.publisher = publisher; + } + + @Override + public CompletableFuture onCommand(final ItemCommand command) { + + return CompletableFuture.runAsync(() -> validate(command).forEach(publisher::log)); + } + + @Override + public CompletableFuture onCommand(final List commands) { + + return CompletableFuture.runAsync(() -> + commands + .stream() + .flatMap(command -> validate(command).stream()) + .forEach(publisher::log)); + } + + private List validate(final ItemCommand command) { + + if (command instanceof CreateItem) { + return validate((CreateItem) command); + } + + final Item item = items.get(command.getItemId()); + if (item == null) { + log.warn("Item with ID {} does not exist but has received a command ({}). This is probably due to a " + + "out-of-order arrival of events, which should not happen due to Kafka's topic ordering guarantees. " + + "Please check your Kafka topic configuration. 
The command will be dropped.", + command.getItemId(), command); + return emptyList(); + } + + if (command instanceof AssignDueDate) return validate(item, (AssignDueDate) command); + else if (command instanceof AssignRequiredTime) return validate(item, (AssignRequiredTime) command); + else if (command instanceof AssignTags) return validate(item, (AssignTags) command); + else if (command instanceof ConcludeItem) return validate(item, (ConcludeItem) command); + else if (command instanceof MoveItemToList) return validate(item, (MoveItemToList) command); + else return emptyList(); + } + + private List validate(final CreateItem command) { + return singletonList(new ItemCreated(command.getItemId(), command.getDescription())); + } + + private List validate(final Item item, final AssignDueDate command) { + final Date now = Date.from(Instant.now(Clock.systemUTC())); + if (item.isDone() || command.getDueDate().before(now)) { + logValidationFailed(item, command); + return emptyList(); + } else { + return singletonList(new DueDateAssigned(item.getId(), command.getDueDate())); + } + } + + private List validate(final Item item, final AssignRequiredTime command) { + if (item.isDone() || command.getRequiredTime() < 0) { + logValidationFailed(item, command); + return emptyList(); + } else { + return singletonList(new RequiredTimeAssigned(item.getId(), command.getRequiredTime())); + } + } + + private List validate(final Item item, final AssignTags command) { + + if (item.isDone()) { + logValidationFailed(item, command); + return emptyList(); + } + + final List events = new ArrayList<>(); + events.addAll(command.getTags() + .stream() + .filter(tag -> !item.getTags().contains(tag)) + .map(tag -> new TagAssigned(command.getItemId(), tag)) + .collect(Collectors.toList())); + events.addAll(item.getTags() + .stream() + .filter(tag -> !command.getTags().contains(tag)) + .map(tag -> new TagRemoved(command.getItemId(), tag)) + .collect(Collectors.toList())); + return events; + } + + private 
List validate(final Item item, final ConcludeItem command) { + if (item.isDone()) { + logValidationFailed(item, command); + return emptyList(); + } else { + return singletonList(new ItemConcluded(item.getId())); + } + } + + private List validate(final Item item, final MoveItemToList command) { + if (item.isDone() || command.getList().equals(item.getAssociatedList())) { + logValidationFailed(item, command); + return emptyList(); + } else { + return singletonList(new ItemMovedToList(item.getId(), command.getList())); + } + } + + private void logValidationFailed(final Item currentState, final ItemCommand command) { + log.warn("Received command {} which failed to validate against the current state of aggregate {}. " + + "Skipping this command.", command, currentState); + } + + @Override + public CompletableFuture onEvent(final ItemEvent event) { + + return CompletableFuture.runAsync(() -> { + + if (event instanceof ItemCreated) { + createNewItem((ItemCreated) event); + } else { + modifyExistingItem(event); + } + }); + } + + private void createNewItem(final ItemCreated event) { + final Item newItem = new Item(event); + items.put(newItem.getId(), newItem); + } + + private void modifyExistingItem(final ItemEvent event) { + final Item currentState = items.get(event.getItemId()); + + if (currentState == null) { + throw new IllegalStateException("Event " + event.toString() + " cannot be applied. 
There is no state for item with ID " + event.getItemId() + "."); + } + + currentState.project(event); + items.put(currentState.getId(), currentState); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignDueDate.java b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignDueDate.java new file mode 100644 index 0000000..3cee834 --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignDueDate.java @@ -0,0 +1,27 @@ +package net.mguenther.gtd.domain.commands; + +import java.util.Date; +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class AssignDueDate extends ItemCommand { + + private final Date dueDate; + + public AssignDueDate(final String itemId, final Date dueDate) { + super(itemId); + this.dueDate = dueDate; + } + + public Date getDueDate() { + return dueDate; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "AssignDueDate{id=%s, dueDate=%s}", getItemId(), getDueDate()); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignRequiredTime.java b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignRequiredTime.java new file mode 100644 index 0000000..1cff7a6 --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignRequiredTime.java @@ -0,0 +1,26 @@ +package net.mguenther.gtd.domain.commands; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class AssignRequiredTime extends ItemCommand { + + private final int requiredTime; + + public AssignRequiredTime(final String itemId, final int requiredTime) { + super(itemId); + this.requiredTime = requiredTime; + } + + public int getRequiredTime() { + return requiredTime; + } + + @Override + public String toString() { 
+ return String.format(Locale.getDefault(), "AssignRequiredTime{id=%s, requiredTime=%s}", getItemId(), getRequiredTime()); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignTags.java b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignTags.java new file mode 100644 index 0000000..4c5d284 --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/AssignTags.java @@ -0,0 +1,31 @@ +package net.mguenther.gtd.domain.commands; + +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class AssignTags extends ItemCommand { + + private final List tags = new ArrayList<>(); + + public AssignTags(final String itemId, final List tags) { + super(itemId); + this.tags.addAll(tags); + } + + public List getTags() { + return Collections.unmodifiableList(tags); + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "AssignTags{id=%s, tags=%s}", getItemId(), StringUtils.join(getTags(), ", ")); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/ConcludeItem.java b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/ConcludeItem.java new file mode 100644 index 0000000..8a99443 --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/ConcludeItem.java @@ -0,0 +1,19 @@ +package net.mguenther.gtd.domain.commands; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class ConcludeItem extends ItemCommand { + + public ConcludeItem(final String itemId) { + super(itemId); + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "ConcludeItem{id=%s}", 
/**
 * Base class for all commands that target a single to-do item, identified by
 * its item ID.
 *
 * @author Markus Günther (markus.guenther@gmail.com)
 * @author Boris Fresow (bfresow@gmail.com)
 */
public abstract class ItemCommand {

    // Identifier of the item this command refers to.
    private final String id;

    /**
     * @param itemId identifier of the item this command refers to
     */
    public ItemCommand(final String itemId) {
        this.id = itemId;
    }

    /**
     * @return the identifier of the targeted item
     */
    public String getItemId() {
        return id;
    }
}
b/gtd-command-side/src/main/java/net/mguenther/gtd/domain/commands/MoveItemToList.java @@ -0,0 +1,26 @@ +package net.mguenther.gtd.domain.commands; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class MoveItemToList extends ItemCommand { + + private final String list; + + public MoveItemToList(final String itemId, final String list) { + super(itemId); + this.list = list; + } + + public String getList() { + return list; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "MoveItemToList{id=%s, list=%s}", getItemId(), getList()); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/ItemEventConsumer.java b/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/ItemEventConsumer.java new file mode 100644 index 0000000..cf75996 --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/ItemEventConsumer.java @@ -0,0 +1,47 @@ +package net.mguenther.gtd.kafka; + +import net.mguenther.gtd.domain.EventHandler; +import net.mguenther.gtd.domain.event.ItemEvent; +import net.mguenther.gtd.kafka.serialization.AvroItemEvent; +import net.mguenther.gtd.kafka.serialization.ItemEventConverter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.support.Acknowledgment; +import org.springframework.stereotype.Component; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Component +public class ItemEventConsumer { + + private static final Logger log = LoggerFactory.getLogger(ItemEventConsumer.class); + + private final ItemEventConverter converter; + + private final EventHandler eventHandler; + + @Autowired + public ItemEventConsumer(final ItemEventConverter converter, + final EventHandler 
eventHandler) { + this.converter = converter; + this.eventHandler = eventHandler; + } + + @KafkaListener(topics = "${gtd.topic}", group = "getting-things-done") + public void consume(final AvroItemEvent itemEvent, final Acknowledgment ack) { + final ItemEvent event = converter.to(itemEvent); + log.debug("Received event {}. Trying to apply it to the latest state of aggregate with ID {}.", event, event.getItemId()); + try { + eventHandler + .onEvent(event) + .thenRun(ack::acknowledge); + } catch (Exception e) { + // log the exception and do *not* acknowledge the event + log.warn("Unable to apply event {} to the latest state of aggregate with ID {}.", event, event.getItemId(), e); + } + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/TransactionalItemEventPublisher.java b/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/TransactionalItemEventPublisher.java new file mode 100644 index 0000000..26a8ecc --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/TransactionalItemEventPublisher.java @@ -0,0 +1,62 @@ +package net.mguenther.gtd.kafka; + +import net.mguenther.gtd.domain.ItemEventPublisher; +import net.mguenther.gtd.domain.event.ItemEvent; +import net.mguenther.gtd.kafka.serialization.AvroItemEvent; +import net.mguenther.gtd.kafka.serialization.ItemEventConverter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.support.SendResult; +import org.springframework.stereotype.Component; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Component +public class TransactionalItemEventPublisher implements ItemEventPublisher { + + private static final Logger log = LoggerFactory.getLogger(TransactionalItemEventPublisher.class); + + private final 
String topicName; + + private final KafkaTemplate kafkaTemplate; + + private final ItemEventConverter converter; + + @Autowired + public TransactionalItemEventPublisher(@Value("${gtd.topic}") final String topicName, + final KafkaTemplate kafkaTemplate, + final ItemEventConverter converter) { + this.topicName = topicName; + this.kafkaTemplate = kafkaTemplate; + this.converter = converter; + } + + @Override + public void log(final ItemEvent itemEvent) { + log.info("Attempting to log {} to topic {}.", itemEvent, topicName); + kafkaTemplate.executeInTransaction(operations -> { + final String key = itemEvent.getItemId(); + operations + .send(topicName, key, converter.from(itemEvent)) + .addCallback(this::onSuccess, this::onFailure); + return true; + }); + } + + private void onSuccess(final SendResult result) { + log.info("AvroItemEvent '{}' has been written to topic-partition {}-{} with ingestion timestamp {}.", + result.getProducerRecord().key(), + result.getRecordMetadata().topic(), + result.getRecordMetadata().partition(), + result.getRecordMetadata().timestamp()); + } + + private void onFailure(final Throwable t) { + log.warn("Unable to write AvroItemEvent to topic {}.", topicName, t); + } +} diff --git a/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/TransactionalItemEventPublisherConfig.java b/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/TransactionalItemEventPublisherConfig.java new file mode 100644 index 0000000..d257b3e --- /dev/null +++ b/gtd-command-side/src/main/java/net/mguenther/gtd/kafka/TransactionalItemEventPublisherConfig.java @@ -0,0 +1,45 @@ +package net.mguenther.gtd.kafka; + +import net.mguenther.gtd.kafka.serialization.AvroItemEvent; +import net.mguenther.gtd.kafka.serialization.ItemEventSerializer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringSerializer; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.kafka.core.DefaultKafkaProducerFactory; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.core.ProducerFactory; + +import java.util.HashMap; +import java.util.Map; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Configuration +public class TransactionalItemEventPublisherConfig { + + @Bean + public ProducerFactory producerFactory() { + final Map config = new HashMap<>(); + // TODO (mgu): Extract to config value + config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092"); + config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); + config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ItemEventSerializer.class); + config.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1); + config.put(ProducerConfig.RETRIES_CONFIG, Integer.MAX_VALUE); + config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true); + config.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "getting-things-done"); + final DefaultKafkaProducerFactory factory = + new DefaultKafkaProducerFactory<>(config); + factory.setTransactionIdPrefix("getting-things-done"); + return factory; + } + + @Bean + public KafkaTemplate kafkaTemplate(@Autowired ProducerFactory factory) { + return new KafkaTemplate<>(factory); + } +} diff --git a/gtd-command-side/src/main/resources/application.yml b/gtd-command-side/src/main/resources/application.yml new file mode 100644 index 0000000..6f24908 --- /dev/null +++ b/gtd-command-side/src/main/resources/application.yml @@ -0,0 +1,33 @@ +server: + contextPath: /api + port: 8089 + +management: + security: + enabled: false + +spring: + kafka: + bootstrapServers: localhost:9092 + consumer: + groupId: getting-things-done-command + keyDeserializer: org.apache.kafka.common.serialization.StringDeserializer + 
valueDeserializer: net.mguenther.gtd.kafka.serialization.ItemEventDeserializer + autoOffsetReset: earliest + enableAutoCommit: false + listener: + ackMode: MANUAL + +gtd: + topic: topic-getting-things-done + +eureka: + client: + enabled: true + healthcheck: + enabled: true + serviceUrl: + defaultZone: ${EUREKA_CLIENT_SERVICEURL_DEFAULTZONE:http://localhost:8761/eureka/} + instance: + appname: gtd-es-command-side + preferIpAddress: true diff --git a/gtd-command-side/src/main/resources/bootstrap.yml b/gtd-command-side/src/main/resources/bootstrap.yml new file mode 100644 index 0000000..8cc9312 --- /dev/null +++ b/gtd-command-side/src/main/resources/bootstrap.yml @@ -0,0 +1,3 @@ +spring: + application: + name: gtd-es-command-side diff --git a/gtd-common/pom.xml b/gtd-common/pom.xml new file mode 100644 index 0000000..f2e963e --- /dev/null +++ b/gtd-common/pom.xml @@ -0,0 +1,26 @@ + + + + 4.0.0 + + + net.mguenther.gtd + gtd-parent + 0.1.0-SNAPSHOT + + + gtd-common + 0.1.0-SNAPSHOT + [GTD] Common Data Types (Events, ...) 
+ https://github.com/mguenther/spring-kafka-event-sourcing-sampler + + + + javax.persistence + javax.persistence-api + 2.2 + + + diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/EventHandler.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/EventHandler.java new file mode 100644 index 0000000..81b31b6 --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/EventHandler.java @@ -0,0 +1,14 @@ +package net.mguenther.gtd.domain; + +import net.mguenther.gtd.domain.event.ItemEvent; + +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public interface EventHandler { + + CompletableFuture onEvent(ItemEvent event); +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/Item.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/Item.java new file mode 100644 index 0000000..be8600c --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/Item.java @@ -0,0 +1,133 @@ +package net.mguenther.gtd.domain; + +import net.mguenther.gtd.domain.event.DueDateAssigned; +import net.mguenther.gtd.domain.event.ItemConcluded; +import net.mguenther.gtd.domain.event.ItemCreated; +import net.mguenther.gtd.domain.event.ItemEvent; +import net.mguenther.gtd.domain.event.ItemMovedToList; +import net.mguenther.gtd.domain.event.RequiredTimeAssigned; +import net.mguenther.gtd.domain.event.TagAssigned; +import net.mguenther.gtd.domain.event.TagRemoved; + +import javax.persistence.Column; +import javax.persistence.ElementCollection; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; + +/** + * @author Markus Günther (markus.guenther@gmail.com) 
+ * @author Boris Fresow (bfresow@gmail.com) + */ +@Entity +@Table(name = "ITEM") +public class Item implements Serializable { + + @Id + @Column(name = "ID", unique = true, nullable = false, updatable = false) + private String id; + + @Column(name = "DESCRIPTION", nullable = false) + private String description; + + @Column(name = "REQUIRED_TIME") + private int requiredTime; + + @Column(name = "DUE_DATE") + @Temporal(TemporalType.DATE) + private Date dueDate; + + @ElementCollection(targetClass = String.class, fetch = FetchType.EAGER) + @Column(name = "TAGS") + private List tags; + + @Column(name = "ASSOCIATED_LIST") + private String associatedList; + + @Column(name = "DONE") + private boolean done; + + public Item() { /* to make JPA happy */ } + + public Item(final ItemCreated event) { + this.id = event.getItemId(); + this.description = event.getDescription(); + this.tags = new ArrayList<>(); + } + + public String getId() { + return id; + } + + public boolean isDone() { + return done; + } + + public String getDescription() { + return description; + } + + public int getRequiredTime() { + return requiredTime; + } + + public Date getDueDate() { + return dueDate; + } + + public List getTags() { + return Collections.unmodifiableList(tags); + } + + public String getAssociatedList() { + return associatedList; + } + + public void project(final ItemEvent event) { + if (event instanceof DueDateAssigned) project((DueDateAssigned) event); + else if (event instanceof RequiredTimeAssigned) project((RequiredTimeAssigned) event); + else if (event instanceof TagAssigned) project((TagAssigned) event); + else if (event instanceof ItemConcluded) project((ItemConcluded) event); + else if (event instanceof ItemMovedToList) project((ItemMovedToList) event); + else if (event instanceof TagRemoved) project((TagRemoved) event); + else throw new IllegalStateException("Unrecognized event: " + event.toString()); + } + + public void project(final DueDateAssigned event) { + this.dueDate = 
event.getDueDate(); + } + + public void project(final RequiredTimeAssigned event) { + this.requiredTime = event.getRequiredTime(); + } + + public void project(final TagAssigned event) { + synchronized (this) { + if (!tags.contains(event.getTag())) { + tags.add(event.getTag()); + } + } + } + + public void project(final TagRemoved event) { + synchronized (this) { + tags.remove(event.getTag()); + } + } + + public void project(final ItemConcluded event) { + this.done = true; + } + + public void project(final ItemMovedToList event) { + this.associatedList = event.getList(); + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/DueDateAssigned.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/DueDateAssigned.java new file mode 100644 index 0000000..ba9a199 --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/DueDateAssigned.java @@ -0,0 +1,32 @@ +package net.mguenther.gtd.domain.event; + +import java.util.Date; +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class DueDateAssigned extends ItemEvent { + + private final Date dueDate; + + public DueDateAssigned(final String itemId, final Date dueDate) { + super(itemId); + this.dueDate = dueDate; + } + + public DueDateAssigned(final String eventId, final long timestamp, final String itemId, final Date dueDate) { + super(eventId, timestamp, itemId); + this.dueDate = dueDate; + } + + public Date getDueDate() { + return dueDate; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "DueDateAssigned{eventId=%s, itemId=%s, dueDate=%s}", getEventId(), getItemId(), getDueDate()); + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemConcluded.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemConcluded.java new file mode 100644 index 0000000..1beccd6 --- /dev/null +++ 
b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemConcluded.java @@ -0,0 +1,23 @@ +package net.mguenther.gtd.domain.event; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class ItemConcluded extends ItemEvent { + + public ItemConcluded(final String itemId) { + super(itemId); + } + + public ItemConcluded(final String eventId, final long timestamp, final String itemId) { + super(eventId, timestamp, itemId); + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "ItemConcluded{eventId=%s, itemId=%s}", getEventId(), getItemId()); + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemCreated.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemCreated.java new file mode 100644 index 0000000..9223f27 --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemCreated.java @@ -0,0 +1,31 @@ +package net.mguenther.gtd.domain.event; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class ItemCreated extends ItemEvent { + + private final String description; + + public ItemCreated(final String itemId, final String description) { + super(itemId); + this.description = description; + } + + public ItemCreated(final String eventId, final long timestamp, final String itemId, final String description) { + super(eventId, timestamp, itemId); + this.description = description; + } + + public String getDescription() { + return description; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "ItemCreated{eventId=%s, itemId=%s, description=%s}", getEventId(), getItemId(), getDescription()); + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemEvent.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemEvent.java 
new file mode 100644 index 0000000..4ec8a5e --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemEvent.java @@ -0,0 +1,36 @@ +package net.mguenther.gtd.domain.event; + +import java.util.UUID; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +abstract public class ItemEvent { + + private final String eventId; + private final long timestamp; + private final String itemId; + + public ItemEvent(final String itemId) { + this(UUID.randomUUID().toString().substring(0, 7), System.currentTimeMillis(), itemId); + } + + public ItemEvent(final String eventId, final long timestamp, final String itemId) { + this.eventId = eventId; + this.timestamp = timestamp; + this.itemId = itemId; + } + + public String getEventId() { + return eventId; + } + + public long getTimestamp() { + return timestamp; + } + + public String getItemId() { + return itemId; + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemMovedToList.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemMovedToList.java new file mode 100644 index 0000000..b7151d9 --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/ItemMovedToList.java @@ -0,0 +1,31 @@ +package net.mguenther.gtd.domain.event; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class ItemMovedToList extends ItemEvent { + + private final String list; + + public ItemMovedToList(final String itemId, final String list) { + super(itemId); + this.list = list; + } + + public ItemMovedToList(final String eventId, final long timestamp, final String itemId, final String list) { + super(eventId, timestamp, itemId); + this.list = list; + } + + public String getList() { + return list; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "ItemMovedToList{eventId=%s, itemId=%s, 
list=%s}", getEventId(), getItemId(), getList()); + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/RequiredTimeAssigned.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/RequiredTimeAssigned.java new file mode 100644 index 0000000..1a7e91e --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/RequiredTimeAssigned.java @@ -0,0 +1,31 @@ +package net.mguenther.gtd.domain.event; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class RequiredTimeAssigned extends ItemEvent { + + private final int requiredTime; + + public RequiredTimeAssigned(final String itemId, final int requiredTime) { + super(itemId); + this.requiredTime = requiredTime; + } + + public RequiredTimeAssigned(final String eventId, final long timestamp, final String itemId, final int requiredTime) { + super(eventId, timestamp, itemId); + this.requiredTime = requiredTime; + } + + public int getRequiredTime() { + return requiredTime; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "RequiredTimeAssigned{eventId=%s, itemId=%s, requiredTime=%s}", getEventId(), getItemId(), getRequiredTime()); + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/TagAssigned.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/TagAssigned.java new file mode 100644 index 0000000..f641232 --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/TagAssigned.java @@ -0,0 +1,31 @@ +package net.mguenther.gtd.domain.event; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class TagAssigned extends ItemEvent { + + private final String tag; + + public TagAssigned(final String itemId, final String tag) { + super(itemId); + this.tag = tag; + } + + public TagAssigned(final String eventId, final 
long timestamp, final String itemId, final String tag) { + super(eventId, timestamp, itemId); + this.tag = tag; + } + + public String getTag() { + return tag; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "TagAssigned{eventId=%s, itemId=%s, tag=%s}", getEventId(), getItemId(), getTag()); + } +} diff --git a/gtd-common/src/main/java/net/mguenther/gtd/domain/event/TagRemoved.java b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/TagRemoved.java new file mode 100644 index 0000000..f7fa6ae --- /dev/null +++ b/gtd-common/src/main/java/net/mguenther/gtd/domain/event/TagRemoved.java @@ -0,0 +1,31 @@ +package net.mguenther.gtd.domain.event; + +import java.util.Locale; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public class TagRemoved extends ItemEvent { + + private final String tag; + + public TagRemoved(final String itemId, final String tag) { + super(itemId); + this.tag = tag; + } + + public TagRemoved(final String eventId, final long timestamp, final String itemId, final String tag) { + super(eventId, timestamp, itemId); + this.tag = tag; + } + + public String getTag() { + return tag; + } + + @Override + public String toString() { + return String.format(Locale.getDefault(), "TagRemoved{eventId=%s, itemId=%s, tag=%s}", getEventId(), getItemId(), getTag()); + } +} diff --git a/gtd-discovery-service/Dockerfile b/gtd-discovery-service/Dockerfile new file mode 100644 index 0000000..6d877ed --- /dev/null +++ b/gtd-discovery-service/Dockerfile @@ -0,0 +1,4 @@ +FROM ewolff/docker-java +COPY target/gtd-discovery-service-0.1.0-SNAPSHOT.jar . 
+CMD /usr/bin/java -Xmx256m -Xms256m -jar gtd-discovery-service-0.1.0-SNAPSHOT.jar +EXPOSE 8761 \ No newline at end of file diff --git a/gtd-discovery-service/pom.xml b/gtd-discovery-service/pom.xml new file mode 100644 index 0000000..8954c4a --- /dev/null +++ b/gtd-discovery-service/pom.xml @@ -0,0 +1,46 @@ + + + + 4.0.0 + + + net.mguenther.gtd + gtd-parent + 0.1.0-SNAPSHOT + + + gtd-discovery-service + 0.1.0-SNAPSHOT + [GTD] Discovery Service + https://github.com/mguenther/spring-kafka-event-sourcing-sampler + + + UTF-8 + UTF-8 + 1.8 + + + + + org.springframework.cloud + spring-cloud-starter-eureka-server + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/gtd-discovery-service/src/main/java/net/mguenther/gtd/DiscoveryServiceApplication.java b/gtd-discovery-service/src/main/java/net/mguenther/gtd/DiscoveryServiceApplication.java new file mode 100644 index 0000000..b51acdd --- /dev/null +++ b/gtd-discovery-service/src/main/java/net/mguenther/gtd/DiscoveryServiceApplication.java @@ -0,0 +1,18 @@ +package net.mguenther.gtd; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.netflix.eureka.server.EnableEurekaServer; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@SpringBootApplication +@EnableEurekaServer +public class DiscoveryServiceApplication { + + public static void main(String[] args) { + SpringApplication.run(DiscoveryServiceApplication.class, args); + } +} diff --git a/gtd-discovery-service/src/main/resources/application.yml b/gtd-discovery-service/src/main/resources/application.yml new file mode 100644 index 0000000..85409c4 --- /dev/null +++ b/gtd-discovery-service/src/main/resources/application.yml @@ -0,0 +1,12 @@ +server: + port: 8761 + +eureka: + instance: + hostname: localhost + 
preferIpAddress: true + client: + registerWithEureka: false + fetchRegistry: false + serviceUrl: + defaultZone: http://${eureka.instance.hostname}:${server.port}/eureka/ diff --git a/gtd-discovery-service/src/main/resources/bootstrap.yml b/gtd-discovery-service/src/main/resources/bootstrap.yml new file mode 100644 index 0000000..73343aa --- /dev/null +++ b/gtd-discovery-service/src/main/resources/bootstrap.yml @@ -0,0 +1,3 @@ +spring: + application: + name: discovery diff --git a/gtd-query-side/Dockerfile b/gtd-query-side/Dockerfile new file mode 100644 index 0000000..31e25ba --- /dev/null +++ b/gtd-query-side/Dockerfile @@ -0,0 +1,4 @@ +FROM ewolff/docker-java +COPY target/gtd-query-side-0.1.0-SNAPSHOT.jar . +CMD /usr/bin/java -Xmx256m -Xms256m -jar gtd-query-side-0.1.0-SNAPSHOT.jar +EXPOSE 8080 \ No newline at end of file diff --git a/gtd-query-side/pom.xml b/gtd-query-side/pom.xml new file mode 100644 index 0000000..0adcf0e --- /dev/null +++ b/gtd-query-side/pom.xml @@ -0,0 +1,133 @@ + + + + 4.0.0 + + + net.mguenther.gtd + gtd-parent + 0.1.0-SNAPSHOT + + + gtd-query-side + 0.1.0-SNAPSHOT + [GTD] Query Side + https://github.com/mguenther/spring-kafka-event-sourcing-sampler + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + lombok.launch.AnnotationProcessorHider$AnnotationProcessor + + + 1.8 + 1.8 + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + + + + net.mguenther.gtd + gtd-codec + 0.1.0-SNAPSHOT + + + net.mguenther.gtd + gtd-common + 0.1.0-SNAPSHOT + + + + org.springframework.boot + spring-boot-starter + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-configuration-processor + true + + + org.springframework.boot + spring-boot-starter-validation + + + org.springframework + spring-tx + + + org.springframework.kafka + spring-kafka + + + org.springframework.boot + spring-boot-starter-data-jpa + + + + org.springframework.cloud + spring-cloud-starter-eureka + + + 
org.springframework.boot + spring-boot-starter-actuator + + + + com.h2database + h2 + runtime + + + + org.apache.avro + avro + + + + org.slf4j + slf4j-api + 1.7.5 + + + org.slf4j + slf4j-log4j12 + 1.7.5 + + + + com.fasterxml.jackson.core + jackson-databind + + + org.apache.commons + commons-lang3 + + + org.projectlombok + lombok + + + + junit + junit + test + + + diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/GtdQueryApplication.java b/gtd-query-side/src/main/java/net/mguenther/gtd/GtdQueryApplication.java new file mode 100644 index 0000000..bb1bd08 --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/GtdQueryApplication.java @@ -0,0 +1,20 @@ +package net.mguenther.gtd; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.client.discovery.EnableDiscoveryClient; +import org.springframework.web.servlet.config.annotation.EnableWebMvc; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@SpringBootApplication +@EnableDiscoveryClient +@EnableWebMvc +public class GtdQueryApplication { + + public static void main(String[] args) { + SpringApplication.run(GtdQueryApplication.class, args); + } +} diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/api/ItemQueryResource.java b/gtd-query-side/src/main/java/net/mguenther/gtd/api/ItemQueryResource.java new file mode 100644 index 0000000..f198646 --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/api/ItemQueryResource.java @@ -0,0 +1,42 @@ +package net.mguenther.gtd.api; + +import net.mguenther.gtd.domain.Item; +import net.mguenther.gtd.domain.ItemView; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import 
org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@RestController +public class ItemQueryResource { + + private static final Logger log = LoggerFactory.getLogger(ItemQueryResource.class); + + private final ItemView itemView; + + @Autowired + public ItemQueryResource(final ItemView itemView) { + this.itemView = itemView; + } + + @RequestMapping(path = "/items/{itemId}", method = RequestMethod.GET, produces = "application/json") + public CompletableFuture> showItem(@PathVariable("itemId") String itemId) { + + log.info("Received a show item request for item with ID {}.", itemId); + + return itemView.getItem(itemId) + .thenApply(optionalItem -> optionalItem.map(ResponseEntity::ok).orElse(ResponseEntity.notFound().build())) + .exceptionally(e -> ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build()); + } +} diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/api/ItemsQueryResource.java b/gtd-query-side/src/main/java/net/mguenther/gtd/api/ItemsQueryResource.java new file mode 100644 index 0000000..f8b8d51 --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/api/ItemsQueryResource.java @@ -0,0 +1,43 @@ +package net.mguenther.gtd.api; + +import net.mguenther.gtd.domain.Item; +import net.mguenther.gtd.domain.ItemView; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.RequestMapping; +import 
org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@RestController +public class ItemsQueryResource { + + private static final Logger log = LoggerFactory.getLogger(ItemsQueryResource.class); + + private final ItemView itemView; + + @Autowired + public ItemsQueryResource(final ItemView itemView) { + this.itemView = itemView; + } + + @RequestMapping(path = "/items", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE) + public CompletableFuture>> listAllItems() { + + log.info("Received a list all managed items request."); + + return itemView.getItems() + .thenApply(ResponseEntity::ok) + .exceptionally(e -> ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build()); + } +} diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/domain/DefaultItemView.java b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/DefaultItemView.java new file mode 100644 index 0000000..313a79c --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/DefaultItemView.java @@ -0,0 +1,41 @@ +package net.mguenther.gtd.domain; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Component +public class DefaultItemView implements ItemView { + + private final ItemRepository repository; + + @Autowired + public DefaultItemView(final ItemRepository repository) { + this.repository = repository; + } + + @Override + public CompletableFuture> getItems() { + + return 
CompletableFuture.supplyAsync(() -> { + final List items = new ArrayList<>(); + items.addAll(repository.findAll()); + return Collections.unmodifiableList(items); + }); + } + + @Override + public CompletableFuture> getItem(final String itemId) { + + return CompletableFuture.supplyAsync(() -> Optional.ofNullable(repository.findOne(itemId))); + } +} diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemRepository.java b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemRepository.java new file mode 100644 index 0000000..c2d7cdb --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemRepository.java @@ -0,0 +1,12 @@ +package net.mguenther.gtd.domain; + +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.stereotype.Repository; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Repository +public interface ItemRepository extends JpaRepository { +} diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemUpdater.java b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemUpdater.java new file mode 100644 index 0000000..f503c38 --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemUpdater.java @@ -0,0 +1,52 @@ +package net.mguenther.gtd.domain; + +import net.mguenther.gtd.domain.event.ItemCreated; +import net.mguenther.gtd.domain.event.ItemEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Service +public class ItemUpdater implements EventHandler { + + private static final Logger log = LoggerFactory.getLogger(ItemUpdater.class); + + private final ItemRepository repository; + + @Autowired + public 
ItemUpdater(final ItemRepository repository) { + this.repository = repository; + } + + @Override + public CompletableFuture onEvent(final ItemEvent event) { + + return CompletableFuture.runAsync(() -> { + if (event instanceof ItemCreated) { + createNewItem((ItemCreated) event); + } else { + modifyExistingItem(event); + } + }); + } + + private void createNewItem(final ItemCreated itemCreated) { + final Item newItem = new Item(itemCreated); + repository.save(newItem); + log.info("Applied event {} and created new item with current state {}.", itemCreated, newItem); + } + + private void modifyExistingItem(final ItemEvent event) { + final Item item = repository.findOne(event.getItemId()); + item.project(event); + final Item updatedItem = repository.save(item); + log.info("Applied event {} to the aggregate with ID {} and current state {}.", event, event.getItemId(), updatedItem); + } +} diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemView.java b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemView.java new file mode 100644 index 0000000..195bded --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/domain/ItemView.java @@ -0,0 +1,15 @@ +package net.mguenther.gtd.domain; + +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +public interface ItemView { + + CompletableFuture> getItems(); + CompletableFuture> getItem(String itemId); +} diff --git a/gtd-query-side/src/main/java/net/mguenther/gtd/kafka/ItemEventConsumer.java b/gtd-query-side/src/main/java/net/mguenther/gtd/kafka/ItemEventConsumer.java new file mode 100644 index 0000000..cf75996 --- /dev/null +++ b/gtd-query-side/src/main/java/net/mguenther/gtd/kafka/ItemEventConsumer.java @@ -0,0 +1,47 @@ +package net.mguenther.gtd.kafka; + +import net.mguenther.gtd.domain.EventHandler; +import 
net.mguenther.gtd.domain.event.ItemEvent; +import net.mguenther.gtd.kafka.serialization.AvroItemEvent; +import net.mguenther.gtd.kafka.serialization.ItemEventConverter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.support.Acknowledgment; +import org.springframework.stereotype.Component; + +/** + * @author Markus Günther (markus.guenther@gmail.com) + * @author Boris Fresow (bfresow@gmail.com) + */ +@Component +public class ItemEventConsumer { + + private static final Logger log = LoggerFactory.getLogger(ItemEventConsumer.class); + + private final ItemEventConverter converter; + + private final EventHandler eventHandler; + + @Autowired + public ItemEventConsumer(final ItemEventConverter converter, + final EventHandler eventHandler) { + this.converter = converter; + this.eventHandler = eventHandler; + } + + @KafkaListener(topics = "${gtd.topic}", group = "getting-things-done") + public void consume(final AvroItemEvent itemEvent, final Acknowledgment ack) { + final ItemEvent event = converter.to(itemEvent); + log.debug("Received event {}. 
Trying to apply it to the latest state of aggregate with ID {}.", event, event.getItemId()); + try { + eventHandler + .onEvent(event) + .thenRun(ack::acknowledge); + } catch (Exception e) { + // log the exception and do *not* acknowledge the event + log.warn("Unable to apply event {} to the latest state of aggregate with ID {}.", event, event.getItemId(), e); + } + } +} diff --git a/gtd-query-side/src/main/resources/application.yml b/gtd-query-side/src/main/resources/application.yml new file mode 100644 index 0000000..95c1fac --- /dev/null +++ b/gtd-query-side/src/main/resources/application.yml @@ -0,0 +1,33 @@ +server: + contextPath: /api + port: 8090 + +spring: + kafka: + bootstrapServers: localhost:9092 + consumer: + groupId: getting-things-done-query + keyDeserializer: org.apache.kafka.common.serialization.StringDeserializer + valueDeserializer: net.mguenther.gtd.kafka.serialization.ItemEventDeserializer + autoOffsetReset: latest + enableAutoCommit: false + listener: + ackMode: MANUAL + +gtd: + topic: topic-getting-things-done + +management: + security: + enabled: false + +eureka: + client: + enabled: true + healthcheck: + enabled: true + serviceUrl: + defaultZone: ${EUREKA_CLIENT_SERVICEURL_DEFAULTZONE:http://localhost:8761/eureka/} + instance: + appname: gtd-es-query-side + preferIpAddress: true diff --git a/gtd-query-side/src/main/resources/bootstrap.yml b/gtd-query-side/src/main/resources/bootstrap.yml new file mode 100644 index 0000000..d11c925 --- /dev/null +++ b/gtd-query-side/src/main/resources/bootstrap.yml @@ -0,0 +1,3 @@ +spring: + application: + name: gtd-es-query-side diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..bb660bf --- /dev/null +++ b/pom.xml @@ -0,0 +1,148 @@ + + + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 1.5.4.RELEASE + + + + net.mguenther.gtd + gtd-parent + pom + 0.1.0-SNAPSHOT + [GTD] Parent + https://github.com/mguenther/spring-kafka-event-sourcing-sampler + + + UTF-8 + + 4.12 + 1.3 + 
1.7.22 + 1.3.0.BUILD-SNAPSHOT + Dalston.SR2 + 1.8.1 + 3.5 + + 1.8.1 + 3.5 + + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/libs-snapshot + + true + + + + + + gtd-codec + gtd-common + gtd-command-side + gtd-query-side + gtd-api-gateway + gtd-discovery-service + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${plugin.compiler.version} + + 1.8 + 1.8 + + + + org.apache.avro + avro-maven-plugin + ${plugin.avro.version} + + + + + + + + + + org.springframework.kafka + spring-kafka + ${spring.kafka.version} + + + org.springframework.kafka + spring-kafka-test + ${spring.kafka.version} + + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + + org.springframework + spring-tx + ${spring.version} + + + + org.apache.avro + avro + ${avro.version} + + + + org.apache.commons + commons-lang3 + ${commons-lang3.version} + + + + org.slf4j + jcl-over-slf4j + ${slf4j.version} + + + org.slf4j + jul-to-slf4j + ${slf4j.version} + + + org.slf4j + log4j-over-slf4j + ${slf4j.version} + + + + junit + junit + ${junit.version} + test + + + org.hamcrest + hamcrest-core + ${hamcrest.version} + test + + + + + \ No newline at end of file