BAEL-3299 - Testing a Spring Batch Job (#7982)

* BAEL-3299: First version. Broken tests

* BAEL-3299: Fix tests

* BAEL-3299: Include gitignore for output files

* BAEL-3299: Example of writer unit test

* BAEL-3299: Cleaned up and included more tests

* BAEL-3299: Updated to use JobParameters

* BAEL-3299: Fixed broken startup and included cleanup for tests

* BAEL-3299: Fine tuned version. Fixed formatting.

* BAEL-3299: Cleaned up redundant stuff

* BAEL-3299: Fixed formatting

* BAEL-3299: Moved source code in spring-batch module

* BAEL-3299: Fixed broken tests
This commit is contained in:
Sorin Zamfir
2019-10-15 05:32:04 +03:00
committed by maibin
parent 3e4c964e6a
commit 9199d0c895
25 changed files with 750 additions and 32 deletions

View File

@@ -1 +1,2 @@
output.csv output.csv
output.json

View File

@@ -15,12 +15,30 @@
</parent> </parent>
<dependencies> <dependencies>
<!-- JAXB APIs & runtime no longer provided in JDK 11 -->
<!-- see http://openjdk.java.net/jeps/320 -->
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>${jaxb.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
<version>${jaxb.version}</version>
<scope>runtime</scope>
</dependency>
<!-- SQLite database driver --> <!-- SQLite database driver -->
<dependency> <dependency>
<groupId>org.xerial</groupId> <groupId>org.xerial</groupId>
<artifactId>sqlite-jdbc</artifactId> <artifactId>sqlite-jdbc</artifactId>
<version>${sqlite.version}</version> <version>${sqlite.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework</groupId> <groupId>org.springframework</groupId>
<artifactId>spring-oxm</artifactId> <artifactId>spring-oxm</artifactId>
@@ -32,40 +50,67 @@
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework</groupId> <groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId> <artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version> <version>${spring.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework.batch</groupId> <groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-core</artifactId> <artifactId>spring-batch-core</artifactId>
<version>${spring.batch.version}</version> <version>${spring.batch.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework.batch</groupId> <groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-test</artifactId> <artifactId>spring-batch-test</artifactId>
<version>${spring.batch.version}</version> <version>${spring.batch.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.opencsv</groupId> <groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId> <artifactId>opencsv</artifactId>
<version>${opencsv.version}</version> <version>${opencsv.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-batch</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.5.0</version>
<scope>runtime</scope>
</dependency>
<dependency> <dependency>
<groupId>org.awaitility</groupId> <groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId> <artifactId>awaitility</artifactId>
<version>${awaitility.version}</version> <version>${awaitility.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<version>${spring.boot.version}</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<properties> <properties>
<spring.version>5.0.3.RELEASE</spring.version> <spring.version>5.2.0.RELEASE</spring.version>
<spring.batch.version>4.0.0.RELEASE</spring.batch.version> <spring.batch.version>4.2.0.RELEASE</spring.batch.version>
<spring.boot.version>2.1.9.RELEASE</spring.boot.version>
<sqlite.version>3.15.1</sqlite.version> <sqlite.version>3.15.1</sqlite.version>
<opencsv.version>4.1</opencsv.version> <opencsv.version>4.1</opencsv.version>
<jaxb.version>2.3.1</jaxb.version>
<awaitility.version>3.1.1</awaitility.version> <awaitility.version>3.1.1</awaitility.version>
</properties> </properties>

View File

@@ -1,5 +1,7 @@
package org.baeldung.batch; package org.baeldung.batch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job; import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParameters;
@@ -8,6 +10,9 @@ import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class App { public class App {
private static final Logger LOGGER = LoggerFactory.getLogger(App.class);
public static void main(final String[] args) { public static void main(final String[] args) {
// Spring Java config // Spring Java config
final AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); final AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
@@ -27,19 +32,16 @@ public class App {
final JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher"); final JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
final Job job = (Job) context.getBean(batchJobName); final Job job = (Job) context.getBean(batchJobName);
System.out.println("----------------------------------------"); LOGGER.info("Starting the batch job: {}", batchJobName);
System.out.println("Starting the batch job: " + batchJobName);
try { try {
// To enable multiple execution of a job with the same parameters // To enable multiple execution of a job with the same parameters
JobParameters jobParameters = new JobParametersBuilder() JobParameters jobParameters = new JobParametersBuilder().addString("jobID", String.valueOf(System.currentTimeMillis()))
.addString("jobID", String.valueOf(System.currentTimeMillis())) .toJobParameters();
.toJobParameters();
final JobExecution execution = jobLauncher.run(job, jobParameters); final JobExecution execution = jobLauncher.run(job, jobParameters);
System.out.println("Job Status : " + execution.getStatus()); LOGGER.info("Job Status : {}", execution.getStatus());
System.out.println("Job succeeded");
} catch (final Exception e) { } catch (final Exception e) {
e.printStackTrace(); e.printStackTrace();
System.out.println("Job failed"); LOGGER.error("Job failed {}", e.getMessage());
} }
} }
} }

View File

@@ -86,9 +86,14 @@ public class SpringBatchConfig {
} }
@Bean @Bean
protected Step step1(@Qualifier("itemProcessor") ItemProcessor<Transaction, Transaction> processor, protected Step step1(@Qualifier("itemProcessor") ItemProcessor<Transaction, Transaction> processor, ItemWriter<Transaction> writer) throws ParseException {
ItemWriter<Transaction> writer) throws ParseException { return stepBuilderFactory
return stepBuilderFactory.get("step1").<Transaction, Transaction>chunk(10).reader(itemReader(inputCsv)).processor(processor).writer(writer).build(); .get("step1")
.<Transaction, Transaction> chunk(10)
.reader(itemReader(inputCsv))
.processor(processor)
.writer(writer)
.build();
} }
@Bean(name = "firstBatchJob") @Bean(name = "firstBatchJob")

View File

@@ -1,5 +1,7 @@
package org.baeldung.batch.partitioner; package org.baeldung.batch.partitioner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job; import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParameters;
@@ -7,6 +9,9 @@ import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class SpringbatchPartitionerApp { public class SpringbatchPartitionerApp {
private static final Logger LOGGER = LoggerFactory.getLogger(SpringbatchPartitionerApp.class);
public static void main(final String[] args) { public static void main(final String[] args) {
// Spring Java config // Spring Java config
final AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); final AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
@@ -15,14 +20,13 @@ public class SpringbatchPartitionerApp {
final JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher"); final JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
final Job job = (Job) context.getBean("partitionerJob"); final Job job = (Job) context.getBean("partitionerJob");
System.out.println("Starting the batch job"); LOGGER.info("Starting the batch job");
try { try {
final JobExecution execution = jobLauncher.run(job, new JobParameters()); final JobExecution execution = jobLauncher.run(job, new JobParameters());
System.out.println("Job Status : " + execution.getStatus()); LOGGER.info("Job Status : {}", execution.getStatus());
System.out.println("Job succeeded");
} catch (final Exception e) { } catch (final Exception e) {
e.printStackTrace(); e.printStackTrace();
System.out.println("Job failed"); LOGGER.error("Job failed {}", e.getMessage());
} }
} }
} }

View File

@@ -0,0 +1,43 @@
package org.baeldung.batchtesting;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.PropertySource;
/**
 * Boot entry point that runs the {@code transformBooksRecords} batch job once
 * at startup, passing the configured input/output file locations to the job
 * as job parameters (so step-scoped beans can resolve them).
 */
@SpringBootApplication
@PropertySource("classpath:batchtesting/application.properties")
public class SpringBatchApplication implements CommandLineRunner {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    @Qualifier("transformBooksRecords")
    private Job transformBooksRecordsJob;

    // Input CSV path, resolved from application.properties
    @Value("${file.input}")
    private String input;

    // Output JSON path, resolved from application.properties
    @Value("${file.output}")
    private String output;

    public static void main(String[] args) {
        SpringApplication.run(SpringBatchApplication.class, args);
    }

    /**
     * Launches the job after the application context is ready. The file
     * locations are handed over as string job parameters under the keys
     * {@code file.input} and {@code file.output}.
     */
    @Override
    public void run(String... args) throws Exception {
        JobParametersBuilder params = new JobParametersBuilder()
            .addString("file.input", input)
            .addString("file.output", output);
        jobLauncher.run(transformBooksRecordsJob, params.toJobParameters());
    }
}

View File

@@ -0,0 +1,137 @@
package org.baeldung.batchtesting;
import java.io.IOException;
import org.baeldung.batchtesting.model.Book;
import org.baeldung.batchtesting.model.BookDetails;
import org.baeldung.batchtesting.model.BookRecord;
import org.baeldung.batchtesting.service.BookDetailsItemProcessor;
import org.baeldung.batchtesting.service.BookItemProcessor;
import org.baeldung.batchtesting.service.BookRecordFieldSetMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.json.JacksonJsonObjectMarshaller;
import org.springframework.batch.item.json.JsonFileItemWriter;
import org.springframework.batch.item.json.builder.JsonFileItemWriterBuilder;
import org.springframework.batch.item.support.ListItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
/**
 * Batch wiring for the book-transformation job: a CSV reader, two processors,
 * a JSON writer plus an in-memory list writer, assembled into a two-step job.
 * Reader and writer are step-scoped so their file paths can come from job
 * parameters at launch time.
 */
@Configuration
@EnableBatchProcessing
public class SpringBatchConfiguration {

    // SLF4J loggers are conventionally static final (fixed: was non-final).
    private static final Logger LOGGER = LoggerFactory.getLogger(SpringBatchConfiguration.class);

    // Column names of the input CSV, in file order; also used as FieldSet keys.
    private static final String[] TOKENS = { "bookname", "bookauthor", "bookformat", "isbn", "publishyear" };

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    /**
     * Step-scoped reader for the delimited input file. The path is taken from
     * the {@code file.input} job parameter.
     */
    @Bean
    @StepScope
    public FlatFileItemReader<BookRecord> csvItemReader(@Value("#{jobParameters['file.input']}") String input) {
        FlatFileItemReaderBuilder<BookRecord> builder = new FlatFileItemReaderBuilder<>();
        FieldSetMapper<BookRecord> bookRecordFieldSetMapper = new BookRecordFieldSetMapper();
        LOGGER.info("Configuring reader to input {}", input);
        // @formatter:off
        return builder
          .name("bookRecordItemReader")
          .resource(new FileSystemResource(input))
          .delimited()
          .names(TOKENS)
          .fieldSetMapper(bookRecordFieldSetMapper)
          .build();
        // @formatter:on
    }

    /**
     * Step-scoped JSON writer. The path is taken from the {@code file.output}
     * job parameter.
     */
    @Bean
    @StepScope
    public JsonFileItemWriter<Book> jsonItemWriter(@Value("#{jobParameters['file.output']}") String output) throws IOException {
        JsonFileItemWriterBuilder<Book> builder = new JsonFileItemWriterBuilder<>();
        JacksonJsonObjectMarshaller<Book> marshaller = new JacksonJsonObjectMarshaller<>();
        LOGGER.info("Configuring writer to output {}", output);
        // @formatter:off
        return builder
          .name("bookItemWriter")
          .jsonObjectMarshaller(marshaller)
          .resource(new FileSystemResource(output))
          .build();
        // @formatter:on
    }

    /** In-memory writer used by step2; handy for asserting written items in tests. */
    @Bean
    @StepScope
    public ListItemWriter<BookDetails> listItemWriter() {
        return new ListItemWriter<>();
    }

    /** Maps a raw {@link BookRecord} to a {@link Book} (name + author). */
    @Bean
    @StepScope
    public BookItemProcessor bookItemProcessor() {
        return new BookItemProcessor();
    }

    /** Maps a raw {@link BookRecord} to a {@link BookDetails} (format, ISBN, year, name). */
    @Bean
    @StepScope
    public BookDetailsItemProcessor bookDetailsItemProcessor() {
        return new BookDetailsItemProcessor();
    }

    /** Step 1: CSV -> Book -> JSON file, in chunks of 3. */
    @Bean
    public Step step1(ItemReader<BookRecord> csvItemReader, ItemWriter<Book> jsonItemWriter) throws IOException {
        // @formatter:off
        return stepBuilderFactory
          .get("step1")
          .<BookRecord, Book> chunk(3)
          .reader(csvItemReader)
          .processor(bookItemProcessor())
          .writer(jsonItemWriter)
          .build();
        // @formatter:on
    }

    /** Step 2: CSV -> BookDetails -> in-memory list, in chunks of 3. */
    @Bean
    public Step step2(ItemReader<BookRecord> csvItemReader, ItemWriter<BookDetails> listItemWriter) {
        // @formatter:off
        return stepBuilderFactory
          .get("step2")
          .<BookRecord, BookDetails> chunk(3)
          .reader(csvItemReader)
          .processor(bookDetailsItemProcessor())
          .writer(listItemWriter)
          .build();
        // @formatter:on
    }

    /** The job: step1 followed by step2. */
    @Bean(name = "transformBooksRecords")
    public Job transformBookRecords(Step step1, Step step2) throws IOException {
        // @formatter:off
        return jobBuilderFactory
          .get("transformBooksRecords")
          .flow(step1)
          .next(step2)
          .end()
          .build();
        // @formatter:on
    }
}

View File

@@ -0,0 +1,33 @@
package org.baeldung.batchtesting.model;
/**
 * Minimal book representation produced by {@code BookItemProcessor}:
 * just the author and the title, serialized to JSON by the job's writer.
 */
public class Book {

    private String author;
    private String name;

    /** No-arg constructor, required for bean-style instantiation. */
    public Book() {
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAuthor() {
        return author;
    }

    public String getName() {
        return name;
    }

    @Override
    public String toString() {
        return String.format("Book [author=%s, name=%s]", author, name);
    }
}

View File

@@ -0,0 +1,50 @@
package org.baeldung.batchtesting.model;
/**
 * Secondary projection of a book record produced by
 * {@code BookDetailsItemProcessor}: name, format, publishing year and ISBN.
 */
public class BookDetails {

    private String bookName;
    private String bookFormat;
    private String publishingYear;
    private String bookISBN;

    public void setBookName(String bookName) {
        this.bookName = bookName;
    }

    public void setBookFormat(String bookFormat) {
        this.bookFormat = bookFormat;
    }

    public void setPublishingYear(String publishingYear) {
        this.publishingYear = publishingYear;
    }

    public void setBookISBN(String bookISBN) {
        this.bookISBN = bookISBN;
    }

    public String getBookName() {
        return bookName;
    }

    public String getBookFormat() {
        return bookFormat;
    }

    public String getPublishingYear() {
        return publishingYear;
    }

    public String getBookISBN() {
        return bookISBN;
    }

    @Override
    public String toString() {
        return String.format("BookDetails [bookName=%s, bookFormat=%s, publishingYear=%s, bookISBN=%s]",
            bookName, bookFormat, publishingYear, bookISBN);
    }
}

View File

@@ -0,0 +1,60 @@
package org.baeldung.batchtesting.model;
/**
 * Raw row read from the input CSV, one field per column
 * (name, author, format, ISBN, publishing year) — all kept as strings.
 */
public class BookRecord {

    private String bookName;
    private String bookAuthor;
    private String bookFormat;
    private String bookISBN;
    private String publishingYear;

    public String getBookName() {
        return bookName;
    }

    public String getBookAuthor() {
        return bookAuthor;
    }

    public String getBookFormat() {
        return bookFormat;
    }

    public String getBookISBN() {
        return bookISBN;
    }

    public String getPublishingYear() {
        return publishingYear;
    }

    public void setBookName(String bookName) {
        this.bookName = bookName;
    }

    public void setBookAuthor(String bookAuthor) {
        this.bookAuthor = bookAuthor;
    }

    public void setBookFormat(String bookFormat) {
        this.bookFormat = bookFormat;
    }

    public void setBookISBN(String bookISBN) {
        this.bookISBN = bookISBN;
    }

    public void setPublishingYear(String publishingYear) {
        this.publishingYear = publishingYear;
    }

    @Override
    public String toString() {
        return String.format("BookRecord [bookName=%s, bookAuthor=%s, bookFormat=%s, bookISBN=%s, publishingYear=%s]",
            bookName, bookAuthor, bookFormat, bookISBN, publishingYear);
    }
}

View File

@@ -0,0 +1,24 @@
package org.baeldung.batchtesting.service;
import org.baeldung.batchtesting.model.BookDetails;
import org.baeldung.batchtesting.model.BookRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
/**
 * Transforms a raw {@link BookRecord} into a {@link BookDetails} by copying
 * format, ISBN, publishing year and name (the author is intentionally dropped).
 */
public class BookDetailsItemProcessor implements ItemProcessor<BookRecord, BookDetails> {

    // SLF4J loggers are conventionally static final (fixed: was non-final).
    private static final Logger LOGGER = LoggerFactory.getLogger(BookDetailsItemProcessor.class);

    /**
     * @param item the CSV-backed record to convert; never returns null, so
     *             every input record yields one output item
     */
    @Override
    public BookDetails process(BookRecord item) throws Exception {
        BookDetails bookDetails = new BookDetails();
        bookDetails.setBookFormat(item.getBookFormat());
        bookDetails.setBookISBN(item.getBookISBN());
        bookDetails.setPublishingYear(item.getPublishingYear());
        bookDetails.setBookName(item.getBookName());
        LOGGER.info("Processing bookdetails {}", bookDetails);
        return bookDetails;
    }
}

View File

@@ -0,0 +1,22 @@
package org.baeldung.batchtesting.service;
import org.baeldung.batchtesting.model.Book;
import org.baeldung.batchtesting.model.BookRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
/**
 * Transforms a raw {@link BookRecord} into a {@link Book}, keeping only the
 * author and the title.
 */
public class BookItemProcessor implements ItemProcessor<BookRecord, Book> {

    // SLF4J loggers are conventionally static final (fixed: was non-final).
    private static final Logger LOGGER = LoggerFactory.getLogger(BookItemProcessor.class);

    /**
     * @param item the CSV-backed record to convert; never returns null, so
     *             every input record yields one output item
     */
    @Override
    public Book process(BookRecord item) throws Exception {
        Book book = new Book();
        book.setAuthor(item.getBookAuthor());
        book.setName(item.getBookName());
        LOGGER.info("Processing book {}", book);
        return book;
    }
}

View File

@@ -0,0 +1,22 @@
package org.baeldung.batchtesting.service;
import org.baeldung.batchtesting.model.BookRecord;
import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.FieldSet;
import org.springframework.validation.BindException;
/**
 * Binds one tokenized CSV line to a {@link BookRecord}, reading each column by
 * the names declared in the reader configuration ("bookname", "bookauthor",
 * "bookformat", "isbn", "publishyear").
 */
public class BookRecordFieldSetMapper implements FieldSetMapper<BookRecord> {

    @Override
    public BookRecord mapFieldSet(FieldSet fieldSet) throws BindException {
        BookRecord mapped = new BookRecord();
        mapped.setBookName(fieldSet.readString("bookname"));
        mapped.setBookAuthor(fieldSet.readString("bookauthor"));
        mapped.setBookFormat(fieldSet.readString("bookformat"));
        mapped.setBookISBN(fieldSet.readString("isbn"));
        mapped.setPublishingYear(fieldSet.readString("publishyear"));
        return mapped;
    }
}

View File

@@ -0,0 +1,3 @@
spring.batch.job.enabled=false
file.input=src/main/resources/batchtesting/input.csv
file.output=src/main/resources/batchtesting/output.json

View File

@@ -0,0 +1,8 @@
Foundation,Asimov I.,hardcover,ISBN 12839,2018
Roadside Picnic,Strugatski A.,paperback,ISBN 12839,1988
Norwegian Wood,Murakami H.,paperback,ISBN 12839,2015
Davinci Code,Brown D.,hardcover,ISBN 12839,2005
Ubik,Dick K. P.,hardcover,ISBN 12839,2013
JFK,King S.,paperback,ISBN 12839,2017
Contact,Sagan C.,paperback,ISBN 12839,2000
Brave New World,Huxley A.,paperback,ISBN 12839,1986
1 Foundation Asimov I. hardcover ISBN 12839 2018
2 Roadside Picnic Strugatski A. paperback ISBN 12839 1988
3 Norwegian Wood Murakami H. paperback ISBN 12839 2015
4 Davinci Code Brown D. hardcover ISBN 12839 2005
5 Ubik Dick K. P. hardcover ISBN 12839 2013
6 JFK King S. paperback ISBN 12839 2017
7 Contact Sagan C. paperback ISBN 12839 2000
8 Brave New World Huxley A. paperback ISBN 12839 1986

View File

@@ -8,14 +8,17 @@
</layout> </layout>
</appender> </appender>
<logger name="org.baeldung.taskletsvschunks" level="debug" <logger name="org.baeldung.taskletsvschunks" level="debug" additivity="false">
additivity="false">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
</logger> </logger>
<logger name="org.baeldung.batchscheduler" level="debug" additivity="false"> <logger name="org.baeldung.batchscheduler" level="debug" additivity="false">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />
</logger> </logger>
<logger name="org.baeldung.batchtesting" level="debug" additivity="false">
<appender-ref ref="STDOUT" />
</logger>
<root level="error"> <root level="error">
<appender-ref ref="STDOUT" /> <appender-ref ref="STDOUT" />

View File

@@ -5,8 +5,8 @@ import org.junit.Test;
public class SpringContextIntegrationTest { public class SpringContextIntegrationTest {
@Test @Test
public final void testMain() throws Exception { public void testMain() throws Exception {
App.main(null); App.main(null);
} }
} }

View File

@@ -5,8 +5,8 @@ import org.junit.Test;
public class SpringContextTest { public class SpringContextTest {
@Test @Test
public final void testMain() throws Exception { public void testMain() throws Exception {
App.main(null); App.main(null);
} }
} }

View File

@@ -0,0 +1,115 @@
package org.baeldung.batchtesting;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import java.util.Collection;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.test.AssertFile;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.batch.test.JobRepositoryTestUtils;
import org.springframework.batch.test.context.SpringBatchTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import org.springframework.test.context.support.DirtiesContextTestExecutionListener;
/**
 * End-to-end tests for the {@code transformBooksRecords} job: launches the
 * whole job or single steps via {@link JobLauncherTestUtils} and compares the
 * produced JSON output against reference files.
 */
@RunWith(SpringRunner.class)
@SpringBatchTest
@EnableAutoConfiguration
@ContextConfiguration(classes = { SpringBatchConfiguration.class })
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DirtiesContextTestExecutionListener.class })
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class SpringBatchIntegrationTest {

    // File the job under test writes its JSON output to.
    private static final String TEST_OUTPUT = "src/test/resources/output/actual-output.json";

    // Reference output the produced file is compared against.
    private static final String EXPECTED_OUTPUT = "src/test/resources/output/expected-output.json";

    // CSV input consumed by the job during the tests.
    private static final String TEST_INPUT = "src/test/resources/input/test-input.csv";

    @Autowired
    private JobLauncherTestUtils jobLauncherTestUtils;

    @Autowired
    private JobRepositoryTestUtils jobRepositoryTestUtils;

    // Remove recorded job executions so each test runs against a clean repository.
    @After
    public void cleanUp() {
        jobRepositoryTestUtils.removeJobExecutions();
    }

    // Job parameters pointing the step-scoped reader/writer at the test files.
    private JobParameters defaultJobParameters() {
        JobParametersBuilder paramsBuilder = new JobParametersBuilder();
        paramsBuilder.addString("file.input", TEST_INPUT);
        paramsBuilder.addString("file.output", TEST_OUTPUT);
        return paramsBuilder.toJobParameters();
    }

    /** Runs the full job and checks job name, exit code and output file content. */
    @Test
    public void givenReferenceOutput_whenJobExecuted_thenSuccess() throws Exception {
        // given
        FileSystemResource expectedResult = new FileSystemResource(EXPECTED_OUTPUT);
        FileSystemResource actualResult = new FileSystemResource(TEST_OUTPUT);
        // when
        JobExecution jobExecution = jobLauncherTestUtils.launchJob(defaultJobParameters());
        JobInstance actualJobInstance = jobExecution.getJobInstance();
        ExitStatus actualJobExitStatus = jobExecution.getExitStatus();
        // then
        assertThat(actualJobInstance.getJobName(), is("transformBooksRecords"));
        assertThat(actualJobExitStatus.getExitCode(), is("COMPLETED"));
        AssertFile.assertFileEquals(expectedResult, actualResult);
    }

    /** Runs only step1 (CSV -> JSON) and checks its output file. */
    @Test
    public void givenReferenceOutput_whenStep1Executed_thenSuccess() throws Exception {
        // given
        FileSystemResource expectedResult = new FileSystemResource(EXPECTED_OUTPUT);
        FileSystemResource actualResult = new FileSystemResource(TEST_OUTPUT);
        // when
        JobExecution jobExecution = jobLauncherTestUtils.launchStep("step1", defaultJobParameters());
        Collection<StepExecution> actualStepExecutions = jobExecution.getStepExecutions();
        ExitStatus actualJobExitStatus = jobExecution.getExitStatus();
        // then
        assertThat(actualStepExecutions.size(), is(1));
        assertThat(actualJobExitStatus.getExitCode(), is("COMPLETED"));
        AssertFile.assertFileEquals(expectedResult, actualResult);
    }

    /** Runs only step2 and checks that all 8 input records were written. */
    @Test
    public void whenStep2Executed_thenSuccess() {
        // when
        JobExecution jobExecution = jobLauncherTestUtils.launchStep("step2", defaultJobParameters());
        Collection<StepExecution> actualStepExecutions = jobExecution.getStepExecutions();
        ExitStatus actualExitStatus = jobExecution.getExitStatus();
        // then
        assertThat(actualStepExecutions.size(), is(1));
        assertThat(actualExitStatus.getExitCode(), is("COMPLETED"));
        actualStepExecutions.forEach(stepExecution -> {
            // 8 = number of rows in the test input CSV
            assertThat(stepExecution.getWriteCount(), is(8));
        });
    }
}

View File

@@ -0,0 +1,116 @@
package org.baeldung.batchtesting;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import java.util.Arrays;
import org.baeldung.batchtesting.model.Book;
import org.baeldung.batchtesting.model.BookRecord;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.json.JsonFileItemWriter;
import org.springframework.batch.test.AssertFile;
import org.springframework.batch.test.JobRepositoryTestUtils;
import org.springframework.batch.test.MetaDataInstanceFactory;
import org.springframework.batch.test.StepScopeTestUtils;
import org.springframework.batch.test.context.SpringBatchTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import org.springframework.test.context.support.DirtiesContextTestExecutionListener;
/**
 * Unit-level tests for the step-scoped reader and writer: uses
 * {@link StepScopeTestUtils} to execute them inside a mocked step execution
 * without launching the whole job.
 */
@RunWith(SpringRunner.class)
@SpringBatchTest
@EnableAutoConfiguration
@ContextConfiguration(classes = { SpringBatchConfiguration.class })
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DirtiesContextTestExecutionListener.class })
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class SpringBatchStepScopeIntegrationTest {

    // File the writer under test writes its JSON output to.
    private static final String TEST_OUTPUT = "src/test/resources/output/actual-output.json";

    // Reference output for the single-book writer test.
    private static final String EXPECTED_OUTPUT_ONE = "src/test/resources/output/expected-output-one.json";

    // Single-row CSV input for the reader test.
    private static final String TEST_INPUT_ONE = "src/test/resources/input/test-input-one.csv";

    @Autowired
    private JsonFileItemWriter<Book> jsonItemWriter;

    @Autowired
    private FlatFileItemReader<BookRecord> itemReader;

    @Autowired
    private JobRepositoryTestUtils jobRepositoryTestUtils;

    // Job parameters pointing the step-scoped beans at the test files.
    private JobParameters defaultJobParameters() {
        JobParametersBuilder paramsBuilder = new JobParametersBuilder();
        paramsBuilder.addString("file.input", TEST_INPUT_ONE);
        paramsBuilder.addString("file.output", TEST_OUTPUT);
        return paramsBuilder.toJobParameters();
    }

    // Remove recorded job executions so each test runs against a clean repository.
    @After
    public void cleanUp() {
        jobRepositoryTestUtils.removeJobExecutions();
    }

    /** Reads the one-row input inside a mocked step scope and checks every field. */
    @Test
    public void givenMockedStep_whenReaderCalled_thenSuccess() throws Exception {
        // given
        StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(defaultJobParameters());
        // when
        StepScopeTestUtils.doInStepScope(stepExecution, () -> {
            BookRecord bookRecord;
            itemReader.open(stepExecution.getExecutionContext());
            while ((bookRecord = itemReader.read()) != null) {
                // then — the single input row is the "Foundation" record
                assertThat(bookRecord.getBookName(), is("Foundation"));
                assertThat(bookRecord.getBookAuthor(), is("Asimov I."));
                assertThat(bookRecord.getBookISBN(), is("ISBN 12839"));
                assertThat(bookRecord.getBookFormat(), is("hardcover"));
                assertThat(bookRecord.getPublishingYear(), is("2018"));
            }
            itemReader.close();
            return null;
        });
    }

    /** Writes one hand-built Book inside a mocked step scope and diffs the output file. */
    @Test
    public void givenMockedStep_whenWriterCalled_thenSuccess() throws Exception {
        // given
        FileSystemResource expectedResult = new FileSystemResource(EXPECTED_OUTPUT_ONE);
        FileSystemResource actualResult = new FileSystemResource(TEST_OUTPUT);
        Book demoBook = new Book();
        demoBook.setAuthor("Grisham J.");
        demoBook.setName("The Firm");
        StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(defaultJobParameters());
        // when
        StepScopeTestUtils.doInStepScope(stepExecution, () -> {
            jsonItemWriter.open(stepExecution.getExecutionContext());
            jsonItemWriter.write(Arrays.asList(demoBook));
            jsonItemWriter.close();
            return null;
        });
        // then
        AssertFile.assertFileEquals(expectedResult, actualResult);
    }
}

View File

@@ -0,0 +1 @@
Foundation,Asimov I.,hardcover,ISBN 12839,2018
1 Foundation Asimov I. hardcover ISBN 12839 2018

View File

@@ -0,0 +1,8 @@
Foundation,Asimov I.,hardcover,ISBN 12839,2018
Roadside Picnic,Strugatski A.,paperback,ISBN 12839,1988
Norwegian Wood,Murakami H.,paperback,ISBN 12839,2015
Davinci Code,Brown D.,hardcover,ISBN 12839,2005
Ubik,Dick K. P.,hardcover,ISBN 12839,2013
JFK,King S.,paperback,ISBN 12839,2017
Contact,Sagan C.,paperback,ISBN 12839,2000
Brave New World,Huxley A.,paperback,ISBN 12839,1986
1 Foundation Asimov I. hardcover ISBN 12839 2018
2 Roadside Picnic Strugatski A. paperback ISBN 12839 1988
3 Norwegian Wood Murakami H. paperback ISBN 12839 2015
4 Davinci Code Brown D. hardcover ISBN 12839 2005
5 Ubik Dick K. P. hardcover ISBN 12839 2013
6 JFK King S. paperback ISBN 12839 2017
7 Contact Sagan C. paperback ISBN 12839 2000
8 Brave New World Huxley A. paperback ISBN 12839 1986

View File

@@ -0,0 +1,3 @@
[
{"author":"Grisham J.","name":"The Firm"}
]

View File

@@ -0,0 +1,3 @@
[
{"author":"Grisham J.","name":"The Firm"}
]

View File

@@ -0,0 +1,10 @@
[
{"author":"Asimov I.","name":"Foundation"},
{"author":"Strugatski A.","name":"Roadside Picnic"},
{"author":"Murakami H.","name":"Norwegian Wood"},
{"author":"Brown D.","name":"Davinci Code"},
{"author":"Dick K. P.","name":"Ubik"},
{"author":"King S.","name":"JFK"},
{"author":"Sagan C.","name":"Contact"},
{"author":"Huxley A.","name":"Brave New World"}
]