
Commit 1994ee6

Added documentation to chunk-csv-database project.
1 parent 72c1502 · commit 1994ee6

File tree: 3 files changed, +55 -3 lines

batch/chunk-csv-database/pom.xml

Lines changed: 2 additions & 0 deletions
@@ -11,6 +11,8 @@
 
     <artifactId>chunk-csv-database</artifactId>
     <packaging>war</packaging>
+    <name>Batch Chunk CSV Database</name>
+    <description>Chunk Processing - Read, Process, Write to a Database</description>
 
     <dependencies>
         <dependency>

batch/chunk-csv-database/src/main/resources/META-INF/persistence.xml

Lines changed: 0 additions & 1 deletion
@@ -11,7 +11,6 @@
             <property name="javax.persistence.schema-generation.drop-source" value="script"/>
             <property name="javax.persistence.schema-generation.create-script-source" value="META-INF/create.sql"/>
             <property name="javax.persistence.schema-generation.drop-script-source" value="META-INF/drop.sql"/>
-            <property name="eclipselink.logging.level" value="FINE"/>
         </properties>
     </persistence-unit>
</persistence>
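
The test in the next file loads rows through the Person.findAll named query, and the schema-generation properties above point at create.sql and drop.sql. The Person entity itself is not part of this commit, so the following is only a rough sketch of what such an entity could look like: the class name and query name come from the test, while the package, field names and constructor are assumptions.

// Hypothetical sketch only: the real Person entity lives in the sample sources, not in this diff.
package org.javaee7.batch.chunk.csv.database;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedQuery;

@Entity
@NamedQuery(name = "Person.findAll", query = "SELECT p FROM Person p")
public class Person {

    @Id
    private int id;       // assumed key column, created by create.sql
    private String name;  // assumed payload column, loaded from mydata.csv

    protected Person() {
        // JPA requires a no-arg constructor.
    }

    public Person(int id, String name) {
        this.id = id;
        this.name = name;
    }

    public int getId() {
        return id;
    }

    public String getName() {
        return name;
    }
}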

batch/chunk-csv-database/src/test/java/org/javaee7/batch/chunk/csv/database/BatchCSVDatabaseTest.java

Lines changed: 53 additions & 2 deletions
@@ -22,13 +22,50 @@
 import static org.junit.Assert.assertEquals;
 
 /**
+ * The Batch specification provides a Chunk Oriented processing style. This style is defined by enclosing into a
+ * transaction a set of read, process and write operations via +javax.batch.api.chunk.ItemReader+,
+ * +javax.batch.api.chunk.ItemProcessor+ and +javax.batch.api.chunk.ItemWriter+. Items are read one at a time, processed
+ * and aggregated. The transaction is then committed when the defined +checkpoint-policy+ is triggered.
+ *
+ * include::myJob.xml[]
+ *
+ * A very simple job is defined in the +myJob.xml+ file: just a single step with a reader, a processor and a writer.
+ *
+ * This job will read a file from the system in CSV format:
+ * include::MyItemReader#open[]
+ * include::MyItemReader#readItem[]
+ *
+ * Process the data by transforming it into a +Person+ object:
+ * include::MyItemProcessor#processItem[]
+ *
+ * Finally, write the data to a database using JPA:
+ * include::MyItemWriter#writeItems[]
+ *
  * @author Roberto Cortez
  */
 @RunWith(Arquillian.class)
 public class BatchCSVDatabaseTest {
     @PersistenceContext
     private EntityManager entityManager;
 
+    /**
+     * We're just going to deploy the application as a +web archive+. Note the inclusion of the following files:
+     *
+     * [source,file]
+     * ----
+     * /META-INF/batch-jobs/myJob.xml
+     * /META-INF/persistence.xml
+     * /META-INF/create.sql
+     * /META-INF/drop.sql
+     * /META-INF/mydata.csv
+     * ----
+     *
+     * * The +myJob.xml+ file is needed for running the batch definition.
+     * * The +persistence.xml+ file is needed for the JPA configuration: create the schema, load the data and drop the schema.
+     * * The +create.sql+ file has the necessary database schema for the data.
+     * * The +drop.sql+ file has the required commands to drop the database schema created.
+     * * The +mydata.csv+ file has the data to load into the database.
+     */
     @Deployment
     public static WebArchive createDeployment() {
         WebArchive war = ShrinkWrap.create(WebArchive.class)
@@ -44,7 +81,17 @@ public static WebArchive createDeployment() {
         return war;
     }
 
+    @SuppressWarnings("unchecked")
     @Test
+    /**
+     * In the test, we're just going to invoke the batch execution and wait for completion. To validate the test's
+     * expected behaviour we need to query the +Metric[]+ object available in the step execution.
+     *
+     * The batch process itself will read and write 7 elements of type +Person+. Commits are executed after 3 elements
+     * are read.
+     *
+     * @throws Exception an exception if the batch could not complete successfully.
+     */
     public void testBatchCSVDatabase() throws Exception {
         JobOperator jobOperator = BatchRuntime.getJobOperator();
         Long executionId = jobOperator.start("myJob", new Properties());
@@ -57,17 +104,21 @@ public void testBatchCSVDatabase() throws Exception {
             if (stepExecution.getStepName().equals("myStep")) {
                 Map<Metric.MetricType, Long> metricsMap = BatchTestHelper.getMetricsMap(stepExecution.getMetrics());
 
+                // <1> The read count should be 7 elements. Check +MyItemReader+.
                 assertEquals(7L, metricsMap.get(Metric.MetricType.READ_COUNT).longValue());
+                // <2> The write count should be the same 7 read elements.
                 assertEquals(7L, metricsMap.get(Metric.MetricType.WRITE_COUNT).longValue());
+                // <3> The commit count should be 3. Checkpoint is on every 3rd read, so 3 commits for the 7 read elements.
                 assertEquals(3L, metricsMap.get(Metric.MetricType.COMMIT_COUNT).longValue());
             }
         }
 
         Query query = entityManager.createNamedQuery("Person.findAll");
-        @SuppressWarnings("unchecked")
         List<Person> persons = query.getResultList();
 
+        // <4> Confirm that the elements were actually persisted into the database.
         assertEquals(7L, persons.size());
+        // <5> The job should have completed.
         assertEquals(jobExecution.getBatchStatus(), BatchStatus.COMPLETED);
     }
-}
+}
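
The include:: directives in the class JavaDoc pull in MyItemReader, MyItemProcessor and MyItemWriter, whose sources are not part of this commit. Below is a rough sketch of what those three chunk artifacts typically look like with the javax.batch chunk API: the class names and the CSV file location follow the JavaDoc above, while the line format, the Person constructor and the single-file layout are assumptions made for illustration.

// Illustrative sketch only: the real reader, processor and writer live in the sample sources.
// In the real project each artifact would be a separate public class; they are collapsed here for brevity.
package org.javaee7.batch.chunk.csv.database;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.util.List;

import javax.batch.api.chunk.AbstractItemReader;
import javax.batch.api.chunk.AbstractItemWriter;
import javax.batch.api.chunk.ItemProcessor;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

@Named
class MyItemReader extends AbstractItemReader {
    private BufferedReader reader;

    @Override
    public void open(Serializable checkpoint) throws Exception {
        // Open the CSV file bundled with the deployment (see /META-INF/mydata.csv above).
        reader = new BufferedReader(new InputStreamReader(
            Thread.currentThread().getContextClassLoader().getResourceAsStream("META-INF/mydata.csv")));
    }

    @Override
    public Object readItem() throws Exception {
        // Returning null tells the batch runtime that the input is exhausted.
        return reader.readLine();
    }
}

@Named
class MyItemProcessor implements ItemProcessor {
    @Override
    public Object processItem(Object item) throws Exception {
        // Turn one CSV line into a Person; the "id,name" layout is assumed.
        String[] fields = ((String) item).split(",");
        return new Person(Integer.parseInt(fields[0].trim()), fields[1].trim());
    }
}

@Named
class MyItemWriter extends AbstractItemWriter {
    @PersistenceContext
    private EntityManager entityManager;

    @Override
    public void writeItems(List<Object> items) throws Exception {
        // Called once per chunk; with a checkpoint every 3 items and 7 lines that is 3 calls (3 + 3 + 1).
        for (Object item : items) {
            entityManager.persist(item);
        }
    }
}

The chunk size noted in the writer matches the metrics asserted in the test above: 7 reads, 7 writes and 3 commits.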
