
Spring Batch — Writing to Multiple Destinations

In this article, take a look at Spring Batch and how to write to multiple destinations.


In this example, I will show you how to write data to multiple destinations, in this case JSON and XML files.

In this example, I am using MySQL; however, you can use any database of your choice. Just make sure to create the equivalent schema and data in that database.

MySQL
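A minimal sketch of the schema: the customer table only needs the columns the reader selects (id, firstName, lastName, birthdate). The column types and the sample rows below are assumptions, not prescriptions.

CREATE TABLE customer (
  id        BIGINT       NOT NULL AUTO_INCREMENT PRIMARY KEY,
  firstName VARCHAR(255) NOT NULL,
  lastName  VARCHAR(255) NOT NULL,
  birthdate VARCHAR(255)
);

-- illustrative rows only; load your own data
INSERT INTO customer (firstName, lastName, birthdate) VALUES
  ('John', 'Doe', '10-10-1952 10:10:10'),
  ('Jane', 'Doe', '11-11-1965 11:11:11');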

 

You may also enjoy: An Introduction to Spring Batch

Main class to start the execution.

Java
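A minimal sketch of what this class could look like: since spring.batch.job.enabled=false is set in application.properties, the job is launched explicitly from a CommandLineRunner. The class and package names are placeholders, and @EnableBatchProcessing is assumed to live on this class.

package com.example;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;

@SpringBootApplication
@EnableBatchProcessing
public class MultiDestinationApplication {

    public static void main(String[] args) {
        SpringApplication.run(MultiDestinationApplication.class, args);
    }

    // spring.batch.job.enabled=false disables auto-start, so trigger the job here
    @Bean
    public CommandLineRunner runJob(JobLauncher jobLauncher, Job job) {
        return args -> {
            JobParameters params = new JobParametersBuilder()
                    .addLong("run.id", System.currentTimeMillis())
                    .toJobParameters();
            jobLauncher.run(job, params);
        };
    }
}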


LineAggregator implementation used to create a String representation of each object.

Java
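A minimal sketch of what this class could look like: jsonItemWriter() in JobConfiguration hands a CustomLineAggregator to a FlatFileItemWriter, so a LineAggregator<Customer> that serializes each item to a single JSON line fits here; the use of Jackson's ObjectMapper is an assumption.

package com.example.aggregator;

import org.springframework.batch.item.file.transform.LineAggregator;

import com.fasterxml.jackson.databind.ObjectMapper;

import com.example.model.Customer;

public class CustomLineAggregator implements LineAggregator<Customer> {

    private final ObjectMapper objectMapper = new ObjectMapper();

    @Override
    public String aggregate(Customer item) {
        try {
            // turn each Customer into one JSON line of the output file
            return objectMapper.writeValueAsString(item);
        } catch (Exception e) {
            throw new RuntimeException("Unable to serialize Customer", e);
        }
    }
}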


JobConfiguration

Java
package com.example.configuration;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.sql.DataSource;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.support.MySqlPagingQueryProvider;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.support.CompositeItemWriter;
import org.springframework.batch.item.xml.StaxEventItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.oxm.xstream.XStreamMarshaller;

import com.example.aggregator.CustomLineAggregator;
import com.example.mapper.CustomerRowMapper;
import com.example.model.Customer;

@Configuration
public class JobConfiguration {
    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSource dataSource;

    @Bean
    public JdbcPagingItemReader<Customer> customerPagingItemReader(){
        // reading database records using JDBC in a paging fashion
        JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>();
        reader.setDataSource(this.dataSource);
        reader.setFetchSize(1000);
        reader.setRowMapper(new CustomerRowMapper());

        // Sort Keys
        Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("id", Order.ASCENDING);

        // MySQL implementation of a PagingQueryProvider using database specific features.
        MySqlPagingQueryProvider queryProvider = new MySqlPagingQueryProvider();
        queryProvider.setSelectClause("id, firstName, lastName, birthdate");
        queryProvider.setFromClause("from customer");
        queryProvider.setSortKeys(sortKeys);

        reader.setQueryProvider(queryProvider);

        return reader;
    }

    @Bean
    public FlatFileItemWriter<Customer> jsonItemWriter() throws Exception{
        String customerOutputPath = File.createTempFile("customerOutput", ".out").getAbsolutePath();
        System.out.println(">> Output Path = "+customerOutputPath);

        FlatFileItemWriter<Customer> writer = new FlatFileItemWriter<>();
        writer.setLineAggregator(new CustomLineAggregator());
        writer.setResource(new FileSystemResource(customerOutputPath));
        writer.afterPropertiesSet();

        return writer;
    }

    @Bean
    public StaxEventItemWriter<Customer> xmlItemWriter() throws Exception{
        String customerOutputPath = File.createTempFile("customerOutput", ".out").getAbsolutePath();
        System.out.println(">> Output Path = "+customerOutputPath);

        Map<String, Class> aliases = new HashMap<>();
        aliases.put("customer", Customer.class);

        XStreamMarshaller marshaller = new XStreamMarshaller();
        marshaller.setAliases(aliases);

        // StAX and Marshaller for serializing object to XML.
        StaxEventItemWriter<Customer> writer = new StaxEventItemWriter<>();
        writer.setRootTagName("customers");
        writer.setMarshaller(marshaller);
        writer.setResource(new FileSystemResource(customerOutputPath));
        writer.afterPropertiesSet();

        return writer;
    }

    @Bean
    public CompositeItemWriter<Customer> itemWriter() throws Exception{
        List<ItemWriter<? super Customer>> writers = new ArrayList<>();
        writers.add(xmlItemWriter());
        writers.add(jsonItemWriter());

        CompositeItemWriter<Customer> compositeItemWriter = new CompositeItemWriter<>();
        compositeItemWriter.setDelegates(writers);
        compositeItemWriter.afterPropertiesSet();

        return compositeItemWriter;
    }

    @Bean
    public Step step1() throws Exception {
        return stepBuilderFactory.get("step1")
                .<Customer, Customer> chunk(10)
                .reader(customerPagingItemReader())
                .writer(itemWriter())
                .build();
    }

    @Bean
    public Job job() throws Exception {
        return jobBuilderFactory.get("job")
                .start(step1())
                .build();
    }
}

CustomerRowMapper

Java

package com.example.mapper;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.format.DateTimeFormatter;

import org.springframework.jdbc.core.RowMapper;

import com.example.model.Customer;

public class CustomerRowMapper implements RowMapper<Customer> {
    private static final DateTimeFormatter DT_FORMAT = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss");

    @Override
    public Customer mapRow(ResultSet rs, int rowNum) throws SQLException {
        return Customer.builder().id(rs.getLong("id"))
                .firstName(rs.getString("firstName"))
                .lastName(rs.getString("lastName"))
                .birthdate(rs.getString("birthdate")).build();
    }
}

Customer

Java

package com.example.model;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@AllArgsConstructor
@Builder
@NoArgsConstructor
public class Customer {
    private Long id;
    private String firstName;
    private String lastName;
    private String birthdate;
}


application.properties

Properties

spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.url=jdbc:mysql://localhost:3306/test
spring.datasource.username=root
spring.datasource.password=root

spring.batch.initialize-schema=always

spring.batch.job.enabled=false


pom.xml

XML
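A minimal sketch of the dependencies this example relies on, assuming a Spring Boot 2.x starter parent manages most versions (the parent and XStream versions below are only examples):

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.2.4.RELEASE</version>
        <relativePath/>
    </parent>

    <groupId>com.example</groupId>
    <artifactId>spring-batch-multiple-destinations</artifactId>
    <version>0.0.1-SNAPSHOT</version>

    <dependencies>
        <!-- Spring Batch on Spring Boot (brings in spring-boot-starter-jdbc) -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-batch</artifactId>
        </dependency>
        <!-- JDBC driver for the MySQL datasource -->
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
        </dependency>
        <!-- Lombok annotations used by the Customer model -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <!-- Spring OXM + XStream for the StaxEventItemWriter/XStreamMarshaller -->
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-oxm</artifactId>
        </dependency>
        <dependency>
            <groupId>com.thoughtworks.xstream</groupId>
            <artifactId>xstream</artifactId>
            <version>1.4.20</version>
        </dependency>
        <!-- Jackson for the JSON line aggregator -->
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
    </dependencies>
</project>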


Output: Two output files have been created, which means we are able to write data to multiple destinations.

Plain Text

>> Output Path = C:\Users\user\AppData\Local\Temp\1\customerOutput5824034651210161854.out
>> Output Path = C:\Users\user\AppData\Local\Temp\1\customerOutput8870745056369564522.out


customerOutput8870745056369564522.out

JSON
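With a line aggregator like the one sketched above, the FlatFileItemWriter emits one JSON object per customer, roughly as follows (illustrative values, not the actual output):

{"id":1,"firstName":"John","lastName":"Doe","birthdate":"10-10-1952 10:10:10"}
{"id":2,"firstName":"Jane","lastName":"Doe","birthdate":"11-11-1965 11:11:11"}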


customerOutput5824034651210161854.out

XML
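The StaxEventItemWriter marshals each item as a customer element under the customers root tag, roughly as follows (illustrative values, formatted here for readability):

<?xml version="1.0" encoding="UTF-8"?>
<customers>
  <customer><id>1</id><firstName>John</firstName><lastName>Doe</lastName><birthdate>10-10-1952 10:10:10</birthdate></customer>
  <customer><id>2</id><firstName>Jane</firstName><lastName>Doe</lastName><birthdate>11-11-1965 11:11:11</birthdate></customer>
</customers>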


Further Reading

Spring Batch Read an XML File and Write to Oracle Database

Batch Processing Large Data Sets With Spring Boot and Spring Batch

Topics: spring batch, database, write to multiple destinations

Opinions expressed by DZone contributors are their own.
