I have a job configuration like this:

JOB (
  Step 1: Read from the DB (JDBC item reader + writer) and
          keep the list of items in the job execution context
  Step 2: Take the items (list) from Step 1 (from the job context)
          and execute this step in multiple parallel processes
)

Here is what the code looks like:
<bean id="webServiceReader" class="org.springframework.batch.item.adapter.ItemReaderAdapter" scope="step">
    <property name="targetObject" ref="individualXXXXService"/>
    <property name="targetMethod" value="execute"/>
    <property name="arguments">
        <list>
            <value type="java.util.List">#{jobExecutionContext['JDBC_ITEMS']}</value>
        </list>
    </property>
</bean>
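For context, stepScopeExecutionListener (referenced in the job below) promotes the item list from the step execution context into the job execution context. A minimal sketch of that wiring, assuming Spring Batch's ExecutionContextPromotionListener and that the writer stores the list under the JDBC_ITEMS key in the step execution context:

<!-- Copies the JDBC_ITEMS key from the step execution context
     to the job execution context when the step completes -->
<bean id="stepScopeExecutionListener"
      class="org.springframework.batch.core.listener.ExecutionContextPromotionListener">
    <property name="keys" value="JDBC_ITEMS"/>
</bean>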
<batch:job id="identityCpiIborSync" restartable="true">
    <batch:step id="fetchCustomers" next="updateWS">
        <batch:tasklet>
            <batch:chunk reader="dbItemReader" writer="dbItemWriter" commit-interval="500"/>
        </batch:tasklet>
        <batch:listeners>
            <batch:listener ref="stepScopeExecutionListener"/>
        </batch:listeners>
    </batch:step>
    <batch:step id="updateWS">
        <!-- <batch:tasklet task-executor="taskExecutor" throttle-limit="10"> -->
        <batch:tasklet>
            <batch:chunk reader="webServiceReader" writer="cpiWSItemWriter" commit-interval="10"/>
        </batch:tasklet>
    </batch:step>
    <batch:listeners>
        <batch:listener ref="batchJobListener"/>
    </batch:listeners>
</batch:job>
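For reference, the commented-out task-executor attribute in updateWS would point at an executor bean along these lines (a minimal sketch, assuming a ThreadPoolTaskExecutor; the pool sizes here are illustrative):

<!-- Bounded thread pool for a multi-threaded step; the tasklet's
     throttle-limit caps how many of these threads the step uses -->
<bean id="taskExecutor"
      class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
    <property name="corePoolSize" value="10"/>
    <property name="maxPoolSize" value="10"/>
</bean>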
My question is this:
Do I have any option other than writing a Partitioner (and partition handler) for the second step? I just want to know whether there is any way, other than partitioning, to achieve parallel processing in the second step with that list of items.
Kindly suggest if there is a better way.