I'm trying to read data from a table that has ~13 million records. I used setFetchSize to read the records in batches, do some lookups, and write the result to a CSV file. But the output CSV ends up with more than 50 million records, which is not expected. I suspect the iteration is not working correctly. Any help would be appreciated.
def processRecords(ParamHelper params) {
    try {
        CSVWriter writer = new CSVWriter(
                new FileWriter(params.outputDir.getAbsolutePath() + "/Records_fact.csv"),
                (Character) '\t', (Character) '\u0000', (Character) '\n')
        String contractRateSql = "select contract_id,season from table"
        // stmt is a java.sql.Statement created earlier from the connection
        ResultSet resRecords = stmt.executeQuery(contractRateSql)
        Map<String, Map<String, String>> masterRecords = new HashMap<String, Map<String, String>>()
        Map<String, String> existingRecords = null
        int count = 0
        resRecords.setFetchSize(10000)   // fetch hint: 10k rows per round trip
        while (resRecords.next()) {
            try {
                existingRecords = new HashMap<String, String>()
                existingRecords.put("cont_id", resRecords.getString("contract_id"))
                existingRecords.put("season", resRecords.getString("season"))
                masterRecords.put(resRecords.getString("contract_id") + "#" + count++, existingRecords)
            } catch (Exception e) {
                e.printStackTrace()
            }
            // runs once per fetched row, over everything accumulated in masterRecords so far
            masterRecords.each { k, v ->
                try {
                    // some process that does the lookups and populates valueList
                } catch (Exception e) {
                    e.printStackTrace()
                }
            }
            if (valueList.size() > 0)
                writer.writeAll(valueList)
        }
        if (resRecords != null) resRecords.close()
        writer.close()
    } catch (Exception e) {
        e.printStackTrace()
        println("Error occurred while fetching the data")
    }
}
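For reference, the flow I'm aiming for is roughly the sketch below: stream the rows with a fetch-size hint and write one CSV line per fetched row after its lookup. This is only a simplified outline, not my actual code; the connection handling, the opencsv import path, and the per-row lookup step are placeholders.

import au.com.bytecode.opencsv.CSVWriter   // or com.opencsv.CSVWriter, depending on the opencsv version
import java.sql.Connection
import java.sql.ResultSet
import java.sql.Statement

// Simplified outline: stream rows with a fetch-size hint and
// write exactly one CSV line per fetched row.
void exportRecords(Connection conn, File outputDir) {
    CSVWriter writer = new CSVWriter(
            new FileWriter(new File(outputDir, "Records_fact.csv")),
            '\t' as char, CSVWriter.NO_QUOTE_CHARACTER)
    Statement stmt = conn.createStatement()
    stmt.setFetchSize(10000)                // hint: pull 10k rows per round trip
    ResultSet rs = stmt.executeQuery("select contract_id, season from table")
    try {
        while (rs.next()) {
            String contractId = rs.getString("contract_id")
            String season = rs.getString("season")
            // per-row lookup/enrichment would happen here
            writer.writeNext([contractId, season] as String[])   // one output line per input row
        }
    } finally {
        rs.close()
        stmt.close()
        writer.close()
    }
}

With that flow I'd expect roughly one output line per input row, i.e. about 13 million lines in the CSV, which is why the 50+ million lines in the actual output surprised me.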