I want to fetch a large amount of data (over 100,000 rows with 50 columns) from my Oracle database into an Excel file using Java with Apache POI and Oracle JDBC. This works fine for small amounts of data, but what is the smartest way to handle a task like this? Maybe multithreading? I keep getting "java.lang.OutOfMemoryError: Java heap space", even though I already increased the heap to 1024 MB. A sketch of a streaming alternative I have been considering follows my current code below.
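For reference, this is how I raised the heap (assuming a plain java launch; the class name is just a placeholder):

    java -Xmx1024m MyExporter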
This is my current code:
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

public void fetchDataIntoExcelFile(String sqlStmt) {
    // Check whether the JDBC driver exists
    try {
        Class.forName("oracle.jdbc.driver.OracleDriver");
    } catch (ClassNotFoundException e) {
        System.out.println("Please install the Oracle JDBC driver!");
        e.printStackTrace();
        return;
    }

    // Try to establish a connection to the database
    Connection conn;
    try {
        conn = DriverManager.getConnection("jdbc:oracle:thin:@myserver", "user", "password");
    } catch (SQLException e) {
        e.printStackTrace();
        return;
    }

    // The whole workbook lives in memory and is written to disk once at the end
    XSSFWorkbook wbx = new XSSFWorkbook();
    XSSFSheet sheet = wbx.createSheet();

    try {
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery(sqlStmt);
        ResultSetMetaData columns = rs.getMetaData();

        // Write the header row from the result set metadata,
        // so the first data row is not consumed for the labels
        XSSFRow header = sheet.createRow(0);
        for (int j = 1; j <= columns.getColumnCount(); j++) {
            header.createCell(j - 1).setCellValue(columns.getColumnLabel(j));
        }

        // Write one sheet row per result row (cell indexes are 0-based)
        int i = 1;
        while (rs.next()) {
            XSSFRow row = sheet.createRow(i++);
            for (int j = 1; j <= columns.getColumnCount(); j++) {
                row.createCell(j - 1).setCellValue(rs.getString(j));
            }
        }
        rs.close();
        stmt.close();

        // Write the finished workbook exactly once
        try (FileOutputStream fos = new FileOutputStream("filename.xlsx")) {
            wbx.write(fos);
        } catch (IOException e) {
            System.out.println("Error occurred while writing the file.");
            e.printStackTrace();
        }
    } catch (SQLException e) {
        System.out.println("Query failed!");
        e.printStackTrace();
    } finally {
        try {
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
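The streaming alternative I mentioned at the top: Apache POI also ships SXSSFWorkbook, which keeps only a sliding window of rows in memory and flushes the rest to temporary files, and as far as I know the Oracle driver only fetches 10 rows per round trip by default, which setFetchSize can raise. This is just a minimal sketch under the same connection and query assumptions as above; the method name streamDataIntoExcelFile, the window of 100 rows, and the fetch size of 500 are my own placeholders, not anything from a working solution:

import java.io.FileOutputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;

public void streamDataIntoExcelFile(String sqlStmt) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:oracle:thin:@myserver", "user", "password");
         Statement stmt = conn.createStatement()) {
        stmt.setFetchSize(500); // fetch rows from Oracle in batches instead of the default 10 per round trip

        SXSSFWorkbook wbx = new SXSSFWorkbook(100); // keep only 100 rows in memory, flush older rows to temp files
        Sheet sheet = wbx.createSheet();

        try (ResultSet rs = stmt.executeQuery(sqlStmt)) {
            ResultSetMetaData columns = rs.getMetaData();

            // Header row from the result set metadata
            Row header = sheet.createRow(0);
            for (int j = 1; j <= columns.getColumnCount(); j++) {
                header.createCell(j - 1).setCellValue(columns.getColumnLabel(j));
            }

            // One sheet row per result row (cell indexes are 0-based)
            int i = 1;
            while (rs.next()) {
                Row row = sheet.createRow(i++);
                for (int j = 1; j <= columns.getColumnCount(); j++) {
                    row.createCell(j - 1).setCellValue(rs.getString(j));
                }
            }
        }

        try (FileOutputStream fos = new FileOutputStream("filename.xlsx")) {
            wbx.write(fos);
        }
        wbx.dispose(); // delete the temporary files backing the flushed rows
    }
}

If I understand the docs correctly, the row window bounds POI's memory use regardless of the total row count, so the heap size should stop being the limiting factor, but I would still like to know whether this, multithreading, or something else entirely is the right approach.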