I'm using Hive to select millions of records over JDBC, and I then iterate the ResultSet to write out my OWL file. How can I implement this scenario with MapReduce?
Note: if that isn't possible with Hive, I also have the same database in MySQL. Can a MapReduce job work with a JDBC ResultSet in that case? My current JDBC code is below, followed by a rough sketch of the MapReduce approach I have in mind.
private static String driverName = "org.apache.hive.jdbc.HiveDriver";

public static void main(String[] args) throws OWLOntologyCreationException, OWLOntologyStorageException, SQLException, ClassNotFoundException, FileNotFoundException {
    // Register the JDBC driver
    // Class.forName("com.mysql.jdbc.Driver");
    Class.forName(driverName);

    // Get a connection
    // Connection connection = DriverManager.getConnection("jdbc:mysql://127.0.0.1:3306/foodProducts", "root", "root");
    Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "");

    // Join products with their nutrients and serving sizes
    // String SQL = "SELECT p.NDB_Number, p.Long_Name, n.Output_Value, n.Output_UOM, n.Nutrient_Name, \n"
    //         + "p.Ingredients, ss.Household_Serving, ss.Household_Serving_Size_UOM, ss.Serving_Size, ss.Serving_Size_UOM\n"
    //         + "FROM foodProducts.Products as p left join foodProducts.Nutrients as n on n.NDB_No = p.NDB_Number\n"
    //         + "left join foodProducts.ServingSize as ss on ss.NDB_No = p.NDB_Number";
    String SQL = "SELECT p.NDB_Number, p.Long_Name, n.Output_Value, n.Output_UOM, n.Nutrient_Name, p.Ingredients, ss.Household_Serving, " +
            "ss.Household_Serving_Size_UOM, ss.Serving_Size, ss.Serving_Size_UOM FROM products as p left join nutrients as n on n.NDB_No = p.NDB_Number " +
            "left join servingsize as ss on ss.NDB_No = p.NDB_Number ";

    connection.setAutoCommit(false);

    // Forward-only, read-only statement with a large fetch size so the rows are streamed
    PreparedStatement stmt = connection.prepareStatement(SQL, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY);
    // stmt.setFetchSize(Integer.MIN_VALUE);
    stmt.setFetchSize(10000);
    stmt.execute();
    ResultSet res = stmt.getResultSet();

    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    // Load the local copy of the ontology
    File owlFile = new File("src/main/resources/thesisOWL.owl");
    OWLOntology foodOwl = manager.loadOntologyFromOntologyDocument(owlFile);
    File owlDataFile = new File("src/main/resources/thesisOWLData.owl");
    OWLDataFactory dataFactory = manager.getOWLDataFactory();
    String base = "http://www.semanticweb.org/omar/ontologies/2019/10/untitled-ontology-69#";
    PrefixManager pm = new DefaultPrefixManager(base);

    // Build OWL individuals from each row of the ResultSet
    while (res.next()) {
        String foodItemName = res.getString(2);
        String outputValue = res.getString(3);
        // ...
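For context, here is a minimal sketch of what I imagine the MapReduce side could look like for the MySQL copy of the database, assuming Hadoop's DBInputFormat/DBWritable: the framework hands my record class the JDBC ResultSet, positioned on the current row, inside readFields(ResultSet). The names here (OwlExportJob, ProductRecord, ProductMapper, the output path, and the trimmed-down two-column query) are placeholders I made up for illustration; my real mapper would build OWL individuals/axioms instead of tab-separated text. Is something like this the right direction?

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class OwlExportJob {

    // One row of the query; DBInputFormat calls readFields(ResultSet) for every record,
    // so this is where the JDBC ResultSet shows up inside the MapReduce job.
    public static class ProductRecord implements Writable, DBWritable {
        String ndbNumber;
        String longName;

        @Override
        public void readFields(ResultSet rs) throws SQLException {
            ndbNumber = rs.getString(1);
            longName = rs.getString(2);
        }

        @Override
        public void write(PreparedStatement ps) throws SQLException {
            ps.setString(1, ndbNumber);
            ps.setString(2, longName);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            ndbNumber = in.readUTF();
            longName = in.readUTF();
        }

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeUTF(ndbNumber);
            out.writeUTF(longName);
        }
    }

    // Map-only job: each mapper handles its share of the rows.
    // In my case the map() body would create OWL individuals instead of plain text lines.
    public static class ProductMapper extends Mapper<LongWritable, ProductRecord, Text, NullWritable> {
        @Override
        protected void map(LongWritable key, ProductRecord row, Context context)
                throws IOException, InterruptedException {
            context.write(new Text(row.ndbNumber + "\t" + row.longName), NullWritable.get());
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // JDBC connection details for the MySQL copy of the database
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://127.0.0.1:3306/foodProducts", "root", "root");

        Job job = Job.getInstance(conf, "owl-export");
        job.setJarByClass(OwlExportJob.class);
        job.setMapperClass(ProductMapper.class);
        job.setNumReduceTasks(0);                        // map-only
        job.setInputFormatClass(DBInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        FileOutputFormat.setOutputPath(job, new Path("/tmp/owl-fragments"));

        // The query is split across mappers; the count query lets Hadoop size the splits.
        DBInputFormat.setInput(job, ProductRecord.class,
                "SELECT NDB_Number, Long_Name FROM Products",
                "SELECT COUNT(*) FROM Products");

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

My idea is that each mapper could create its own OWLOntologyManager and write an ontology fragment for its split, and I would merge the fragments afterwards. I don't know whether the Hive case can be handled the same way through JDBC, which is what I'm asking about.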