Do we have to initialize a new container to perform queries on the data stored in GridDB? I have retrieved the data from the CSV file and stored it in GridDB after initializing a container and performing a query,
which means all the data is being retrieved and displayed on the terminal/console. However, to perform column-related queries, do I have to make a new container and initialize all the data columns again in the new container, or can I use the same container for performing different queries?
P.S. I have included the code below for reference.
var griddb = require('griddb-node-api');
const dfd = require("danfojs-node")
var fs = require('fs');
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
// Writer for the output CSV. The header lists exactly the fields that are
// pushed into lst2 from the GridDB query results. The previous version also
// declared "orbiting_body" and "sentry_object", but those columns exist
// neither in the container schema nor in the query results, so they were
// always written out empty -- they have been removed.
const csvWriter = createCsvWriter({
    path: 'out.csv',
    header: [
        { id: "id", title: "id" },
        { id: "new_name", title: "new_name" },
        { id: "est_diameter_min", title: "est_diameter_min" },
        { id: "est_diameter_max", title: "est_diameter_max" },
        { id: "relative_velocity", title: "relative_velocity" },
        { id: "miss_distance", title: "miss_distance" },
        { id: "absolute_magnitude", title: "absolute_magnitude" }
    ]
});
// Connect to the GridDB cluster via the singleton store factory.
// 239.0.0.1:31999 appears to be the multicast notification address/port
// (GridDB's usual default) -- confirm against your cluster configuration.
const factory = griddb.StoreFactory.getInstance();
const store = factory.getStore({
    host: '239.0.0.1',
    port: 31999,
    clusterName: 'defaultCluster',
    username: 'admin',
    password: 'admin'
});
// For connecting to the GridDB Server we have to make containers and specify the schema.
const conInfo = new griddb.ContainerInfo({
'name': "neoanalysis",
'columnInfoList': [
["name", griddb.Type.STRING],
["id", griddb.Type.INTEGER],
["new_name", griddb.Type.STRING],
["est_diameter_min", griddb.Type.DOUBLE],
["est_diameter_max", griddb.Type.DOUBLE],
["relative_velocity", griddb.Type.DOUBLE],
["miss_distance", griddb.Type.DOUBLE],
["absolute_magnitude", griddb.Type.DOUBLE]
],
'type': griddb.ContainerType.COLLECTION, 'rowKey': true
});
// ////////////////////////////////////////////
const csv = require('csv-parser');
// NOTE: 'fs' is already required at the top of the file; the duplicate
// require('fs') that used to sit here was removed, along with an unused
// counter variable.
var lst = [];   // raw rows parsed from the CSV file
var lst2 = [];  // rows read back from GridDB, destined for out.csv
// Parse the CSV, load every row into GridDB, then query the rows back and
// write them to out.csv.
//
// Fixes over the previous version:
//  * The query no longer races the inserts: all puts are awaited before
//    "select *" runs (previously the per-row promise chains were never
//    awaited, so the query could fire before any data existed).
//  * store.putContainer/createIndex are invoked once, not once per row --
//    one container handle is reused for all puts AND all queries; there is
//    no need to re-create the container or re-register the schema.
//  * csv-parser yields every field as a string; numeric columns are now
//    converted to match the INTEGER/DOUBLE schema declared above.
//  * The 'data' handler no longer logs the whole accumulated list per row
//    (which produced O(n^2) console output); a single summary is logged.
fs.createReadStream('./Dataset/neo.csv')
    .pipe(csv())
    .on('data', (row) => {
        lst.push(row);
    })
    .on('end', async () => {
        console.log(`Parsed ${lst.length} rows from neo.csv`);
        try {
            const container = await store.putContainer(conInfo, false);
            await container.createIndex({ 'columnName': 'name', 'indexType': griddb.IndexType.DEFAULT });
            container.setAutoCommit(false);

            let idx = 0;
            for (const row of lst) {
                idx++;
                await container.put([
                    String(idx),                       // row key ("name" column)
                    parseInt(row['id'], 10),
                    row['new_name'],
                    Number(row['est_diameter_min']),
                    Number(row['est_diameter_max']),
                    Number(row['relative_velocity']),
                    Number(row['miss_distance']),
                    Number(row['absolute_magnitude'])
                ]);
            }
            await container.commit();

            // The same container handle serves the query -- the inserts
            // above are fully committed, so "select *" sees every row.
            const query = container.query("select *");
            const rs = await query.fetch();
            while (rs.hasNext()) {
                const rsNext = rs.next();
                lst2.push({
                    'id': rsNext[1],
                    "new_name": rsNext[2],
                    "est_diameter_min": rsNext[3],
                    "est_diameter_max": rsNext[4],
                    "relative_velocity": rsNext[5],
                    "miss_distance": rsNext[6],
                    "absolute_magnitude": rsNext[7],
                });
            }
            await csvWriter.writeRecords(lst2);
            console.log('The CSV file was written successfully');
        } catch (err) {
            // GridDB errors carry a stack of (code, message) pairs.
            if (err.constructor.name == "GSException") {
                for (let i = 0; i < err.getErrorStackSize(); i++) {
                    console.log("[", i, "]");
                    console.log(err.getErrorCode(i));
                    console.log(err.getMessage(i));
                }
            } else {
                console.log(err);
            }
        }
    });