I have a function that fetches thread (Gmail conversation) IDs from a database and then asks the Google API for the full data of each thread. Once it receives a thread object, it stores it in the database. This works fine for my inbox, which has ~1k messages, but I am not sure it would hold up for accounts with over 100k messages.
What I am asking is: once the machine runs out of memory, will the script crash, or will it keep executing callbacks whenever enough RAM becomes available again? Should I modify this code to work part by part, i.e. rerun the whole script at certain points and continue with fresh RAM from where it last ended (see the sketch after the code)?
var google = require('googleapis');
var MongoClient = require('mongodb').MongoClient;
var assert = require('assert');

function eachThread(auth) {
  var gmail = google.gmail('v1');
  MongoClient.connect(mongoUrl, function(err, db) {
    assert.equal(null, err);
    var collection = db.collection('threads');
    // Load every stored thread id into memory as one array
    collection.find().toArray(function(err, docs) {
      assert.equal(null, err);
      var threadContents = [];
      // Fire off one Gmail API request per doc, all at once
      for (var i = 0; i < docs.length; i++) {
        gmail
          .users
          .threads
          .get({auth: auth, 'userId': 'me', 'id': docs[i].id}, function(err, resp) {
            assert.equal(null, err);
            threadContents.push(resp);
            console.log(threadContents.length);
            console.log(threadContents[threadContents.length - 1].id);
            var anotherCollection = db.collection('threadContents');
            // Upsert the full thread object keyed by its Gmail id
            anotherCollection.updateOne(
              {id: threadContents[threadContents.length - 1].id},
              threadContents[threadContents.length - 1],
              {upsert: true},
              function(err, result) {
                assert.equal(null, err);
                console.log('updated one.');
              });
            // Close the connection once every response has arrived
            if (threadContents.length === docs.length) {
              console.log('Length matches!');
              db.close();
            }
          }); // end(callback(threads.get))
      } // end(for(docs.length))
    }); // end(find.toArray)
  }); // end(callback(mongo.connect))
} // end(func(eachThread))
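
For reference, this is roughly the part-by-part rewrite I have in mind: walk the Mongo cursor one document at a time instead of calling toArray(), and write each thread straight to the database so nothing accumulates in an array. This is only a sketch, not tested at scale; processNext is a made-up helper name, and I am assuming the cursor.next(callback) API of the 2.x mongodb driver used above.

// Sketch: process one thread at a time; only the current doc is in memory.
function processNext(cursor, gmail, auth, out, done) {
  cursor.next(function(err, doc) {
    if (err) return done(err);
    if (doc === null) return done(); // cursor exhausted: every thread is processed
    gmail.users.threads.get({auth: auth, userId: 'me', id: doc.id}, function(err, resp) {
      if (err) return done(err);
      // Upsert immediately instead of buffering responses in an array
      out.updateOne({id: resp.id}, resp, {upsert: true}, function(err) {
        if (err) return done(err);
        // Recurse only after the write completes; each call starts on a fresh
        // stack from the event loop, so there is no stack-overflow risk.
        processNext(cursor, gmail, auth, out, done);
      });
    });
  });
}

function eachThreadSequential(auth) {
  var gmail = google.gmail('v1');
  MongoClient.connect(mongoUrl, function(err, db) {
    assert.equal(null, err);
    var cursor = db.collection('threads').find(); // no toArray(): iterate lazily
    var out = db.collection('threadContents');
    processNext(cursor, gmail, auth, out, function(err) {
      assert.equal(null, err);
      console.log('All threads processed.');
      db.close();
    });
  });
}

Since each step only starts after the previous write finishes, memory use should stay flat regardless of mailbox size, at the cost of making one API request at a time. Is something like this the right direction, or is there a better pattern?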