
I'm trying to bulk-insert plain objects into MongoDB, but something very weird is happening. Let's say I try to insert 1000 objects like this:

var comments = [];
for (var i = 0; i < 1000; i++) {
    var commentAux = {
        'a': 'test' + i,
        'b': 'https://test.com',
        'c': appUser.id,
        'd': 'test of ' + i
    };
    comments.push(commentAux);
}
Comment.collection.insert(comments, function (err, docs) {
    if (err) {
        return res.json({success: false, message: 'Comments Fail'});
    }
});

But if I change the 1000 to, let's say, 1500, the server hangs. It never throws an exception or even a warning, nothing. It just gets stuck there. I've been reading, and this amount of documents isn't even near the limit that mongo supports.

Has anyone faced the same issue? I'm using mongo 3.2 on Windows.

MarBVI
  • Does MongoDB log anything? It might be because [of this](https://docs.mongodb.com/manual/reference/limits/#Write-Command-Operation-Limit-Size), but I'm not sure. – robertklep Jul 21 '16 at 16:21
  • See http://stackoverflow.com/questions/16726330/mongoose-mongodb-batch-insert?answertab=active#tab-top for bulk operations. – Rabea Jul 21 '16 at 16:27
  • Not sure why it is hanging, but the bulk max size is 1000. There is no benefit to going larger than 1000. https://docs.mongodb.com/manual/reference/limits/#Write-Command-Operation-Limit-Size – helmy Jul 21 '16 at 16:58
  • Ty for the comments... resolved it with a custom handler. – MarBVI Jul 21 '16 at 19:31

2 Answers


As mentioned in the comments, the bulk max size is 1000, so I've implemented a custom BulkManager to handle larger amounts of documents in my API.

So my BulkManager.js is this:

module.exports = function () {
    var instance = {};
    var Q = require("q");

    instance.insert = function (DbModel, items) {
        var arrays = [], size = 1000;

        // Split the items into chunks of at most 1000 documents,
        // the maximum number of operations in a single bulk write.
        while (items.length > 0)
            arrays.push(items.splice(0, size));

        // Create a fresh deferred per call so the manager can be reused.
        var deferred = Q.defer();
        saveItems(deferred, DbModel, arrays, arrays.length, 0);
        return deferred.promise;
    };

    function saveItems(deferred, DbModel, arrays, amountToProcess, indexToProcess) {
        DbModel.collection.insert(arrays[indexToProcess], function (err, docs) {
            if (err) {
                return deferred.reject({success: false, error: err});
            }

            if (indexToProcess < amountToProcess - 1) {
                // More chunks left: insert the next one only after
                // the current chunk has finished.
                saveItems(deferred, DbModel, arrays, amountToProcess, indexToProcess + 1);
            } else {
                deferred.resolve({success: true, docs: docs});
            }
        });
    }

    return instance;
};

And from my API I call it like this:

BulkManager.insert(PhotoModel, photos).then(function (response) {
    // do stuff on success
}).catch(function (response) {
    return res.json({success: false, message: 'Photos Fail'});
});
MarBVI

I had the same problem when inserting 100000 records. Finally I switched from mongoose to the mongodb driver and used: db.collection('mycollectionname').insert(arrayofrecords, { ordered: false });
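
A minimal sketch of that approach (the connection string, database name, and the arrayofrecords variable are placeholders, not taken from my actual app):

var MongoClient = require('mongodb').MongoClient;

// Placeholder connection string and database name.
MongoClient.connect('mongodb://localhost:27017/mydb', function (err, db) {
    if (err) throw err;

    // ordered: false tells the server to keep inserting the remaining
    // documents even if one of them fails, instead of aborting the batch.
    db.collection('mycollectionname').insert(arrayofrecords, { ordered: false }, function (err, result) {
        if (err) console.error(err);
        db.close();
    });
});

With ordered: false the documents may be written in any order, but a single bad document no longer stops the rest of the batch.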

You can also take a look at: https://docs.mongodb.com/getting-started/node/insert/

Mohammad