0

I'm trying to use the insertMany method, but without success... I read this Stack Overflow answer, but it does not explain how to handle an array of objects that is very big.

In my test case I have 9755 objects, with this script it imports all lines 9 times, which is not what I want...

I would like to divide the array into chunks of 1000 objects for each import.

How can I do this?

/**
 * Bulk-inserts `arrayToImport` into MongoDB in chunks of 1000 documents.
 *
 * Fixes two defects in the original: (1) any trailing remainder smaller
 * than the chunk size (e.g. the last 755 of 9755 items) was never
 * inserted, because inserts only fired when `counter % 1000 === 0`;
 * (2) nothing was returned, so callers could not await completion.
 *
 * @param {Object[]} arrayToImport - plain objects to insert.
 * @param {string} mongooseModel - model file name under ../../../models/.
 * @returns {Promise<Array>} resolves when every chunk has been inserted.
 */
function bulkImportToMongo(arrayToImport, mongooseModel) {
  const Model = require(`../../../models/${mongooseModel}`);
  const batchSize = 1000;
  const inserts = [];
  // Slice the source array directly instead of accumulating into a
  // temp buffer; the final slice may be shorter than batchSize, so no
  // trailing items are ever dropped.
  for (let start = 0; start < arrayToImport.length; start += batchSize) {
    const batch = arrayToImport.slice(start, start + batchSize);
    inserts.push(
      Model.insertMany(batch).then((docs) => {
        console.log(`imported ${docs.length} objects`);
        return docs;
      })
    );
  }
  return Promise.all(inserts);
}
Isak La Fleur
  • 4,428
  • 7
  • 34
  • 50

1 Answer

1

You need to create an array of batches (each batch with no more than 100 elements). For each batch, call Model.insertMany. To wait until all documents have been inserted, use Promise.all:

/**
 * Bulk-inserts `arrayToImport` into MongoDB in fixed-size batches.
 *
 * Cleanups over the original: removed the unused `recordsLeft`
 * variable, replaced the redundant `counter` (derivable from the loop
 * index), used `const` for never-reassigned bindings, and made the
 * batch size an optional parameter (default unchanged at 100).
 *
 * @param {Object[]} arrayToImport - plain objects to insert.
 * @param {string} mongooseModel - model file name under ../../../models/.
 * @param {number} [batchSize=100] - maximum documents per insertMany call.
 * @returns {Promise<Array>} resolves once every batch insert has settled.
 */
function bulkImportToMongo(arrayToImport, mongooseModel, batchSize = 100) {
  const Model = require(`../../../models/${mongooseModel}`);
  const batchCount = Math.ceil(arrayToImport.length / batchSize);
  const ops = [];
  for (let i = 0; i < batchCount; i++) {
    // slice() clamps past-the-end indices, so the last batch may be
    // shorter than batchSize without any special-casing.
    const batch = arrayToImport.slice(i * batchSize, (i + 1) * batchSize);
    ops.push(Model.insertMany(batch));
  }
  return Promise.all(ops);
}
Isak La Fleur
  • 4,428
  • 7
  • 34
  • 50
alexmac
  • 19,087
  • 7
  • 58
  • 69