I have created a Node.js script that reads data from a CSV file and then makes an axios POST request to an endpoint to create a user in a database. My issue is that with a lot of data (100+ users in the CSV) I end up sending so many requests at once that I get a 429 Too Many Requests response. Is there any way to space these out?
Here is my code:
const csv = require('csv-parser');
const fs = require('fs');
const axios = require('axios');

let dataArray = [];
let API_KEY = 'example key';

fs.createReadStream('output.csv')
  .pipe(csv())
  // This handler runs for every parsed row, so every POST fires almost at once
  .on('data', async (row) => {
    try {
      let hluser = JSON.stringify({
        "TenantCode": "",
        "Items": [{
          "MemberDetails": {
            "LegacyContactKey": row.hl_uid, "MemberID": "",
            "EmailAddress": row.email,
            "FirstName": row.first_name, "LastName": row.last_name,
            "CompanyName": row.Company, "Title": row.job_title,
            "Phone1": "", "Phone1Type": "Phone", "Phone2": "", "Phone2Type": "Office",
            "Address1": "", "City": row.city, "State": row.state,
            "PostalCode": "", "Country": row.country,
            "Bio": "", "ProfileImageURL": "",
            "FacebookURL": "", "TwitterURL": "", "LinkedInURL": "", "WebsiteUrl": "",
            "IsMember": "True", "IsActive": "True"
          },
          "CommunityGroups": [{ "GroupKey": "", "RoleDescription": "Member" }]
        }]
      });
      console.log(hluser);

      let config = {
        method: 'POST',
        url: 'example.com/v1/api',
        headers: {
          'x-api-key': API_KEY,
          'Content-Type': 'application/json'
        },
        data: hluser
      };

      const { data } = await axios(config);
      console.log(data);
      console.log('***********************************');
      console.log(`Successfully added ${row.first_name} ${row.last_name} to Higher Logic`);
      console.log('***********************************');
      console.log(row);
    } catch (err) {
      console.log(err);
      console.log(`Could not add ${row.first_name} ${row.last_name}`);
    }
  })
  .on('end', () => {
    console.log('CSV file successfully processed');
  });
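One idea I had (just a rough sketch, not tested against the real API) is to buffer the rows in the 'data' handler and only send them one at a time, with a pause between requests, in the 'end' handler. Here `sendUser` stands in for the axios POST from my code above, and the 500 ms delay is just a guess at a safe spacing:

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

fs.createReadStream('output.csv')
  .pipe(csv())
  .on('data', (row) => {
    dataArray.push(row); // just buffer the row, no request yet
  })
  .on('end', async () => {
    for (const row of dataArray) {
      try {
        await sendUser(row); // hypothetical helper wrapping the axios(config) call above
        console.log(`Successfully added ${row.first_name} ${row.last_name}`);
      } catch (err) {
        console.log(`Could not add ${row.first_name} ${row.last_name}`, err.message);
      }
      await delay(500); // pause so the requests don't all hit the API at once
    }
    console.log('CSV file successfully processed');
  });

Would sending them sequentially like this be the right approach, or is there a better pattern for spacing out the requests?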