I thought about the batching approach a couple of hours after I posted the question. The problem with Node.js is that, unlike the PHP client library for example, the Google API client does not have built-in support for batch requests, so I had to spend some time implementing it myself since I could not find any working example. I'll share the solution in case it helps someone else, or for my own future reference.
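For context, the batch endpoint expects a single POST with a multipart/mixed body in which each part wraps one ordinary REST call. The body that batchProcess below builds looks roughly like this (the boundary string, request path format and user emails are just the placeholders used in the code, not an official sample):

--foobar99998888
Content-Type: application/http
Content-Id: <myapprequest-user1@domain.com>

GET directory_v1/admin/directory/v1/users/user1@domain.com?fields=*
--foobar99998888
Content-Type: application/http
Content-Id: <myapprequest-user2@domain.com>

GET directory_v1/admin/directory/v1/users/user2@domain.com?fields=*
--foobar99998888--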
const {google} = require("googleapis"); //google API client library

async function getGroupMembersData(){
  const groupEmail = "group@domain.com"; //google group email
  //get the list of member emails for the group
  const groupMembers = await getGroupMembers(groupEmail).catch(error=>{
    console.error(`Error querying group members: ${error.toString()}`);
  });
  if(!groupMembers){ return; }
  //batch endpoint and scope for the Directory API user requests
  const url = "https://www.googleapis.com/batch/admin/directory_v1";
  const scopes = ["https://www.googleapis.com/auth/admin.directory.user.readonly"];
  //build one GET users request per member email
  const requests = [];
  for(let i=0; i<groupMembers.length; ++i){
    const user = groupMembers[i];
    const request = {
      email: user,
      endpoint: `GET directory_v1/admin/directory/v1/users/${user}?fields=*`
    };
    requests.push(request);
  }
  //send the requests in batches and collect the parsed responses
  const batchRequestData = await batchProcess(url, scopes, requests).catch(error=>{
    console.error(`Error processing batch request: ${error.toString()}`);
  });
  if(!batchRequestData){ return; }
  const usersList = batchRequestData.map(i=>i.responseBody);
  console.log(usersList);
}
//get group members using group email address
async function getGroupMembers(groupKey){
  const scopes = ["https://www.googleapis.com/auth/admin.directory.group.member.readonly"];
  const client = await getClient(scopes); //function to get an authorized client, you have to implement on your own
  const service = google.admin({version: "directory_v1", auth: client});
  //note: only the first page of members (up to 200) is fetched here
  const request = await service.members.list({
    groupKey,
    fields: "members(email)",
    maxResults: 200
  });
  const members = !!request.data.members ? request.data.members.map(i=>i.email) : [];
  return members;
}
//batch request processing in groups of 100
async function batchProcess(batchUrl, scopes, requests){
  const client = await getClient(scopes); //function to get an authorized client, you have to implement on your own
  const results = [];
  const boundary = "foobar99998888"; //boundary line definition
  const nl = "\n";
  const batchLimit = 100; //requests per batch (max supported by the API = 100)
  const totalRounds = Math.ceil(requests.length / batchLimit);
  let batchRound = 1;
  let batchItem = 0;
  let roundLimit = batchLimit;
  do{
    roundLimit = roundLimit < requests.length ? roundLimit : requests.length;
    //build the batch request body for this round (reset on every round)
    let batchBody = "";
    for(batchItem; batchItem<roundLimit; batchItem++){
      const requestData = requests[batchItem];
      batchBody += `--${boundary}${nl}`;
      batchBody += `Content-Type: application/http${nl}`;
      batchBody += `Content-Id: <myapprequest-${requestData.email}>${nl}${nl}`;
      batchBody += `${requestData.endpoint}${nl}`;
    }
    batchBody += `--${boundary}--`;
    //send the batch request
    const batchRequest = await client.request({
      url: batchUrl,
      method: "POST",
      headers: {
        "Content-Type": `multipart/mixed; boundary=${boundary}`
      },
      body: batchBody
    }).catch(error=>{
      console.error(`Error processing batch request: ${error.toString()}`);
    });
    //parse the batch request response
    if(!!batchRequest){
      const batchResponseData = batchRequest.data;
      //the response boundary differs from the request boundary and comes back in the content-type header
      const responseBoundary = batchRequest.headers["content-type"].split("; ")[1].replace("boundary=", "");
      const httpResponses = batchResponseParser(batchResponseData, responseBoundary);
      results.push(...httpResponses);
    }
    batchRound++;
    roundLimit += batchLimit;
  } while(batchRound <= totalRounds);
  return results;
}
//batch response parser
function batchResponseParser(data, boundary){
  const nl = "\r\n";
  //drop the closing boundary and split the multipart response into individual parts
  data = data.replace(`--${boundary}--`, "");
  const responses = data.split(`--${boundary}`);
  responses.shift(); //the first element is empty
  const formattedResponses = responses.map(i=>{
    //each part contains: part meta headers, the HTTP status line plus response headers, and the JSON body
    const parts = i.split(`${nl}${nl}`);
    //part meta headers (Content-Type, Content-Id)
    const responseMetaParts = (parts[0].replace(nl, "")).split(nl);
    const responseMeta = {};
    responseMetaParts.forEach(part=>{
      const objectParts = part.split(":");
      responseMeta[objectParts[0].trim()] = objectParts.slice(1).join(":").trim();
    });
    //HTTP status line and response headers
    const responseHeadersParts = parts[1].split(nl);
    const responseHeaders = {};
    responseHeadersParts.forEach(part=>{
      if(part.indexOf("HTTP/1.1") > -1){
        responseHeaders.status = part;
      } else {
        const objectParts = part.split(":");
        responseHeaders[objectParts[0].trim()] = objectParts.slice(1).join(":").trim();
      }
    });
    //JSON response body
    const reg = new RegExp(`${nl}`, "g");
    const responseBody = JSON.parse(parts[2].replace(reg, ""));
    return {
      responseMeta: responseMeta,
      responseHeaders: responseHeaders,
      responseBody: responseBody
    };
  });
  return formattedResponses;
}
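The getClient helper referenced above is not part of the snippet (it depends on your auth setup). As a minimal sketch, assuming a service account key file with domain-wide delegation and an admin user to impersonate, it could look something like this; the key file path and admin email are placeholders:

//hypothetical getClient sketch, assuming a service account with domain-wide delegation
async function getClient(scopes){
  const auth = new google.auth.JWT({
    keyFile: "./service-account-key.json", //path to the service account key file (placeholder)
    scopes: scopes,
    subject: "admin@domain.com" //admin user to impersonate (placeholder)
  });
  await auth.authorize();
  return auth;
}

With that in place, calling getGroupMembersData() should print the full user object for every member of the group.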