I have a function that recursively gets all the files in a directory using fs.readdirSync. It worked well on the small directory I used as a test, but now that I'm running it against a directory that is over 100GB, it takes a very long time to complete. Any ideas on how I can speed this up, or is there a better way of doing this? Eventually I'll have to run this over directories containing terabytes of data.
const fs = require('fs');

// Recursively collect the full paths of all files under `dir`
function getFiles(dir, files = []) {
  // Get an array of all entries (files and directories) in the passed directory
  const fileList = fs.readdirSync(dir);
  for (const file of fileList) {
    // Build the full path by joining the parent directory and the entry name
    const name = `${dir}/${file}`;
    // Check whether the current entry is a directory using fs.statSync
    if (fs.statSync(name).isDirectory()) {
      // If it is a directory, recurse into it, accumulating into the same files array
      getFiles(name, files);
    } else {
      // If it is a file, push the full path onto the files array
      files.push(name);
    }
  }
  return files;
}
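
For reference, here is a minimal sketch of the same traversal using readdirSync's withFileTypes option (available since Node 10.10), which returns fs.Dirent objects and so avoids the extra fs.statSync call on every entry. The getFilesWithDirents name is just for illustration, and I'm not sure this alone is enough at the terabyte scale:

const fs = require('fs');
const path = require('path');

// Sketch: same recursive traversal, but readdirSync with { withFileTypes: true }
// returns fs.Dirent objects, so the directory check needs no per-entry statSync.
function getFilesWithDirents(dir, files = []) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const name = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      // Recurse into subdirectories, reusing the same accumulator array
      getFilesWithDirents(name, files);
    } else {
      files.push(name);
    }
  }
  return files;
}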