
Looking to get some help. I'm new to Node.js and wondering if it is possible to remove these extra event listeners. I'm using the package "ssh2-sftp-client". The code runs fine, but I get a warning from the Node console:

(node:67350) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 close listeners added to [Client]. Use emitter.setMaxListeners() to increase limit

but I'm not sure where the right place is to clean up the listeners. I hope someone can give me some advice.
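From what I can tell, Node prints this warning once more than 10 listeners are attached for a single event name on one emitter (10 is the default limit). A minimal standalone repro of just the warning mechanism, separate from my actual code:

const EventEmitter = require("events");

const emitter = new EventEmitter();

// Attaching an 11th "close" listener crosses the default limit of 10
// and triggers MaxListenersExceededWarning.
for (let i = 0; i < 11; i++) {
  emitter.on("close", () => {});
}

// emitter.setMaxListeners(20) would silence the warning, but it only
// raises the limit; it does not remove or clean up the listeners.

My full function is below: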

const fs = require("fs");
const AWS = require("aws-sdk");
const Client = require("ssh2-sftp-client");

const s3 = new AWS.S3();
const conn = new Client();
// params and targetFilesName are defined elsewhere in my project

async function executeFtpDownload() {
  try {
    await conn.connect({
      host: params.ftpHost,
      port: params.ftpPort,
      username: params.ftpUser,
      password: params.ftpPassword,
    });

    const files = await conn.list("/home/bmw/autoline");
    const targetFiles = files.filter((file) => {
      return Object.values(targetFilesName).some(
        (targetName) => targetName === file.name
      );
    });

    const dirFiles = await Promise.all(
      targetFiles.map(async (file) => {
        await conn.downloadDir(
          `/home/bmw/autoline/${file.name}`,
          `./${params.LocalPath}/${file.name}`
        );

        const record = await conn.list(`/home/bmw/autoline/${file.name}`);
        const dateRecord = record.reduce((obj, item) => {
          const date = item.name.split("_")[0];
          if (obj[date]) {
            obj[date] = [...obj[date], item.name];
          } else {
            obj[date] = [item.name];
          }
          return obj;
        }, {});
        return {
          [file.name]: dateRecord,
        };
      })
    );

    const folder_list = dirFiles.reduce((acc, item) => {
      const key = Object.keys(item)[0];
      const record = item[key];
      return {
        ...acc,
        [key]: record,
      };
    }, {});

    const processUpload = await Promise.all(
      Object.entries(folder_list).map((folder) => {
        const folderName = folder[0];

        return Promise.all(
          Object.entries(folder[1]).map((subFolder) => {
            const subFolderName = subFolder[0];
            const files = subFolder[1];

            return Promise.all(
              files.map(async (file) => {
                const fileContent = fs.createReadStream(
                  `./${params.LocalPath}/${folderName}/${file}`
                );
                const remove_remote = `/home/bmw/autoline/${folderName}/${file}`;
                const remote = `/home/bmw/autoline/Uploaded${folderName}/${file}`;
                const localPath = `./${params.LocalPath}/${folderName}/${file}`;

                await s3
                  .putObject({
                    Body: fileContent,
                    Bucket: params.Bucket,
                    Key: `${folderName}/${subFolderName}/${file}`,
                  })
                  .promise();

                await conn.fastPut(localPath, remote);
                // await conn.delete(remove_remote);

                // remove the local copy once it has been uploaded
                await fs.promises.unlink(localPath);
                console.log(`File ${file} deleted!`);
              })
            );
          })
        );
      })
    );

    console.log("FTPDOWNLOAD DONE");
    conn.end();
    return folder_list;
  } catch (err) {
    conn.end();
    console.log("ERR executeFtpDownload:", err);

    return {
      statusCode: err.code,
      body: err.name,
    };
  }
}

1 Answer


I've been running into the same issue for two days.

Using @supercharge/promise-pool saved my life.

const { PromisePool } = require("@supercharge/promise-pool");

// PromisePool resolves to an object with two properties:
// results = the resolved values as an array, errors = any rejections
const { results, errors } = await PromisePool
  .for(myArrayOfData)      // the data array; each value is passed to process()
  .withConcurrency(9)      // maximum number of promises running at the same time
  .process(async (currentObject) => await myPromisedFunction(currentObject)); // your promise execution

Personal note: since the warning is caused by the SFTP client's event listeners, I set withConcurrency() to 9. Listing all files from my remote folder creates 1 listener, and the 9 concurrent promises fetching each file's content add a listener each, for a total of 10, which is the default maximum.
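Applied to the code in the question, the unbounded Promise.all over targetFiles could be capped the same way. A sketch, reusing the question's conn, params, and targetFiles (untested; the date-grouping reduce from the question is omitted for brevity):

const { PromisePool } = require("@supercharge/promise-pool");

// at most 9 SFTP operations in flight at once, instead of one per file
const { results: dirFiles, errors } = await PromisePool
  .for(targetFiles)
  .withConcurrency(9)
  .process(async (file) => {
    await conn.downloadDir(
      `/home/bmw/autoline/${file.name}`,
      `./${params.LocalPath}/${file.name}`
    );
    const record = await conn.list(`/home/bmw/autoline/${file.name}`);
    return { [file.name]: record };
  });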

Full sample for my case:

// (assumes this snippet sits inside a new Promise((resolve, reject) => { ... })
// executor, which the resolve/reject calls imply)
client.list(CONFIG.sftp.folder)
  .then(async (data) => {
    const { results, errors } = await PromisePool
      .for(data)
      .withConcurrency(9)
      .process((sftpFileObject) =>
        readFileContent(client, CONFIG.sftp.folder, sftpFileObject)
      );
    // do some fun stuff with the results array containing the resolved data
    resolve("Successfully pulled all file content without warning!");
  })
  .then(() => {
    client.end();
  })
  .catch((err) => {
    client.end();
    console.error(err);
    reject(err);
  });
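Side note on the design: client.list() already returns a promise, so the same flow can be written with plain async/await instead of wrapping it in new Promise. A sketch using the same readFileContent helper from above (the pullAllFileContents name is mine):

const { PromisePool } = require("@supercharge/promise-pool");

async function pullAllFileContents() {
  try {
    const data = await client.list(CONFIG.sftp.folder);
    const { results, errors } = await PromisePool
      .for(data)
      .withConcurrency(9)
      .process((sftpFileObject) =>
        readFileContent(client, CONFIG.sftp.folder, sftpFileObject)
      );
    return results;
  } finally {
    // close the connection whether the pull succeeded or threw
    await client.end();
  }
}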