
I have the following file, which I want to read line by line and stop reading as soon as I find "nameserver 8.8.8.8".

nameserver 8.8.8.8
nameserver 45.65.85.3
nameserver 40.98.3.3

I am using Node.js and the readline module to do so:

const readline = require('readline');
const fs = require('fs');

function check_resolv_nameserver(){
  // flag indicates whether the nameserver line was found or not
  var nameserver_flag = false;

  const rl = readline.createInterface({
    input: fs.createReadStream('file_to_read.conf')
  });

  rl.on('line', (line) => {
    console.log(`Line from file: ${line}`);
    if (line === 'nameserver 8.8.8.8'){
      console.log('Found the right file. Reading lines should stop here.');
      nameserver_flag = true;
      rl.close();
    }
  });

  rl.on('close', function(){
    if (nameserver_flag === true){
      console.log('Found nameserver 8.8.8.8');
    }
    else {
      console.log('Could not find nameserver 8.8.8.8');
    }
  });
}

check_resolv_nameserver();

Since I emit a close event with rl.close() as soon as I read the first match, I would expect my code to read only the first line and then stop. But instead my output looks like this:

Line from file: nameserver 8.8.8.8
Found the right file. Reading lines should stop here.
Found nameserver 8.8.8.8
Line from file: nameserver 45.65.85.3
Line from file: nameserver 40.98.3.3

How can I make readline stop after the first match and let me proceed with something else?

gvoigt
  • My guess is that the file contents are buffered, so closing `rl` (or the file handle) won't prevent more `line` events from being emitted if there are still complete lines in the buffer. A workaround would be to set a flag once you got the correct line and ignore subsequent lines if that flag is set. – robertklep May 24 '17 at 09:17
  • @robertklep, I think you are right... because the amount of data read is determined by the highWaterMark of createReadStream, the line event will just keep firing as long as there are remaining lines in the buffer... – Aaron Gong Feb 25 '19 at 08:03
  • To close rl we have to close the readStream first, but the readStream reads too much data at once and we cannot stop it. – Shuai Li Mar 02 '19 at 02:09
  • `lineReader.close(); lineReader.removeAllListeners()` – Oz Shabat Sep 05 '19 at 05:17

6 Answers


For those of you who can't make the line reader stop, do this in your readline 'line' callback:

lineReader.close();
lineReader.removeAllListeners();
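
Applied to the question's code, that could look roughly like this (a minimal sketch; note that removeAllListeners() with no argument also drops any 'close' handler, so either do your follow-up work before calling it or remove only the 'line' listeners):

const readline = require('readline');
const fs = require('fs');

const lineReader = readline.createInterface({
  input: fs.createReadStream('file_to_read.conf')
});

lineReader.on('line', (line) => {
  if (line === 'nameserver 8.8.8.8') {
    console.log('Found nameserver 8.8.8.8');
    lineReader.close();
    // Detach the 'line' listeners so the lines still sitting in the
    // internal buffer are emitted to nobody instead of this handler.
    lineReader.removeAllListeners('line');
  }
});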
Oz Shabat

It appears readline buffers some lines, so you'll have to add your own check.

Example:

#! /usr/bin/node

const fs = require('fs')
const readline = require('readline')

const reader = readline.createInterface({
    input: fs.createReadStream('test.js')
})

let wasRead = false

reader.on('line', line => {
    // Skip lines that were already buffered when close() was called
    if (wasRead) return undefined
    console.log('hello world')
    wasRead = true
    reader.close()
})
Breck

You should close the stream as well:

const readline = require('readline');
const fs = require('fs');
const readStream = fs.createReadStream('file_to_read.conf');

// More code here ...

const rl = readline.createInterface({
    input: readStream
  });

// Rest of your code

rl.close();
readStream.destroy();
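
Wired into the question's handler, that would look roughly like this (a sketch; as the comment below notes, lines that were already buffered may still be emitted, so a guard flag is still useful):

const readline = require('readline');
const fs = require('fs');

const readStream = fs.createReadStream('file_to_read.conf');
const rl = readline.createInterface({ input: readStream });

let found = false;

rl.on('line', (line) => {
  if (found) return; // ignore lines that were already buffered
  if (line === 'nameserver 8.8.8.8') {
    found = true;
    rl.close();           // close the readline interface
    readStream.destroy(); // stop the underlying file stream from reading more
  }
});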
Gergo
  • It will not really work. See the comments under the question: the 'line' event will keep firing until you have gone through the lines already in the buffer (size specified by the highWaterMark of createReadStream)... However, readStream.destroy() should stop any new data from coming in. – Aaron Gong Feb 25 '19 at 08:05

I searched for a long time and couldn't get this working, so I managed to get what I want thanks to a Node module: line-reader.

It's good because it can read not only from a file but also from a buffer.

Here is a simple code sample where you can read 2 lines and then stop.

const lineReader = require('line-reader');
const stream = require('stream');

let bufferStream = new stream.PassThrough();
bufferStream.end(yourBuffer);

let lineNumber = 0;
lineReader.eachLine(bufferStream, function(line) {
    lineNumber++;
    if (lineNumber === 1 || lineNumber === 2) {
        // Perform whatever
    } else {
        // returning false stops the reading
        return false;
    }
}, function finished(err) {
    if (err) {
        // throw error or whatever
    }
    // Do post-reading processing here
});

EDIT: I found a clean way to achieve everything exactly as planned:

First, create a splitter Transform to read fixed-size string chunks:

const { Transform } = require('stream');

class Splitter extends Transform {
    constructor(options){
        super(options);
        this.splitSize = options.splitSize;
        this.buffer = Buffer.alloc(0);
        this.continueThis = true;
    }
    stopIt() {
        this.continueThis = false;
    }

    _transform(chunk, encoding, cb){

        this.buffer = Buffer.concat([this.buffer, chunk]);

        while ((this.buffer.length > this.splitSize || this.buffer.length === 1) && this.continueThis){
            try {
                let chunk = this.buffer.slice(0, this.splitSize);

                this.push(chunk);
                this.buffer = this.buffer.slice(this.splitSize);
                if (this.buffer[0] === 26){
                    console.log('EOF : ' + this.buffer[0]);
                }
            } catch (err) {
                console.log('ERR OCCURRED => ', err);
                break;
            }
        }
        console.log('WHILE FINISHED');
        cb();
    }
}

Then pipe your stream through it:

let bufferStream = new stream.PassThrough();
bufferStream.end(hugeBuffer);
let splitter = new Splitter({splitSize : 170}); // In my case I have 170-character lines, so I want to process them line by line
let lineNr = 0;

bufferStream
    .pipe(splitter)
    .on('data', async function(line){
        line = line.toString().trim();

        splitter.pause(); // pause the stream so you can perform long-running processing with await
        lineNr++;

        if (lineNr === 1){
            // Do stuff with the 1st line
        } else {
            splitter.stopIt(); // break the stream and stop reading, so we only read the 1st line
        }

        splitter.resume(); // resume the stream so you can process the next chunk
    })
    .on('error', function(err){
        console.log('Error while reading file. ' + err);
        // whatever
    })
    .on('end', async function(){
        console.log('end event');
        // Stream has ended, do whatever...
    });

This enables reading a stream line by line, exactly as planned. There is no need for the Splitter if the whole file is not that long.

Deunz

What worked for me was adding a resume() call in the 'pause' handler; it allows me to edit the file after it has been read.

var lineReader = require('readline').createInterface({
    input: require('fs').createReadStream(require('path').resolve('test.js'))
});
lineReader.on('line', function (line) { console.log(line) }) /* loop over all lines */
.on('pause', function () {
    /* resume once reading lines is finished, so the file gets closed */
    lineReader.resume();
})
.on('close', function () {
    /* action after the file read is closed */
    console.log('Close ok')
});

You can declare a line event listener and remove it when needed.

const lineEventListener = (line) => {
  // do sth
  // Close the interface and detach this listener
  rl.close();
  rl.removeListener('line', lineEventListener);
}
rl.on('line', lineEventListener);
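
Put together in the question's setting, this might look like the following sketch (same file and search string as in the question):

const readline = require('readline');
const fs = require('fs');

const rl = readline.createInterface({
  input: fs.createReadStream('file_to_read.conf')
});

const lineEventListener = (line) => {
  if (line === 'nameserver 8.8.8.8') {
    console.log('Found nameserver 8.8.8.8');
    rl.close();
    // Detach only this listener; other handlers (e.g. 'close') keep working
    rl.removeListener('line', lineEventListener);
  }
};

rl.on('line', lineEventListener);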