
The following curl command works perfectly to call, pass arguments to, and execute my "jobified" Spark program:

curl 'http://someserver:8090/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext' -d "inputparms=/somepath1 /somepath2"
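(For context: spark-jobserver parses the POST body as a Typesafe Config string, so the -d payload above reaches the job as a single config entry, roughly inputparms = "/somepath1 /somepath2", which is what the Scala code below reads back with config.getString.)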

Here is the Spark program:

override def runJob(hive: HiveContext, config: Config): Any = {
  val inputParms = config.getString("inputparms").split(" ") // comes from node
  val path1 = inputParms(0)
  val path2 = inputParms(1)
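With the curl payload above, inputParms(0) resolves to /somepath1 and inputParms(1) to /somepath2.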

Instead of the curl command, I need to do an HTTP POST in Node.js. Here is what I have:

var postData = JSON.stringify({
  "inputparms": paths
})

var options = {
  hostname: 'someserver',
  port: 8090,
  path: '/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Content-Length': Buffer.byteLength(postData, 'utf8')
  }
};

http.request(options, function(response) { ...

However, the above script does not work. Am I missing something? Thanks!

Edit 1:

 var myreq = http.request(options, function(response) { ...})
 myreq.write(postData);
 myreq.end();

I get a parse error:

Error: Parse Error
at Error (native)
at Socket.socketOnData (_http_client.js:361:20)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:177:18)
at Socket.Readable.push (_stream_readable.js:135:10)
at TCP.onread (net.js:542:20) bytesParsed: 2, code: 'HPE_INVALID_CONSTANT' }
  • I see that you are setting two headers, Content-Type and Content-Length. But how are you setting the body? – noorul Sep 21 '16 at 02:43
  • @noorul - With the above update, the above error shows. Along with it, here is the error from SJS message": "org.apache.hadoop.mapreduce.lib.input.InvalidInputException: Input path does not exist: file:/home/someid/workspace/sparkjobserver/spark-jobserver/\"inputparms=/path1", – user1384205 Sep 21 '16 at 05:08

1 Answer


The following works for me:

var http = require("http");
var options = {
  hostname: 'localhost',
  port: 8090,
  path: '/jobs?appName=test&classPath=spark.jobserver.LongPiJob',
  method: 'POST',
  headers: {
      'Content-Type': 'application/json',
  }
};
var req = http.request(options, function(res) {
  console.log('Status: ' + res.statusCode);
  console.log('Headers: ' + JSON.stringify(res.headers));
  res.setEncoding('utf8');
  res.on('data', function (body) {
    console.log('Body: ' + body);
  });
});
req.on('error', function(e) {
  console.log('problem with request: ' + e.message);
});
// write data to request body
req.write('stress.test.longpijob.duration=120');
req.end();
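Applied to the endpoint from the question, the same pattern would look like the sketch below. The key difference from the original attempt is the body: a plain key=value string matching the working curl -d payload, rather than a JSON document (server name and paths are the question's placeholders):

var http = require('http');

// plain key=value body, exactly as in the working curl command (not JSON)
var postData = 'inputparms=/somepath1 /somepath2';

var options = {
  hostname: 'someserver',
  port: 8090,
  path: '/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json'
  }
};

var req = http.request(options, function(res) {
  console.log('Status: ' + res.statusCode);
  res.setEncoding('utf8');
  res.on('data', function(body) {
    console.log('Body: ' + body);
  });
});
req.on('error', function(e) {
  console.log('problem with request: ' + e.message);
});
// send the args in the request body, as confirmed in the comments below
req.write(postData);
req.end();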
  • Hi Noorul. Can you share how you retrieve the input from your jobified spark program? In my case, I do this ``` var inputParms = config.getString("inputparms").split(" "); //comes from node var path1 = inputParms.apply(0) var path2 = inputParms.apply(1) ``` – user1384205 Sep 21 '16 at 10:32
  • path1 prints as "inputparms=/path1 path2 prints as /path2" – user1384205 Sep 21 '16 at 10:33
  • See https://github.com/spark-jobserver/spark-jobserver/blob/master/job-server-tests/src/spark.jobserver/LongPiJob.scala – noorul Sep 21 '16 at 11:32
  • @user1384205 Did the answer help you? – noorul Sep 28 '16 at 03:21
  • Thanks @noorul. Sending the args in req.write fixed it. – user1384205 Sep 30 '16 at 13:53