
I'm using Hadoop 1.2.1 and Eclipse Juno. I'm trying to chain three map tasks in a single MapReduce job. While writing the MapReduce code in Eclipse, I get an error saying that ChainMapper is not applicable for the arguments, and I also can't set the input path. Here is my MapReduce code:

 package org.myorg;

import java.io.IOException;
import java.net.URI;
import java.nio.file.FileSystem;
import java.util.StringTokenizer;

import javax.security.auth.login.Configuration;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.MapRunnable;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.ChainMapper;
import org.apache.hadoop.mapred.lib.ChainReducer;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.net.StaticMapping;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;


public class Recommand extends Configured implements Tool {



    public static class IdIndexMapper extends MapReduceBase implements Mapper<LongWritable, Text, Text, Text> {

        public void map(LongWritable key, Text val, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
            String ln = val.toString();
            String[] ids = ln.split("\t");
            output.collect(new Text(ids[0]), new Text(ids[1]));
        }
    }
    public static class FtrMapper extends MapReduceBase implements Mapper<Text, Text, Text, Text> {

        public void map(Text key, Text val, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
            String lne = val.toString();
            if (lne.contains("M1024")) {            // was while(...): lne never changes inside the loop, so while would never terminate
                String[] str = lne.split(",");
                for (int i = 0; i < str.length; i++) {
                    if ("M1024".equals(str[i])) {   // the id we need to drop; == compares references, not string contents
                        continue;
                    }
                    output.collect(key, new Text(str[i]));
                }
            }
        }
    }

public static class CntMapper extends MapReduceBase implements Mapper<Text, Text, Text, IntWritable> {

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(Text key, Text val, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
        String line = val.toString();
        StringTokenizer tokenizer = new StringTokenizer(line);
        while (tokenizer.hasMoreTokens()) {
            word.set(tokenizer.nextToken());
            output.collect(word, one);
        }
    }
}


public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {

    public void reduce(Text key, Iterable<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
        int sum = 0;
        for (IntWritable val : values) {
            sum += val.get();
        }
        output.collect(key, new IntWritable(sum));
    }
}

static int printUsage() {
    System.out.println("recommand  ");
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
}

public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf(), Recommand.class);
    conf.setJobName("wordcount");

    if (args.length != 2) {
        System.out.println("ERROR: Wrong number of parameters: " +
                args.length + " instead of 2.");
        return printUsage();
    }
    FileInputFormat.setInputPaths(conf, args[0]);
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    JobConf mapAConf = new JobConf(false);
    ChainMapper.addMapper(conf, IdIndexMapper.class, LongWritable.class, Text.class, Text.class, Text.class, true, mapAConf);

    JobConf mapBConf = new JobConf(false);
    ChainMapper.addMapper(conf, FtrMapper.class, Text.class, Text.class, Text.class, Text.class, true, mapBConf);

    JobConf mapCConf = new JobConf(false);
    ChainMapper.addMapper(conf, CntMapper.class, Text.class, Text.class, Text.class, IntWritable.class, true, mapCConf);

    JobConf reduceConf = new JobConf(false);
    ChainReducer.setReducer(conf, Reduce.class, Text.class, IntWritable.class, Text.class, IntWritable.class, true, reduceConf);

    JobClient.runJob(conf);
    return 0;
}

public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new org.apache.hadoop.conf.Configuration(), new Recommand(), args);
    System.exit(res);
}
}

Can anyone help me solve this problem, please?

  • Can you post the error message, please? – vefthym Aug 11 '14 at 09:01
  • The first one comes on addMapper: "The method addMapper(JobConf, Class<? extends Mapper>, Class<? extends K1>, Class<? extends V1>, Class<? extends K2>, Class<? extends V2>, boolean, JobConf) in the type ChainMapper is not applicable for the arguments (JobConf, Class, Class, Class, Class, Class, boolean, JobConf)". The second one comes from setting the input path: "The method setInputPaths(Job, String) in the type FileInputFormat is not applicable for the arguments (JobConf, String)". – Karthick Aug 11 '14 at 09:51
  • Try the answer of this post (possible duplicate): http://stackoverflow.com/questions/6840922/hadoop-mapreduce-driver-for-chaining-mappers-within-a-mapreduce-job/10470437#10470437 – vefthym Aug 11 '14 at 10:05
  • Yes, I tried the same code you gave. When I try it in Eclipse it shows the error I mentioned, on the ChainMapper.addMapper line. I don't know how to solve it. – Karthick Aug 11 '14 at 10:09
  • I don't think so... The answer in the mentioned post imports `org.apache.hadoop.mapred.Mapper;`, not `org.apache.hadoop.mapreduce.Mapper;` as you do. Did you try that? – vefthym Aug 11 '14 at 10:13
  • vefthym, thanks. I was also facing the same issue, and importing org.apache.hadoop.mapred.Mapper works for me. – Rama Apr 01 '16 at 08:58

1 Answer


Make sure of the following to avoid this error:

  1. All of the chained mapper classes implement the Mapper class from the same API (here, by extending MapReduceBase and implementing org.apache.hadoop.mapred.Mapper).
  2. The ChainMapper class being used comes from that same API, whichever is applicable to your code:

org.apache.hadoop.mapreduce.lib.chain.ChainMapper (new API) or org.apache.hadoop.mapred.lib.ChainMapper (old API)
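
Below is a minimal, self-contained sketch of a chained job written entirely against the old mapred API (Hadoop 1.x). The class names ChainExample, TokenMapper and SumReducer are illustrative placeholders, not taken from the question; the point is that every class involved comes from org.apache.hadoop.mapred.*.

// Sketch only: everything -- ChainMapper, Mapper, FileInputFormat -- comes from
// org.apache.hadoop.mapred.*; mixing in org.apache.hadoop.mapreduce.* classes is
// what produces the "not applicable for the arguments" errors.
import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.ChainMapper;
import org.apache.hadoop.mapred.lib.ChainReducer;

public class ChainExample {

    // Old-API mapper: extend MapReduceBase and implement mapred.Mapper.
    public static class TokenMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);
        private final Text word = new Text();

        public void map(LongWritable key, Text value,
                        OutputCollector<Text, IntWritable> output,
                        Reporter reporter) throws IOException {
            for (String token : value.toString().split("\\s+")) {
                word.set(token);
                output.collect(word, one);
            }
        }
    }

    // Old-API reducer: note the Iterator (not Iterable) parameter.
    public static class SumReducer extends MapReduceBase
            implements Reducer<Text, IntWritable, Text, IntWritable> {
        public void reduce(Text key, Iterator<IntWritable> values,
                           OutputCollector<Text, IntWritable> output,
                           Reporter reporter) throws IOException {
            int sum = 0;
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(ChainExample.class);
        conf.setJobName("chain-example");

        // mapred.FileInputFormat takes a JobConf, unlike the
        // mapreduce.lib.input version, which expects a Job.
        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        // Chain as many old-API mappers as needed, then set the reducer.
        ChainMapper.addMapper(conf, TokenMapper.class,
                LongWritable.class, Text.class, Text.class, IntWritable.class,
                true, new JobConf(false));
        ChainReducer.setReducer(conf, SumReducer.class,
                Text.class, IntWritable.class, Text.class, IntWritable.class,
                true, new JobConf(false));

        JobClient.runJob(conf);
    }
}

The same pattern carries over to the question's three mappers: each addMapper call must be given a class that implements org.apache.hadoop.mapred.Mapper, and each mapper's input key/value types must match the output types of the mapper before it in the chain.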

– Dov Benyomin Sohacheski