In my map(Object key, Text value, Context context) I put a TupleWritable into the context with context.write(), and in reduce(Text key, Iterable&lt;TupleWritable&gt; values, Context context) I read the TupleWritable back, but it is empty. My code is below. This has me confused; any help would be appreciated.
package boc.competition.team1;
import java.io.IOException;
import java.util.HashMap;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.join.TupleWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class App {
    public static class SCSTransMap extends Mapper<Object, Text, Text, TupleWritable> {
        private Text name = new Text();

        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            IntWritable i = new IntWritable(1);
            TupleWritable result = new TupleWritable(new IntWritable[] { i, new IntWritable(3) });
            System.out.println(result.get(0) + "=====" + result.get(1));
            // here this prints the right values: 1=====3
            context.write(name, result);
        }
    }
    public static class reducer extends Reducer<Text, TupleWritable, Text, Text> {
        @Override
        public void reduce(Text key, Iterable<TupleWritable> values, Context context) throws IOException, InterruptedException {
            for (TupleWritable tuple : values) {
                System.out.println(tuple.get(0) + "=====" + tuple.get(1));
                // but here it prints 0=====0
            }
        }
    }
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "team1Job");
        job.setJarByClass(App.class);
        job.setReducerClass(reducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(TupleWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        MultipleInputs.addInputPath(job, new Path("C:\\Program Files\\PuTTY\\data\\scs\\Scs_Journal.csv"), TextInputFormat.class, SCSTransMap.class);
        // OUT_PATH is a String constant defined elsewhere in my code
        FileOutputFormat.setOutputPath(job, new Path(OUT_PATH));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
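
For comparison, below is the kind of plain custom Writable I would expect to survive the shuffle in the same job (IntPairWritable is just a placeholder name of mine, not a Hadoop class), which is why I don't understand what TupleWritable is doing differently:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

// Hypothetical replacement value type: it serializes both ints explicitly,
// so nothing depends on TupleWritable's internal bookkeeping.
public class IntPairWritable implements Writable {
    private final IntWritable first = new IntWritable();
    private final IntWritable second = new IntWritable();

    public IntPairWritable() {
    }

    public IntPairWritable(int first, int second) {
        this.first.set(first);
        this.second.set(second);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        // write both fields; Hadoop calls this when serializing the value
        first.write(out);
        second.write(out);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // read them back in the same order on the reduce side
        first.readFields(in);
        second.readFields(in);
    }

    public int getFirst() {
        return first.get();
    }

    public int getSecond() {
        return second.get();
    }
}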