I use the follow codes to write some data into a SequenceFile Format file . when the program run a while , I interrupte the program via red button on eclipse console . however , when I check the data file on hdfs , the sequence file's size is zero. and also can not use 'hadoop fs -text filename' command view the file. when I use SequenceFile.Reader read the file previously created , I meet the 'Exception in thread "main" java.io.EOFException' exception. In this case , how to do? my development environment is eclipse3.7(on windows 7 ) and the hadoop cluster (hadoop version 1.0.3 )on CentOS 6 .
class Sequence extends Thread {
private String uri = "hdfs://172.20.11.60:9000";
private String filePath = "/user/hadoop/input/";
private String fileName = "Sequence-01.seq";
public SequenceFile.Writer writer;
private static int cnt = 0;
private void init() {
Configuration conf = new Configuration();
try {
FileSystem fs = FileSystem.get(URI.create(uri), conf);
writer = SequenceFile.createWriter(fs, conf, new Path(filePath
+ fileName), LongWritable.class, Text.class);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public Sequence() {
init();
}
@Override
public void run(){
while(true){
try {
writer.append(new LongWritable(100), new Text("hello,world"));
cnt++;
if(cnt%100 == 0){
System.out.println("flush current data to file system");
writer.syncFs();
}
} catch (IOException e) {
// TODO Auto-generated catch block
System.out.println("append data error");
e.printStackTrace();
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
System.out.println("thread interupted");
e.printStackTrace();
}
}
}
}
public class TestSequenceFile {
/**
* @param args
*/
public static void main(String[] args) {
// TODO Auto-generated method stub
new Sequence().start();
}
}