Below is a sample program for the word count application. The main driver class is shown first, followed by the mapper and reducer classes.
package P2;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Driver: configures and submits the job. The class name WordCount and the reducer name MyReducer are assumed, as the original listing omits them.
public class WordCount {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // input and output paths are taken from the command-line arguments
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
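Once the driver, mapper and reducer classes are compiled and packed into a jar, the job can be submitted with a command of the form hadoop jar <jar file> P2.WordCount <input path> <output path>, where the two path arguments are the HDFS input and output directories read by the driver above (WordCount being the driver class name assumed in the listing).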
The mapper class (MyMapper, as configured in the driver) is given below.

package P2;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
public class MyMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String currentLine = value.toString();
        String[] words = currentLine.split("\\s+"); // split on whitespace (delimiter assumed; not shown in the original listing)
        // emit (word, 1) for every word in the line
        for (String word : words) {
            context.write(new Text(word), new IntWritable(1));
        }
        System.out.println(currentLine); // goes to the task's stdout log, not the client console
    }
}
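For example, given the input line "hello world hello", this mapper emits the pairs (hello, 1), (world, 1) and (hello, 1); the framework then groups these pairs by key before handing them to the reducer.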
The reducer class is given below (the class name MyReducer is assumed, matching the driver configuration).

package P2;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text word, Iterable<IntWritable> arr, Context ctx)
            throws IOException, InterruptedException {
        // sum the 1s emitted by the mappers for this word
        Iterator<IntWritable> it = arr.iterator();
        int count = 0;
        while (it.hasNext()) {
            IntWritable i = it.next();
            count = count + i.get();
        }
        ctx.write(word, new IntWritable(count));
    }
}
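Continuing the same example, the reducer receives (hello, [1, 1]) and (world, [1]) and writes (hello, 2) and (world, 1) to the output directory.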