package com.test;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * With TextInputFormat, the data for each map task comes from one InputSplit,
 * and splits are carved out by HDFS block size. Because records vary in
 * length, each map task ends up processing a different number of records.
 * NLineInputFormat instead guarantees that every map task processes the same
 * number of input lines.
 */
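// Illustrative example (numbers are made up): with linespermap set to 2, a
// 10-line input file is divided into 5 InputSplits, so 5 map tasks run and
// each one reads exactly 2 lines, no matter how long those lines are.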
public class WordCountNL extends Configured implements Tool {
    public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Tokenize the line on whitespace and emit <word, 1> for every token.
            // The Text and IntWritable instances are reused across calls to avoid
            // allocating new objects for each record.
            StringTokenizer st = new StringTokenizer(value.toString());
            while (st.hasMoreTokens()) {
                word.set(st.nextToken());
                context.write(word, ONE);
            }
        }
    }
    /** Combiner: pre-aggregates counts on the map side to cut shuffle traffic. */
    public static class Combiner extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int count = 0;
            for (IntWritable value : values) {
                count += value.get();
            }
            context.write(key, new IntWritable(count));
        }
    }
    /** Reducer: sums the partial counts for each word into a final total. */
    public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int count = 0;
            for (IntWritable value : values) {
                count += value.get();
            }
            context.write(key, new IntWritable(count));
        }
    }
    public int run(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = this.getConf();
        // Set how many lines of input each map task should process.
        // Equivalent to: conf.set("mapreduce.input.lineinputformat.linespermap", "1");
        conf.set(NLineInputFormat.LINES_PER_MAP, "1");
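        // Alternatively, once the Job object exists (created below), the typed
        // helper NLineInputFormat.setNumLinesPerSplit(job, 1) sets the same property.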
        Job job = Job.getInstance(conf);
        job.setJobName(WordCountNL.class.getSimpleName());
        job.setJarByClass(WordCountNL.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setMapperClass(Map.class);
        job.setCombinerClass(Combiner.class);
        job.setReducerClass(Reduce.class);
        job.setInputFormatClass(NLineInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        return job.waitForCompletion(true) ? 0 : 1;
    }
    public static void main(String[] args) throws Exception {
        int exit = ToolRunner.run(new WordCountNL(), args);
        System.exit(exit);
    }
}