2021-07-15

package mapreduce;

import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class WordCount {
    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Job job = Job.getInstance();
        job.setJobName("WordCount");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(doMapper.class);
        job.setReducerClass(doReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        Path in = new Path("hdfs://localhost:9000/mymapreduce1/in/buyer_favorite1");
        Path out = new Path("hdfs://localhost:9000/mymapreduce1/out");
        FileInputFormat.addInputPath(job, in);
        FileOutputFormat.setOutputPath(job, out);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
    public static class doMapper extends Mapper<Object, Text, Text, IntWritable> {
        // The four type parameters are, in order: input key type, input value
        // type, output key type, output value type. The raw (ungenerified)
        // Mapper in the original would not compile with this @Override.
        public static final IntWritable one = new IntWritable(1);
        public static Text word = new Text();

        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            // StringTokenizer is a java.util class that splits a string on the
            // given delimiters; the input fields here are tab-separated.
            StringTokenizer tokenizer = new StringTokenizer(value.toString(), "\t");
            // Loop so that every token on the line is counted, not just the first.
            while (tokenizer.hasMoreTokens()) {
                // nextToken() returns the substring from the current position
                // up to the next delimiter.
                word.set(tokenizer.nextToken());
                // Emit the word with a count of one.
                context.write(word, one);
            }
        }
    }
    public static class doReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        // Same ordering as the mapper: input key type, input value type,
        // output key type, output value type.
        private IntWritable result = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            // Accumulate every count emitted for this key.
            for (IntWritable value : values) {
                sum += value.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }
}
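One practical weakness of the driver above: Hadoop refuses to write into an output directory that already exists, so a second run fails with FileAlreadyExistsException. Below is a minimal hardened sketch, assuming the same HDFS paths as above; the class name WordCountDriver is hypothetical, but Configuration, FileSystem, and setCombinerClass are standard Hadoop API. Since word counting is associative and commutative, the reducer can safely double as a combiner to shrink the map-side output.

package mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "WordCount");
        job.setJarByClass(WordCountDriver.class);
        job.setMapperClass(WordCount.doMapper.class);
        // The reducer's input and output types match, so it can also
        // run as a combiner on the map side.
        job.setCombinerClass(WordCount.doReducer.class);
        job.setReducerClass(WordCount.doReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        Path in = new Path("hdfs://localhost:9000/mymapreduce1/in/buyer_favorite1");
        Path out = new Path("hdfs://localhost:9000/mymapreduce1/out");
        // Remove a stale output directory left behind by a previous run;
        // otherwise the job fails before it starts.
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(out)) {
            fs.delete(out, true);
        }
        FileInputFormat.addInputPath(job, in);
        FileOutputFormat.setOutputPath(job, out);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}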
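To see why the mapper loops over hasMoreTokens(), here is a standalone sketch of StringTokenizer's behavior. The sample line is hypothetical (the real contents of buyer_favorite1 are not shown in this post), but the splitting semantics are exactly what the mapper relies on: each nextToken() call returns the substring from the current position up to the next delimiter.

import java.util.StringTokenizer;

public class TokenizerDemo {
    public static void main(String[] args) {
        // Hypothetical tab-separated record, similar in shape to the
        // input this job expects.
        String line = "10001\t20001\t2021-07-15";
        StringTokenizer tokenizer = new StringTokenizer(line, "\t");
        // Without a loop, only the first field would ever be emitted.
        while (tokenizer.hasMoreTokens()) {
            System.out.println(tokenizer.nextToken());
        }
        // Prints 10001, 20001 and 2021-07-15, one per line.
    }
}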
