欢迎使用CSDN-markdown编辑器

idea开发mapreduce实战

我的intellij idea版本是14,Hadoop版本2.6,使用《hadoop权威指南》的天气统计源码作为示例。

下面附上源码,数据集在http://hadoopbook.com/code.html可以下载1901和1902两年数据:

代码

package com.hadoop.maxtemperature;  

import java.io.IOException;  
import org.apache.hadoop.io.IntWritable;  
import org.apache.hadoop.io.LongWritable;  
import org.apache.hadoop.io.Text;  
import org.apache.hadoop.mapreduce.Mapper;  

public class MaxTemperatureMapper  
        extends Mapper<LongWritable, Text, Text, IntWritable> {  //注1  

    // NCDC encodes a missing temperature reading as 9999.
    private static final int MISSING = 9999;  

    /**
     * Extracts (year, air temperature) from one fixed-width NCDC weather record.
     * Emits the pair only when the temperature is present and the quality code
     * indicates a reading that passed quality checks.
     *
     * Records too short to contain the temperature/quality fields are skipped,
     * so a single truncated line no longer kills the whole map task with a
     * StringIndexOutOfBoundsException.
     */
    @Override  
    public void map(LongWritable key, Text value, Context context)  
            throws IOException, InterruptedException {  
        String line = value.toString();  
        if (line.length() < 93) {
            return; // malformed/truncated record — skip instead of crashing
        }
        String year = line.substring(15, 19);  
        int airTemperature;  
        if (line.charAt(87) == '+') { // parseInt doesn't like leading plus signs  
            airTemperature = Integer.parseInt(line.substring(88, 92));  
        } else {  
            airTemperature = Integer.parseInt(line.substring(87, 92));  
        }  
        String quality = line.substring(92, 93);  
        // Quality codes 0,1,4,5,9 mark readings that passed NCDC quality checks.
        if (airTemperature != MISSING && quality.matches("[01459]")) {  
            context.write(new Text(year), new IntWritable(airTemperature));  
        }  
    }  
}  
package com.hadoop.maxtemperature;  


import java.io.IOException;  
import org.apache.hadoop.io.IntWritable;  
import org.apache.hadoop.io.Text;  
import org.apache.hadoop.mapreduce.Reducer;  

public class MaxTemperatureReducer  
        extends Reducer<Text, IntWritable, Text, IntWritable> {  

    /**
     * Emits the maximum temperature observed for a given year.
     *
     * Fix: the original used the raw type {@code extends Reducer} and a raw
     * {@code Iterable values}. With raw types, {@code reduce(Text, ...)} does
     * not override the erased {@code reduce(Object, Iterable, Context)} (so
     * {@code @Override} fails to compile) and the for-each over a raw
     * {@code Iterable} yields {@code Object}, which cannot be assigned to
     * {@code IntWritable}. The type parameters below restore the book's
     * correct signature.
     */
    @Override  
    public void reduce(Text key, Iterable<IntWritable> values,  
                       Context context)  
            throws IOException, InterruptedException {  
        int maxValue = Integer.MIN_VALUE;  
        for (IntWritable value : values) {  
            maxValue = Math.max(maxValue, value.get());  
        }  
        context.write(key, new IntWritable(maxValue));  
    }  
}  
package com.hadoop.maxtemperature;  


import org.apache.hadoop.fs.Path;  
import org.apache.hadoop.io.IntWritable;  
import org.apache.hadoop.io.Text;  
import org.apache.hadoop.mapreduce.Job;  
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;  
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;  

/**
 * Driver: configures and submits the MapReduce job that finds the maximum
 * recorded temperature per year in the NCDC data set.
 *
 * Expects exactly two arguments: the input path and the output path.
 */
public class MaxTemperature {  
    public static void main(String[] args) throws Exception {  
        if (args.length != 2) {  
            // The original message had lost its argument placeholders.
            System.err.println("Usage: MaxTemperature <input path> <output path>");  
            System.exit(-1);  
        }  
        // Job.getInstance() is the supported factory in Hadoop 2.x;
        // the no-arg Job() constructor is deprecated.
        Job job = Job.getInstance();  
        job.setJarByClass(MaxTemperature.class);  
        job.setJobName("Max temperature");  

        FileInputFormat.addInputPath(job, new Path(args[0]));  
        FileOutputFormat.setOutputPath(job, new Path(args[1]));  

        job.setMapperClass(MaxTemperatureMapper.class);  
        job.setReducerClass(MaxTemperatureReducer.class);  

        // Key/value types of the job output (must match the reducer's output).
        job.setOutputKeyClass(Text.class);              //注1  
        job.setOutputValueClass(IntWritable.class);  

        // Exit 0 on success, 1 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);  
    }  
}  

按照参考文章配置一步一步来,但是文章中有些配置需要改一下
第8步
执行完后需要build->build Artifacts…
第10步
main class并不需要输入org.apache.hadoop.util.RunJar,而是选择我们的主类
然后需要
program arguments里填写两个参数:第一个是输入路径,第二个是输出路径(main class已经选择了我们自己的主类,不再需要填jar包路径;程序中检查的也是args.length != 2)

踩了个坑

我一开始并没有按照文章添加yarn的包,但是后来报错:
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/util/Apps
	at java.lang.ClassLoader.defineClass1(Native Method)
	at java.lang.ClassLoader.defineClass(ClassLoader.java:800)
	at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
	at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
	at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at org.apache.hadoop.mapred.LocalDistributedCacheManager.setup(LocalDistributedCacheManager.java:93)
	at org.apache.hadoop.mapred.LocalJobRunner$Job.<init>(LocalJobRunner.java:163)
	at org.apache.hadoop.mapred.LocalJobRunner.submitJob(LocalJobRunner.java:731)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:432)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1285)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1282)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1556)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1282)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1303)
	at com.hadoop.maxTemprature.MaxTemperature.main(MaxTemperature.java:39)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.yarn.util.Apps
	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	... 29 more
解决
http://blog.csdn.net/zhangge360/article/details/51181794
原因是没有导入yarn包

参考文章

http://blog.csdn.net/programmer_wei/article/details/45286749

你可能感兴趣的:(hadoop,mapreduce,idea)