Hadoop2开发环境

使用Maven创建Hadoop2项目
     <dependencies>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.7</version>
            <scope>system</scope>
            <!-- systemPath 必须是绝对路径或 Maven 属性;java.home 指向 JRE,故需 ../ -->
            <systemPath>${java.home}/../lib/tools.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.5.2</version>
            <exclusions>
                <exclusion>
                    <groupId>jdk.tools</groupId>
                    <artifactId>jdk.tools</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.5.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.5.2</version>
        </dependency>
    </dependencies>


连接到Hadoop2环境,例如: 

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.conf.Configuration;

public class IPStarter {
    public static void main(String[] args)  throws Exception {
        Configuration conf = new Configuration();
        //连接到hadoop2集群调试
        conf.set("fs.defaultFS","hdfs://localhost:9000");
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.address", "localhost:8032");
        conf.set("yarn.resourcemanager.scheduler.address", "localhost:8030");
        //conf.set("mapred.remote.os", "Linux");
        Job job = new Job(conf, "JobName");
        job.setJar("/home/grid/study/hadoopDev2/target/hadoopDev2-1.0-SNAPSHOT.jar"); //开发环境调试时使用
        job.setMapperClass(IPMap.class);
        job.setReducerClass(IPReduce.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path("hdfs://localhost:9000/dev2/in/access.20120104.log"));
        FileOutputFormat.setOutputPath(job, new Path("hdfs://localhost:9000/dev2/out/ip"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

PS: 复制 $HADOOP_HOME/etc/hadoop/log4j.properties 到项目的 src/main/resources 目录下(Maven 项目的资源目录),这样调试时才能在控制台看到 Job 执行的详细日志。


你可能感兴趣的:(Hadoop2开发环境)