将HDFS数据导出到MongoDB中


将HDFS数据导出到MongoDB中

本节将使用MongoOutputFormat类,把HDFS中的数据导出为MongoDB中的文档。

准备工作

1、从https://github.com/mongodb/mongo-hadoop下载代码,编译Mongo Hadoop Adaptor ,可以自己下载编译,也可以下载我已经编译好的。

2、下载Mongo Java Driver,https://github.com/mongodb/mongo-java-driver/downloads。

操作流程

1、将准备好的两个Jar包拷入 $HADOOP_HOME/lib中。

2、创建MapReduce代码


import java.io.*;

import org.apache.commons.logging.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.*;
import org.bson.*;
import org.bson.types.ObjectId;


import com.mongodb.hadoop.*;
import com.mongodb.hadoop.util.*;

public class ExportToMongoDBFromHDFS {
	
	private static final Log log =
LogFactory.getLog(ExportToMongoDBFromHDFS.class);
	
	public static class ReadWeblogs extends Mapper{
		
		public void map(Text key, Text value, Context context) throws IOException, InterruptedException{
			
			System.out.println("Key: " + key);
			System.out.println("Value: " + value);
			
			String[] fields = value.toString().split("\t");
			
			String md5 = fields[0];
			String url = fields[1];
			String date = fields[2];
			String time = fields[3];
			String ip = fields[4];
			
			BSONObject b = new BasicBSONObject();
			b.put("md5", md5);
			b.put("url", url);
			b.put("date", date);
			b.put("time", time);
			b.put("ip", ip);
			
			context.write( new ObjectId(), b);
		}
	}
	
	public static void main(String[] args) throws Exception{
		
		final Configuration conf = new Configuration();
		conf.set("mapred.job.tracker", "192.168.11.131:9001");
		MongoConfigUtil.setOutputURI(conf,"mongodb://192.168.100.161:27017/test.weblogs");
		//MongoConfigUtil.setCreateInputSplits(conf, false);
		System.out.println("Configuration: " + conf);
		
		final Job job = new Job(conf, "Export to Mongo");
		
		Path in = new Path("/data/weblogs/weblog_entries.txt");
		FileInputFormat.setInputPaths(job, in);
		
		job.setJarByClass(ExportToMongoDBFromHDFS.class);
		job.setMapperClass(ReadWeblogs.class);
		
		job.setOutputKeyClass(ObjectId.class);
		job.setOutputValueClass(BSONObject.class);
		
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(MongoOutputFormat.class);
		
		job.setNumReduceTasks(0);
		
		System.exit(job.waitForCompletion(true) ? 0 : 1 );
		
	}
}

3、将MapReduce代码导出为Jar包

4、执行MapReduce程序:hadoop jar ExportToMongoDBFromHDFS.jar ExportToMongoDBFromHDFS

5、执行完成

在MongoDB shell中执行 db.weblogs.count() 验证导出结果

结果:3000




你可能感兴趣的:(Hadoop)