MapReduce operations with Hadoop 2.2 and HBase 0.96

1. Importing a file from HDFS into an HBase table
package example2;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

// Reads text lines from HDFS and stores them in an HBase table.
public class ImportFromFileExample {

	public static class ImportMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Text> {

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Use the byte offset of the line as the row key; pass the line through as the value.
			context.write(new ImmutableBytesWritable(Bytes.toBytes(key.get())), value);
		}

		
	}
	
	public static class Reducer1 extends TableReducer<ImmutableBytesWritable, Text, ImmutableBytesWritable> {
		private byte[] family = null;
		private byte[] qualifier = null;

		@Override
		protected void setup(Context context) throws IOException,
				InterruptedException {
			// Read the target column ("family:qualifier") from the job configuration.
			String column = context.getConfiguration().get("conf.column");
			byte[][] colkey = KeyValue.parseColumn(Bytes.toBytes(column));
			family = colkey[0];
			if (colkey.length > 1) {
				qualifier = colkey[1];
			}
		}

		@Override
		protected void reduce(ImmutableBytesWritable key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			// Concatenate all values for this key; start from an empty builder
			// rather than a null String, which would produce a leading "null".
			StringBuilder valueCon = new StringBuilder();
			for (Text text : values) {
				valueCon.append(text.toString());
			}
			Put put = new Put(key.get());
			put.add(family, qualifier, Bytes.toBytes(valueCon.toString()));
			context.write(key, put);
		}
	}

	/**
	 * @param args
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		String[] argArray = new GenericOptionsParser(conf, args).getRemainingArgs();
		if (argArray.length != 1) {
			System.err.println("Usage: ImportFromFileExample <input path>");
			System.exit(1);
		}
		// Target column ("family:qualifier"), read back in the reducer's setup().
		conf.set("conf.column", "family1:text");
		Job job = new Job(conf, "import from hdfs to hbase");
		job.setJarByClass(ImportFromFileExample.class);
		job.setMapperClass(ImportMapper.class);
		// initTableReducerJob below sets the output format and table as well;
		// the explicit settings here are harmless but redundant.
		job.setOutputFormatClass(TableOutputFormat.class);
		job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, "testtable");
		job.setMapOutputKeyClass(ImmutableBytesWritable.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(ImmutableBytesWritable.class);
		job.setOutputValueClass(Put.class);
		TableMapReduceUtil.initTableReducerJob("testtable", Reducer1.class, job);
		FileInputFormat.addInputPaths(job, argArray[0]);
		System.exit(job.waitForCompletion(true) ? 0 : 1);

	}

}
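
The job assumes that the target table testtable with column family family1 already exists. Below is a minimal one-off sketch for creating it with the HBase 0.96 client API; the helper class name CreateTestTable is made up here, and the table and family names simply match the ones hard-coded above.

package example2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

// Sketch: creates the target table used by ImportFromFileExample, if it is missing.
// The class name is hypothetical; table/family names match the job above.
public class CreateTestTable {

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		HBaseAdmin admin = new HBaseAdmin(conf);
		try {
			if (!admin.tableExists("testtable")) {
				HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testtable"));
				desc.addFamily(new HColumnDescriptor("family1"));
				admin.createTable(desc);
			}
		} finally {
			admin.close();
		}
	}
}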

2. Exporting data from HBase to HDFS

package example2;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

// Exports cell values from an HBase table into text files on HDFS.
public class FromHBaseToHDFSExample {

	public static class HBaseMapper extends TableMapper<NullWritable, Text> {

		@Override
		protected void map(ImmutableBytesWritable key, Result value,
				Context context) throws IOException, InterruptedException {
			// Map-only job: emit every cell value as a line of text, with no key.
			for (KeyValue kv : value.raw()) {
				context.write(NullWritable.get(), new Text(Bytes.toString(kv.getValue())));
			}
		}
		
	}

	/**
	 * @param args
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		String[] argArray = new GenericOptionsParser(conf, args).getRemainingArgs();
		if (argArray.length != 1) {
			System.err.println("Usage: FromHBaseToHDFSExample <output path>");
			System.exit(1);
		}
		Job job = new Job(conf, "import hbase to hdfs");
		job.setJarByClass(FromHBaseToHDFSExample.class);
		TableMapReduceUtil.initTableMapperJob("testtable", new Scan(), HBaseMapper.class, NullWritable.class, Text.class, job);
		FileOutputFormat.setOutputPath(job, new Path(argArray[0]));
		// Map-only job: no reducer is needed.
		job.setNumReduceTasks(0);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
		

	}

}
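
For anything beyond a small table it is common to tune the Scan that is handed to initTableMapperJob instead of using a bare new Scan(). The snippet below is a sketch of such a tweak for the export job above; the caching value of 500 and the family name family1 are assumptions to adjust for your own data. It would replace the initTableMapperJob call inside main().

		// Sketch: a tuned Scan for the map-only export job above (values are assumptions).
		Scan scan = new Scan();
		scan.setCaching(500);        // fetch more rows per RPC to the region servers
		scan.setCacheBlocks(false);  // recommended for MapReduce scans; avoids polluting the block cache
		scan.addFamily(Bytes.toBytes("family1"));  // only read the family that is actually exported

		TableMapReduceUtil.initTableMapperJob("testtable", scan, HBaseMapper.class,
				NullWritable.class, Text.class, job);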

3. Copying data from one HBase table to another


package example2;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

// Copies data from one HBase table into another.
public class FromHBaseToHBaseExample {

	public static class HBaseMapper extends TableMapper<ImmutableBytesWritable, Text> {

		@Override
		protected void map(ImmutableBytesWritable key, Result value,
				Context context) throws IOException, InterruptedException {
			for (Cell kv : value.rawCells()) {
				// getFamilyArray()/getQualifierArray()/getValueArray() expose the whole backing
				// array, so copy the individual components out with CellUtil before stringifying.
				Text out = new Text(Bytes.toString(CellUtil.cloneFamily(kv)) + "|"
						+ Bytes.toString(CellUtil.cloneQualifier(kv)) + "|"
						+ Bytes.toString(CellUtil.cloneValue(kv)));
				// The input key already holds the row key of this Result.
				context.write(key, out);
			}
		}
	}
	
	public static class HBaseReducer extends TableReducer<ImmutableBytesWritable, Text, ImmutableBytesWritable> {

		@Override
		protected void reduce(ImmutableBytesWritable key, Iterable<Text> values,
				Context context)
				throws IOException, InterruptedException {
			for (Text value : values) {
				String text = value.toString();
				// "|" is a regex metacharacter, so it must be escaped for split().
				String[] textArray = text.split("\\|");
				Put put = new Put(key.get());
				// Write into family1 of the target table; the source family is kept in textArray[0].
				put.add(Bytes.toBytes("family1"), Bytes.toBytes(textArray[1]), Bytes.toBytes(textArray[2]));
				context.write(key, put);
			}
		}
	}

	/**
	 * @param args
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		//String [] argArray=new GenericOptionsParser(conf, args).getRemainingArgs();
		// Source and target table names; the target table must already exist
		// and contain the column family the reducer writes into ("family1").
		conf.set("from.table", "testtable");
		conf.set("to.table", "hbase");
		//conf.set("family", "family1");
		Job job = new Job(conf, "hbase to hbase");
		job.setJarByClass(FromHBaseToHBaseExample.class);
		TableMapReduceUtil.initTableMapperJob(conf.get("from.table"), new Scan(), HBaseMapper.class, ImmutableBytesWritable.class, Text.class, job);
		TableMapReduceUtil.initTableReducerJob(conf.get("to.table"), HBaseReducer.class, job);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
		

	}

}
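
Once the copy job has finished, a single row can be spot-checked in the target table with a plain client Get. The sketch below assumes the table and family written above ("hbase", family1); the class name CheckCopiedRow, the row key "somerow", and the qualifier "text" (taken from the first example's data) are placeholders.

package example2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch: spot-checks one row in the target table after FromHBaseToHBaseExample has run.
// Class name, row key, and qualifier are placeholders.
public class CheckCopiedRow {

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		HTable table = new HTable(conf, "hbase");
		try {
			Get get = new Get(Bytes.toBytes("somerow"));   // placeholder row key
			Result result = table.get(get);
			byte[] value = result.getValue(Bytes.toBytes("family1"), Bytes.toBytes("text"));
			System.out.println(value == null ? "row/column not found" : Bytes.toString(value));
		} finally {
			table.close();
		}
	}
}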


