MapReduce job: reading from HBase and writing to MySQL

public class MyAction {

	/**
	 * Driver entry point: runs a MapReduce job that scans every cell of the
	 * HBase table {@code misdn_catetory13} and writes one row per cell
	 * (mid, mfamily, mcolumn, mvalue) into the MySQL table of the same name
	 * via {@link DBOutputFormat}.
	 */
	public static void main(String[] args) {
		// Impersonate the "hadoop" user for all cluster operations.
		UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hadoop");
		ugi.doAs(new PrivilegedAction<Void>() {
			public Void run() {
				try {
					System.setProperty("path.separator", ":");

					Configuration conf = new Configuration();

					// Ship third-party jars (the MySQL JDBC driver) with the job.
					conf.set("tmpjars", "/tmp/jars/mysql-connector-java-5.1.7-bin.jar");

					// JDBC connection used by DBOutputFormat on the output side.
					DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
							"jdbc:mysql://172.168.8.54:3306/test", "hadoop", "hadoop");

					conf.set("mapred.job.tracker", "127.0.0.1:9001");
					conf.set("fs.default.name", "hdfs://127.0.0.1:9000");
					conf.set("hadoop.job.ugi", "hadoop");
					// NOTE(review): three identical quorum hosts — presumably a
					// placeholder for a real 3-node ZooKeeper ensemble; confirm.
					conf.set("hbase.zookeeper.quorum", "127.0.0.1,127.0.0.1,127.0.0.1");
					conf.set("hbase.zookeeper.property.clientPort", "2181");

					Job job = new Job(conf);
					job.setJobName("MyAction");
					job.setJarByClass(MyAction.class);

					job.setOutputFormatClass(DBOutputFormat.class);

					// Map side: full scan of the HBase source table.
					Scan scan = new Scan();
					TableMapReduceUtil.initTableMapperJob("misdn_catetory13", scan,
							MyMapper.class, MapOut.class, NullWritable.class, job);

					// Output side: target MySQL table and its column order; must
					// match MapOut.write(PreparedStatement).
					DBOutputFormat.setOutput(job, "misdn_catetory13",
							new String[] { "mid", "mfamily", "mcolumn", "mvalue" });

					// Propagate job failure to the caller: the original ignored
					// the boolean result and always exited 0.
					if (!job.waitForCompletion(true)) {
						System.err.println("Job MyAction failed");
						System.exit(1);
					}
				} catch (Exception e) {
					// Report the failure with a non-zero exit code instead of
					// printing the trace and exiting successfully.
					e.printStackTrace();
					System.exit(1);
				}
				return null;
			}
		});
	}

}


public class MyMapper extends TableMapper<MapOut, NullWritable> {

	/**
	 * Emits one {@link MapOut} record per cell (KeyValue) of the scanned
	 * HBase row: (row key, family, qualifier, value) with a NullWritable
	 * placeholder value.
	 *
	 * @param key     row key of the current HBase row
	 * @param value   all cells returned by the scan for this row
	 * @param context MapReduce context the records are written to
	 */
	@Override
	protected void map(ImmutableBytesWritable key, Result value, Context context)
			throws IOException, InterruptedException {

		// Decode bytes with an explicit charset: new String(byte[]) uses the
		// platform-default charset, which is not portable across cluster
		// nodes. UnsupportedEncodingException is an IOException, so the
		// declared throws clause already covers it.
		String rowKey = new String(key.get(), "UTF-8");

		List<KeyValue> cells = value.list();
		if (cells == null) {
			// Result.list() returns null for an empty Result; the original
			// would have thrown a NullPointerException here.
			return;
		}

		for (KeyValue kv : cells) {
			MapOut mapOut = new MapOut();
			mapOut.setKey(rowKey);
			mapOut.setFamily(new String(kv.getFamily(), "UTF-8"));
			mapOut.setColumn(new String(kv.getQualifier(), "UTF-8"));
			mapOut.setValue(new String(kv.getValue(), "UTF-8"));
			context.write(mapOut, NullWritable.get());
		}
	}

}


public class MapOut implements WritableComparable<MapOut>,DBWritable {
	
	private String key;
	
	private String family;
	
	private String column;
	
	private String value;

	public void readFields(DataInput in) throws IOException {
		key=in.readUTF();
		family=in.readUTF();
		column=in.readUTF();
		value=in.readUTF();
	}

	public void write(DataOutput out) throws IOException {
		out.writeUTF(key);
		out.writeUTF(family);
		out.writeUTF(column);
		out.writeUTF(value);
	}

	public String getKey() {
		return key;
	}

	public void setKey(String key) {
		this.key = key;
	}

	public String getFamily() {
		return family;
	}

	public void setFamily(String family) {
		this.family = family;
	}

	public String getColumn() {
		return column;
	}

	public void setColumn(String column) {
		this.column = column;
	}

	public String getValue() {
		return value;
	}

	public void setValue(String value) {
		this.value = value;
	}

	public int compareTo(MapOut o) {
		return this.key.compareTo(o.getKey());
	}

	public void readFields(ResultSet arg0) throws SQLException {
		// TODO Auto-generated method stub
		
	}

	public void write(PreparedStatement ps) throws SQLException {
		ps.setString(1, key);
		ps.setString(2, family);
		ps.setString(3, column);
		ps.setString(4, value);
	}

Related topics: (mapreduce, mysql, hbase)