HBase code

Export the "relationship1" and "group" column families of the "virtualaccount" table to local tab-separated files (one cell per line: rowkey, family, qualifier, value), then load those files back into the "virtualaccount1" table.

package com.run.hbase.dataImport;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

/**
 * Extracts the Inner Mongolia (Neimeng) data from HBase and stores it in local files.
 *
 * @author Administrator
 */

public class GetNMData {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.5.211");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        // Source table (export) and destination table (import).
        HTable table = new HTable(conf, "virtualaccount");
        HTable table1 = new HTable(conf, "virtualaccount1");

        // Export steps (commented out; run them first to produce the local files):
        // getRelationship(table);
        // getGroup(table);

        // Import steps: load the local files into the destination table.
        putGroup(table1);
        putRelationship(table1);
    }

    /**
     * Scans the "relationship1" column family and writes each cell to a local
     * tab-separated file: rowkey, family, qualifier, value.
     */
    public static void getRelationship(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/relationship1");
        FileWriter writer = new FileWriter(path);
        BufferedWriter bWriter = new BufferedWriter(writer);

        Scan s = new Scan();
        s.addFamily("relationship1".getBytes());
        ResultScanner ss = table.getScanner(s);

        int i = 0;
        for (Result r : ss) {
            for (KeyValue kv : r.list()) {
                bWriter.append(new String(kv.getRow(), "UTF-8") + "\t"
                        + "relationship1" + "\t"
                        + new String(kv.getQualifier(), "UTF-8") + "\t"
                        + new String(kv.getValue(), "UTF-8") + "\n");
                i++;
                // Flush to disk every 5000 cells to keep the buffer small.
                if (i % 5000 == 0) {
                    bWriter.flush();
                }
            }
        }
        bWriter.close();
        ss.close();
        table.close();
    }

    /**
     * Scans the "group" column family and writes each cell to a local
     * tab-separated file: rowkey, family, qualifier, value.
     */
    public static void getGroup(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/group");
        FileWriter writer = new FileWriter(path);
        BufferedWriter bWriter = new BufferedWriter(writer);

        Scan s = new Scan();
        s.addFamily("group".getBytes());
        ResultScanner ss = table.getScanner(s);

        int i = 0;
        for (Result r : ss) {
            for (KeyValue kv : r.list()) {
                bWriter.append(new String(kv.getRow(), "UTF-8") + "\t"
                        + "group" + "\t"
                        + new String(kv.getQualifier(), "UTF-8") + "\t"
                        + new String(kv.getValue(), "UTF-8") + "\n");
                i++;
                // Flush to disk every 5000 cells to keep the buffer small.
                if (i % 5000 == 0) {
                    bWriter.flush();
                }
            }
        }
        bWriter.close();
        ss.close();
        table.close();
    }

    /**
     * Reads the exported "group" file line by line and writes each cell back
     * into the "group" column family of the destination table.
     */
    public static void putGroup(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/group");
        FileReader reader = new FileReader(path);
        BufferedReader bReader = new BufferedReader(reader);

        String tmp = bReader.readLine();
        // Loop until the end of the file (readLine() returns null); the original
        // looped on bReader != null, which never terminates.
        while (tmp != null) {
            // Expect four fields: rowkey, family, qualifier, value.
            String[] t = tmp.split("\t", 5);
            if (t.length == 4) {
                Put p = new Put(t[0].getBytes("UTF-8"));
                p.add("group".getBytes(), t[2].getBytes(), t[3].getBytes());
                table.put(p);
            }
            tmp = bReader.readLine();
        }
        table.close();
        bReader.close();
    }

    /**
     * Reads the exported "relationship1" file line by line and writes each cell
     * back into the "relationship1" column family of the destination table.
     */
    public static void putRelationship(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/relationship1");
        FileReader reader = new FileReader(path);
        BufferedReader bReader = new BufferedReader(reader);

        String tmp = bReader.readLine();
        // Loop until the end of the file (readLine() returns null); the original
        // looped on bReader != null, which never terminates.
        while (tmp != null) {
            // Expect four fields: rowkey, family, qualifier, value.
            String[] t = tmp.split("\t", 5);
            if (t.length == 4) {
                Put p = new Put(t[0].getBytes("UTF-8"));
                // The original wrote these cells into the "group" family; use
                // "relationship1" so they land in the family they were exported from.
                p.add("relationship1".getBytes(), t[2].getBytes(), t[3].getBytes());
                table.put(p);
            }
            tmp = bReader.readLine();
        }
        table.close();
        bReader.close();
    }

}
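
The class above uses the old HTable/KeyValue client API (HBase 0.9x era); in HBase 1.x and 2.x those classes are deprecated or removed. Below is a minimal sketch of the same "relationship1" export written against the newer Connection/Table API, assuming the same ZooKeeper quorum, table name, and output path. The class name GetNMDataNewApi is made up for illustration and is not part of the original program.

package com.run.hbase.dataImport;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical example class, not part of the original program.
public class GetNMDataNewApi {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.5.211");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        // Connection/Table replace the deprecated HTable constructors;
        // try-with-resources closes the connection, table, and writer.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("virtualaccount"));
             BufferedWriter out = new BufferedWriter(
                     new FileWriter("/home/hadoop/tmp/relationship1"))) {
            Scan scan = new Scan();
            scan.addFamily(Bytes.toBytes("relationship1"));
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result r : scanner) {
                    // rawCells() replaces the deprecated Result.list()/KeyValue.
                    for (Cell cell : r.rawCells()) {
                        out.write(Bytes.toString(CellUtil.cloneRow(cell)) + "\t"
                                + "relationship1" + "\t"
                                + Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t"
                                + Bytes.toString(CellUtil.cloneValue(cell)) + "\n");
                    }
                }
            }
        }
    }
}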
