完整程序下载: https://github.com/szxiaokang/hbaseAPI
首先说下环境, 在 Windows 下安装了三台虚拟机, 网卡桥接的, 每台有独立的 ip, 分别为:
10.68.128.215 master
10.68.128.212 slave1
10.68.128.211 slave2
hbase 版本 1.2.3
zookeeper版本 3.4.9
系统已启动, 且已正常运行. Windows 下的 hosts 文件已经做了绑定
需要的 jar包
hadoop-common-2.7.1.jar
hbase-common-1.2.3.jar
hbase-hadoop-compat-1.2.3.jar
hbase-thrift-1.2.3.jar
zookeeper-3.4.9.jar
commons-codec-1.9.jar
commons-configuration-1.6.jar
hbase-client-1.2.3.jar
hbase-server-1.2.3.jar
slf4j-api-1.7.7.jar
slf4j-log4j12-1.7.5.jar
log4j-1.2-api-2.4.1.jar
log4j-api-2.4.1.jar
log4j-core-2.4.1.jar
log4j-slf4j-impl-2.4.1.jar
log4j-web-2.4.1.jar
commons-logging-1.2.jar
guava-12.0.1.jar
commons-collections-3.2.2.jar
commons-lang-2.6.jar
hadoop-annotations-2.7.1.jar
hadoop-auth-2.7.1.jar
protobuf-java-2.5.0.jar
hbase-protocol-1.2.3.jar
htrace-core-3.1.0-incubating.jar
netty-all-4.0.23.Final.jar
commons-cli-1.2.jar
metrics-core-2.2.0.jar
jackson-core-asl-1.9.13.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
jackson-xc-1.9.13.jar
程序如下
/**
* @ClassName: Query.java
* @author Kang [email protected]
* @version V1.0
* @Date 2016-10-26 上午10:52:50
* @Description: HBase 1.2.3 client demo: create/delete tables, put/get/delete rows
*
*/
package kang.com.cn;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
public class Query {

    // Shared handles: populated by init(), released by close().
    // NOTE(review): kept public static for interface compatibility; ideally private.
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    /**
     * Demo entry point: lists all tables, then dumps the row "angelababy"
     * from table "member".
     */
    public static void main(String[] args) throws IOException {
        println("Start...");
        listTables();
        getData("member", "angelababy");
        println("End...");
    }

    /**
     * Initializes the HBase connection and the Admin handle.
     * The ZooKeeper quorum uses raw IPs on purpose — per the troubleshooting
     * notes below, hostnames caused the client to loop on meta lookups.
     */
    public static void init() {
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.property.clientPort", "2181");
        configuration.set("hbase.zookeeper.quorum", "10.68.128.215,10.68.128.211,10.68.128.212");
        configuration.set("hbase.master", "hdfs://10.68.128.215:60000");
        // Fixed key: the canonical property is "hbase.rootdir";
        // "hbase.root.dir" (as originally written) is not a recognized key.
        configuration.set("hbase.rootdir", "hdfs://10.68.128.215:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Closes the Admin handle and the connection; any IOException raised
     * while closing is logged and swallowed.
     */
    public static void close() {
        try {
            if (null != admin) {
                admin.close();
            }
            if (null != connection) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a table with the given column families; prints a message and
     * does nothing if the table already exists.
     *
     * @param tableName table name
     * @param cols      column family names
     * @throws IOException if the create fails
     */
    public static void createTable(String tableName, String[] cols) throws IOException {
        init();
        try {
            TableName tName = TableName.valueOf(tableName);
            if (admin.tableExists(tName)) {
                println(tableName + " exists.");
                return;
            }
            HTableDescriptor tableDesc = new HTableDescriptor(tName);
            for (String col : cols) {
                tableDesc.addFamily(new HColumnDescriptor(col));
            }
            admin.createTable(tableDesc);
        } finally {
            close(); // release the connection even when createTable() throws
        }
    }

    /**
     * Disables and drops a table; prints a message if it does not exist.
     *
     * @param tableName table name
     * @throws IOException if disable/delete fails
     */
    public static void deleteTable(String tableName) throws IOException {
        init();
        try {
            TableName tName = TableName.valueOf(tableName);
            if (admin.tableExists(tName)) {
                admin.disableTable(tName); // a table must be disabled before deletion
                admin.deleteTable(tName);
            } else {
                println(tableName + " not exists.");
            }
        } finally {
            close();
        }
    }

    /**
     * Prints the name of every table in the cluster.
     * Fixed: the original iterated over the descriptor array even when
     * listTables() had thrown, causing a NullPointerException.
     */
    public static void listTables() {
        init();
        try {
            for (HTableDescriptor descriptor : admin.listTables()) {
                println(descriptor.getNameAsString());
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            close();
        }
    }

    /**
     * Inserts (or overwrites) a single cell.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param colFamily column family
     * @param col       column qualifier
     * @param value     cell value
     * @throws IOException if the put fails
     */
    public static void insert(String tableName, String rowKey, String colFamily, String col, String value) throws IOException {
        init();
        // try-with-resources guarantees the Table is closed even if put() throws
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col), Bytes.toBytes(value));
            table.put(put);
            // Batch insert: List<Put> puts = new ArrayList<>(); puts.add(put); table.put(puts);
        } finally {
            close();
        }
    }

    /**
     * Deletes data for a row; the scope depends on the arguments:
     * colFamily == null -> whole row; col == null -> whole family;
     * both non-null -> that single column only.
     *
     * Fixed: the original issued a family-wide delete AND a column delete
     * when both colFamily and col were given, so the whole family was
     * removed even though only one column was requested.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param colFamily column family, or null
     * @param col       column qualifier, or null
     * @throws IOException if the delete fails
     */
    public static void delete(String tableName, String rowKey, String colFamily, String col) throws IOException {
        init();
        try {
            TableName tName = TableName.valueOf(tableName);
            if (!admin.tableExists(tName)) {
                println(tableName + " not exists.");
                return;
            }
            try (Table table = connection.getTable(tName)) {
                Delete del = new Delete(Bytes.toBytes(rowKey));
                if (colFamily != null && col != null) {
                    del.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col));
                } else if (colFamily != null) {
                    del.addFamily(Bytes.toBytes(colFamily));
                }
                // Batch delete: List<Delete> dels = new ArrayList<>(); dels.add(del); table.delete(dels);
                table.delete(del);
            }
        } finally {
            close();
        }
    }

    /**
     * Fetches and prints one row by RowKey, optionally narrowed to a column
     * family or a single column.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param colFamily column family, or null for all families
     * @param col       column qualifier, or null for the whole family
     * @throws IOException if the get fails
     */
    public static void getData(String tableName, String rowKey, String colFamily, String col) throws IOException {
        init();
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            Get get = new Get(Bytes.toBytes(rowKey));
            // Adding the column alone is sufficient; addFamily before addColumn
            // was redundant in the original (addColumn narrows the family set).
            if (colFamily != null && col != null) {
                get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col));
            } else if (colFamily != null) {
                get.addFamily(Bytes.toBytes(colFamily));
            }
            showCell(table.get(get));
        } finally {
            close();
        }
    }

    /**
     * Fetches and prints a whole row by RowKey.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @throws IOException if the get fails
     */
    public static void getData(String tableName, String rowKey) throws IOException {
        getData(tableName, rowKey, null, null);
    }

    /**
     * Pretty-prints every cell of a Get/Scan result.
     * Fixed: decode with Bytes.toString (UTF-8, matching Bytes.toBytes)
     * instead of new String(bytes), which used the platform charset and
     * mojibaked non-ASCII values on e.g. GBK Windows.
     * NOTE(review): the "Timetamp" typo and the "row Name" label (actually
     * the column qualifier) are kept byte-identical on purpose — the sample
     * output in the surrounding article depends on them.
     *
     * @param result result to print
     */
    public static void showCell(Result result) {
        for (Cell cell : result.rawCells()) {
            println("RowName: " + Bytes.toString(CellUtil.cloneRow(cell)) + " ");
            println("Timetamp: " + cell.getTimestamp() + " ");
            println("column Family: " + Bytes.toString(CellUtil.cloneFamily(cell)) + " ");
            println("row Name: " + Bytes.toString(CellUtil.cloneQualifier(cell)) + " ");
            println("value: " + Bytes.toString(CellUtil.cloneValue(cell)) + " ");
        }
    }

    /**
     * Convenience wrapper around System.out.println.
     *
     * @param obj object to print
     */
    private static void println(Object obj) {
        System.out.println(obj);
    }
}
hbase(main):018:0> list
TABLE
domain_table
member
2 row(s) in 0.0120 seconds
=> ["domain_table", "member"]
hbase(main):019:0> scan 'member'
ROW COLUMN+CELL
angelababy column=address:city, timestamp=1477376233811, value=ShangHai
angelababy column=address:contry, timestamp=1477376192558, value=China
angelababy column=info:age, timestamp=1477375647675, value=27
angelababy column=info:birthday, timestamp=1477375750883, value=1989-2-28
angelababy column=info:company, timestamp=1477376154357, value=TaiYangChuanHe
angelababy column=info:height, timestamp=1477375972466, value=168cm
angelababy column=info:weight, timestamp=1477376025268, value=45kg
angelababy column=member_id:number, timestamp=1477632132701, value=2
angelababy column=name:chinese, timestamp=1477378496767, value=\xE6\x9D\xA8\xE9\xA2\x96
angelababy column=name:english, timestamp=1477378538782, value=angelababy
gaoyuanyuan column=address:city, timestamp=1477379052063, value=BeiJing
gaoyuanyuan column=address:contry, timestamp=1477379037776, value=China
gaoyuanyuan column=info:age, timestamp=1477378745014, value=37
gaoyuanyuan column=info:birthday, timestamp=1477378781555, value=1979-10-5
gaoyuanyuan column=info:company, timestamp=1477378994114, value=GaoYuanYuanGongZuoShi
gaoyuanyuan column=info:height, timestamp=1477378658944, value=165cm
gaoyuanyuan column=info:weight, timestamp=1477378674360, value=48kg
gaoyuanyuan column=member_id:number, timestamp=1477379130831, value=1
gaoyuanyuan column=name:chinese, timestamp=1477378621922, value=\xE9\xAB\x98\xE5\x9C\x86\xE5\x9C\x86
gaoyuanyuan column=name:english, timestamp=1477378592003, value=Gao YuanYuan
2 row(s) in 0.1200 seconds
Start...
15:29:03.806 DEBUG org.apache.hadoop.security.Groups 301 getUserToGroupsMappingService - Creating new Groups object
15:29:03.846 DEBUG org.apache.hadoop.util.NativeCodeLoader 46 - Trying to load the custom-built native-hadoop library...
.....
15:29:04.735 TRACE org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder 93 getBestComparer - Unsafe comparer selected for byte unaligned system architecture
domain_table
member
15:29:04.744 INFO org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation 2139 closeMasterService - Closing master protocol: MasterService
....
15:29:04.952 TRACE org.apache.hadoop.hbase.client.ScannerCallableWithReplicas 234 updateCurrentlyServingReplica - Setting current scanner as id=-1 associated with replica=0
15:29:04.967 TRACE org.apache.hadoop.hbase.client.MetaCache 175 cacheLocation - Merged cached locations: [region=member,,1477375368907.9ca5604d5939499dc276340d29994208., hostname=slave1,16020,1477617236309, seqNum=47]
15:29:04.994 TRACE org.apache.hadoop.hbase.ipc.AbstractRpcClient 236 callBlockingMethod - Call: Get, callTime: 18ms
RowName: angelababy
Timetamp: 1477376233811
column Family: address
row Name: city
value: ShangHai
RowName: angelababy
Timetamp: 1477376192558
column Family: address
row Name: contry
value: China
RowName: angelababy
Timetamp: 1477375647675
column Family: info
row Name: age
value: 27
RowName: angelababy
Timetamp: 1477375750883
column Family: info
row Name: birthday
value: 1989-2-28
RowName: angelababy
Timetamp: 1477376154357
column Family: info
row Name: company
value: TaiYangChuanHe
RowName: angelababy
Timetamp: 1477375972466
column Family: info
row Name: height
value: 168cm
RowName: angelababy
Timetamp: 1477376025268
column Family: info
row Name: weight
value: 45kg
RowName: angelababy
Timetamp: 1477632132701
column Family: member_id
row Name: number
value: 2
RowName: angelababy
Timetamp: 1477378496767
column Family: name
row Name: chinese
value: 杨颖
RowName: angelababy
Timetamp: 1477378538782
column Family: name
row Name: english
value: angelababy
15:29:04.996 INFO org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation 1710 closeZooKeeperWatcher - Closing zookeeper sessionid=0x158091bd27b000a
.....
15:29:05.016 TRACE org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection 570 run - IPC Client (1375204579) connection to slave1/10.68.128.212:16020 from kangyun: interrupted while waiting for call responses
15:29:05.016 TRACE org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection 1060 markClosed - IPC Client (1375204579) connection to slave1/10.68.128.212:16020 from kangyun: marking at should close, reason: Origin: InterruptedException
15:29:05.017 TRACE org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection 860 close - IPC Client (1375204579) connection to slave1/10.68.128.212:16020 from kangyun: closing ipc connection to slave1/10.68.128.212:16020
15:29:05.017 TRACE org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection 866 close - IPC Client (1375204579) connection to slave1/10.68.128.212:16020 from kangyun: ipc connection to slave1/10.68.128.212:16020 closed
15:29:05.017 TRACE org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection 583 run - IPC Client (1375204579) connection to slave1/10.68.128.212:16020 from kangyun: stopped, connections 0
End...
下面说一下遇到的问题.
第一个问题: 程序运行之后没有反应, 没有日志
此问题是因为没有配置 log4j2.xml (不是log4j.xml)
在包根目录下新建一个 log4j2.xml (配置内容见文末的 GitHub 仓库), 可以根据你的需要修改
第二个问题: 没有查询到预期的信息
运行之后一直没有出来预期的结果, 大概的日志信息长这样:
2016-10-27 at 14:35:46 CST INFO org.apache.zookeeper.ClientCnxn$SendThread 876 primeConnection - Socket connection established to slave2/10.68.128.211:2181, initiating session
2016-10-27 at 14:35:46 CST DEBUG org.apache.zookeeper.ClientCnxn$SendThread 949 primeConnection - Session establishment request sent on slave2/10.68.128.211:2181
2016-10-27 at 14:35:46 CST TRACE org.apache.zookeeper.ClientCnxnSocket 124 readConnectResult - readConnectResult 37 0x[0,0,0,0,0,0,ffffff9c,40,3,58,3,ffffffb9,ffffffd5,2c,0,c,0,0,0,10,1b,c,3c,60,b,6b,ffffffaf,76,ffffff83,ffffffff,ffffff9c,25,38,fffffff8,32,ffffff82,0,]
2016-10-27 at 14:35:46 CST INFO org.apache.zookeeper.ClientCnxn$SendThread 1299 onConnected - Session establishment complete on server slave2/10.68.128.211:2181, sessionid = 0x35803b9d52c000c, negotiated timeout = 40000
2016-10-27 at 14:35:46 CST DEBUG org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher 602 process - hconnection-0x31e0fd3d0x0, quorum=master:2181,slave1:2181,slave2:2181, baseZNode=/hbase Received ZooKeeper Event, type=None, state=SyncConnected, path=null
2016-10-27 at 14:35:46 CST DEBUG org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher 686 connectionEvent - hconnection-0x31e0fd3d-0x35803b9d52c000c connected
2016-10-27 at 14:35:46 CST DEBUG org.apache.zookeeper.ClientCnxn$SendThread 843 readResponse - Reading reply sessionid:0x35803b9d52c000c, packet:: clientPath:null serverPath:null finished:false header:: 1,3 replyHeader:: 1,111669149796,0 request:: '/hbase/hbaseid,F response:: s{17179869199,111669149705,1475908620421,1477531322314,25,0,0,0,67,0,17179869199}
2016-10-27 at 14:35:46 CST DEBUG org.apache.zookeeper.ClientCnxn$SendThread 843 readResponse - Reading reply sessionid:0x35803b9d52c000c, packet:: clientPath:null serverPath:null finished:false header:: 2,4 replyHeader:: 2,111669149796,0 request:: '/hbase/hbaseid,F response:: #ffffffff000146d61737465723a3136303030ffffffe06554104326ffffffc71850425546a2430643365353666652d646631642d343734632d626538322d646536356137353336363632,s{17179869199,111669149705,1475908620421,1477531322314,25,0,0,0,67,0,17179869199}
2016-10-27 at 14:35:46 CST TRACE org.apache.hadoop.hbase.zookeeper.ZKUtil 1935 logRetrievedMsg - hconnection-0x31e0fd3d-0x35803b9d52c000c, quorum=master:2181,slave1:2181,slave2:2181, baseZNode=/hbase Retrieved 42 byte(s) of data from znode /hbase/hbaseid; data=PBUF\x0A$0d3e56fe-df1d-474c-b...
2016-10-27 at 14:35:46 CST DEBUG org.apache.hadoop.hbase.ipc.AbstractRpcClient 116 - Codec=org.apache.hadoop.hbase.codec.KeyValueCodec@374f910c, compressor=null, tcpKeepAlive=true, tcpNoDelay=true, connectTO=10000, readTO=20000, writeTO=60000, minIdleTimeBeforeClose=120000, maxRetries=0, fallbackAllowed=false, bind address=null
2016-10-27 at 14:35:46 CST DEBUG org.apache.zookeeper.ClientCnxn$SendThread 843 readResponse - Reading reply sessionid:0x35803b9d52c000c, packet:: clientPath:null serverPath:null finished:false header:: 3,3 replyHeader:: 3,111669149796,0 request:: '/hbase,F response:: s{17179869186,17179869186,1475908611213,1475908611213,0,168,0,0,0,16,111669149727}
2016-10-27 at 14:35:46 CST DEBUG org.apache.zookeeper.ClientCnxn$SendThread 843 readResponse - Reading reply sessionid:0x35803b9d52c000c, packet:: clientPath:null serverPath:null finished:false header:: 4,4 replyHeader:: 4,111669149796,0 request:: '/hbase/master,F response:: #ffffffff000146d61737465723a3136303030ffffffcbffffffb7b17ffffffa6ffffffcfffffff96ffffffd650425546a12a66d617374657210ffffff807d18ffffffd4ffffffe9ffffffe9ffffff9dffffff802b10018ffffff8a7d,s{111669149701,111669149701,1477531311006,1477531311006,0,0,0,240946676744585216,54,0,111669149701}
每隔几秒就会再刷一次, 本人的解决办法是将程序中配置的 master, slave1, slave2 全部改成 ip 即可
完整程序下载: https://github.com/szxiaokang/hbaseAPI
如果解决了你的问题, 给个Star, 3Q :)