HBase_Eclipse基本操作
查看HBase中的表
创建HTable
删除HTable
添加表列族
获取表描述信息
添加数据
根据rowKey 获取列键值
遍历表内容
1.准备工作
1.新建一个普通java项目,在项目根目录下新建lib文件夹。
2.将$HBase/lib下的所有jar包拷贝到项目lib目录下。
3.选中所有jar包,右击选择 Build Path -> Add to Build Path
4.在项目根目录下新建hbase-site.xml和log4j.properties两个文件。
hbase-site.xml
<configuration>
  <property>
    <name>hbase.cluster.distributed</name>
    <value>true</value>
  </property>
  <property>
    <name>hbase.rootdir</name>
    <!-- zhx01 是 hostname,也可以写 IP -->
    <value>hdfs://zhx01:9000/hbase</value>
  </property>
  <property>
    <name>hbase.zookeeper.property.dataDir</name>
    <!-- ZooKeeper data 目录 -->
    <value>/home/zouhongxue/data/zkdata</value>
  </property>
  <property>
    <name>hbase.zookeeper.quorum</name>
    <!-- 这是自己的机器IP -->
    <value>192.168.50.100</value>
  </property>
  <property>
    <name>hbase.zookeeper.property.clientPort</name>
    <value>2181</value>
  </property>
</configuration>
log4j.properties
这个日志配置文件内容直接复制就OK
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20
# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
log4j.appender.asyncconsole.target=System.err
# Custom Logging levels
log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=INFO
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only otherwise its OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
5.新建一个包,新建一个文件HBase.java。完成后如下图。
2.HBase.java里面封装了一些操作,包括对HTable,HColumn还有遍历等。
package com.demo.hbase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.util.Bytes;
public class HBase {

    /** Shared heavyweight connection, created once and reused by every method. */
    private static Connection conn = null;

    static {
        // Loads hbase-site.xml from the classpath (ZooKeeper quorum, port, etc.).
        Configuration configuration = HBaseConfiguration.create();
        try {
            // A single Connection is intended to be shared; per-call Admin/Table
            // instances obtained from it are lightweight and closed after use.
            conn = ConnectionFactory.createConnection(configuration);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Closes a resource, logging (but not propagating) any IOException.
     * Null-safe so callers can use it unconditionally in finally blocks.
     *
     * @param c resource to close; may be null
     */
    private static void closeQuietly(java.io.Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Prints the name of every table currently known to HBase, one per line.
     */
    public static void list() {
        Admin admin = null;
        try {
            admin = conn.getAdmin();
            for (TableName tn : admin.listTableNames()) {
                System.out.println("表:" + tn.getNameAsString());
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            closeQuietly(admin);
        }
    }

    /**
     * Creates a table with the given column families.
     *
     * @param tableName name of the table to create
     * @param familys   one or more column-family names
     * @return true if the table was created; false if it already existed or an error occurred
     */
    public static boolean create(String tableName, String... familys) {
        Admin admin = null;
        try {
            admin = conn.getAdmin();
            TableName tn = TableName.valueOf(tableName);
            if (admin.tableExists(tn)) {
                System.out.println("表已存在");
                return false;
            }
            HTableDescriptor htd = new HTableDescriptor(tn);
            for (String family : familys) {
                htd.addFamily(new HColumnDescriptor(family));
            }
            admin.createTable(htd);
            System.out.println("表创建成功");
            return true;
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            closeQuietly(admin);
        }
        return false;
    }

    /**
     * Deletes a table if it exists. HBase requires a table to be disabled
     * before it can be deleted, so the table is disabled first.
     *
     * @param tableName name of the table to delete
     */
    public static void delTable(String tableName) {
        Admin admin = null;
        try {
            admin = conn.getAdmin();
            TableName tn = TableName.valueOf(tableName);
            if (admin.tableExists(tn)) {
                admin.disableTable(tn);
                admin.deleteTable(tn);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            closeQuietly(admin);
        }
    }

    /**
     * Prints the column-family names of a table, bracketed by header/footer lines.
     *
     * @param tableName name of the table to describe
     */
    public static void describe(String tableName) {
        Admin admin = null;
        try {
            admin = conn.getAdmin();
            HTableDescriptor htd = admin.getTableDescriptor(TableName.valueOf(tableName));
            System.out.println("===describe " + tableName + "===");
            for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
                System.out.println(hcd.getNameAsString());
            }
            System.out.println("=======================");
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Was previously leaked: the Admin instance must be closed.
            closeQuietly(admin);
        }
    }

    /**
     * Adds column families to an existing table.
     *
     * @param tableName name of the table to modify
     * @param familys   column-family names to add
     */
    public static void addFamily(String tableName, String... familys) {
        Admin admin = null;
        try {
            admin = conn.getAdmin();
            TableName tn = TableName.valueOf(tableName);
            if (admin.tableExists(tn)) {
                // Start from the CURRENT descriptor: modifyTable replaces the whole
                // descriptor, so building a fresh one here would silently drop every
                // column family the table already has.
                HTableDescriptor htd = admin.getTableDescriptor(tn);
                for (String family : familys) {
                    // Skip families that already exist; addFamily would throw otherwise.
                    if (!htd.hasFamily(Bytes.toBytes(family))) {
                        htd.addFamily(new HColumnDescriptor(family));
                    }
                }
                admin.modifyTable(tn, htd);
                System.out.println("列族添加成功");
            } else {
                System.out.println("表不存在");
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            closeQuietly(admin);
        }
    }

    /**
     * Writes one row. Each entry of kvs is a {family, qualifier, value} triple;
     * all triples share the same row key.
     *
     * @param tableName target table
     * @param key       row key
     * @param kvs       array of {family, qualifier, value} String triples
     */
    public static void put(String tableName, String key, String[][] kvs) {
        Table table = null;
        try {
            table = conn.getTable(TableName.valueOf(tableName));
            // All triples target the same row, so one Put carrying every column
            // is equivalent to (and cheaper than) one Put per column.
            Put put = new Put(Bytes.toBytes(key));
            for (String[] kv : kvs) {
                put.addColumn(Bytes.toBytes(kv[0]),   // family
                        Bytes.toBytes(kv[1]),          // qualifier
                        Bytes.toBytes(kv[2]));         // value
            }
            List<Put> lp = new ArrayList<Put>();
            lp.add(put);
            table.put(lp);
            System.out.println("添加成功");
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            closeQuietly(table);
        }
    }

    /**
     * Prints every cell of one row as "family\tqualifier\tvalue" lines.
     * Prints nothing if the row does not exist.
     *
     * @param tableName table to read from
     * @param rowKey    row key to fetch
     */
    public static void get(String tableName, String rowKey) {
        Table table = null;
        try {
            table = conn.getTable(TableName.valueOf(tableName));
            Result result = table.get(new Get(Bytes.toBytes(rowKey)));
            // listCells() returns null for a missing row; guard against the NPE
            // the original code would have thrown.
            if (result.listCells() != null) {
                for (Cell cell : result.listCells()) {
                    String family = Bytes.toString(CellUtil.cloneFamily(cell));
                    String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
                    String value = Bytes.toString(CellUtil.cloneValue(cell));
                    System.out.println(family + "\t" + qualifier + "\t" + value);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            closeQuietly(table);
        }
    }

    /**
     * Full-table scan: prints every cell of every row.
     *
     * @param tableName table to scan
     */
    public static void scan(String tableName) {
        // Delegates to the filtered overload with an unrestricted Scan.
        scan(tableName, new Scan());
    }

    /**
     * Scans a table with the supplied (possibly filtered/bounded) Scan and
     * prints every cell of every matching row.
     *
     * @param tableName table to scan
     * @param scan      pre-configured Scan (filters, start/stop row, ...)
     */
    public static void scan(String tableName, Scan scan) {
        Table table = null;
        try {
            table = conn.getTable(TableName.valueOf(tableName));
            ResultScanner rs = table.getScanner(scan);
            try {
                for (Result row : rs) {
                    for (Cell cell : row.listCells()) {
                        System.out.println("Rowkey:" + Bytes.toString(row.getRow()) + "\t"
                                + "Family:" + Bytes.toString(CellUtil.cloneFamily(cell)) + "\t"
                                + "Quilifier:" + Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t"
                                + "Value:" + Bytes.toString(CellUtil.cloneValue(cell)));
                    }
                }
            } finally {
                // ResultScanner holds server-side resources and must be closed.
                rs.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            closeQuietly(table);
        }
    }

    /**
     * Builds an example Scan combining a column-value filter and a row filter
     * (both must pass), bounded by start/stop row keys.
     *
     * @return configured Scan ready to pass to {@link #scan(String, Scan)}
     */
    public static Scan scan() {
        Scan scan = new Scan();
        // Column-value filter: info:song1 == "冰雨"
        SingleColumnValueFilter scvf =
                new SingleColumnValueFilter(Bytes.toBytes("info"),
                        Bytes.toBytes("song1"),
                        CompareOp.EQUAL,
                        Bytes.toBytes("冰雨"));
        // Row-key filter; alternatives: BinaryPrefixComparator | SubstringComparator | RegexStringComparator
        RowFilter rf = new RowFilter(CompareOp.EQUAL,
                new BinaryComparator(Bytes.toBytes("刘德华")));
        // Filter chain: MUST_PASS_ALL means logical AND of both filters.
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(scvf);
        filterList.addFilter(rf);
        scan.setFilter(filterList);
        // Row-key range for the scan (stop row is exclusive).
        scan.setStartRow(Bytes.toBytes("a1"));
        scan.setStopRow(Bytes.toBytes("刘德华"));
        return scan;
    }

    /**
     * Demo entry point; uncomment the call you want to exercise.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // Connection parameters can also be set programmatically instead of
        // via hbase-site.xml:
        // configuration.set("hbase.zookeeper.quorum", "192.168.50.100");
        // configuration.set("hbase.zookeeper.property.clientPort", "2181");
        // delTable("student");
        // create("student", "info", "addr");
        // list();
        // get("music", "刘德华");
        // describe("music");
        // String[][] column = {{"info", "song1", "冰雨"}};
        // put("music", "刘德华", column);
        scan("music");
    }
}