Contents
HBase Installation and Basic Operations
Setting Up an HBase Pseudo-Distributed Environment
HBase Development: Operating HBase with Java
Level 1: Creating a Table
Level 2: Adding Data
Level 3: Getting Data
Level 4: Deleting a Table
HBase Development: Batch Operations
Level 1: Batch Getting Data
Level 2: Batch Deleting Data
Level 3: Batch Importing Data into HBase
HBase Development: Managing Tables with the Java API
Level 1: Listing Tables with the Java API
Level 2: Modifying a Table
Level 3: Disabling, Enabling, and Deleting Tables
Level 1: Installing and Configuring the HBase Database
mkdir /app
cd /opt
tar -zxvf hbase-2.1.1-bin.tar.gz -C /app
vim /app/hbase-2.1.1/conf/hbase-env.sh
In vi: press i to enter insert mode and add the following line at the end of the file (switch to an English input method first):
export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_111
In vi: press Esc, then type :wq (Shift + :) to save and quit.
Edit the next two files the same way (i to insert, Esc, then :wq to save):
vi /app/hbase-2.1.1/conf/hbase-site.xml
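The tutorial leaves the contents of hbase-site.xml unspecified. As a hedged sketch, a typical pseudo-distributed configuration looks like the following; the HDFS address is an assumption and must match fs.defaultFS in your Hadoop core-site.xml:
<configuration>
  <property>
    <name>hbase.rootdir</name>
    <value>hdfs://localhost:9000/hbase</value>
  </property>
  <property>
    <name>hbase.cluster.distributed</name>
    <value>true</value>
  </property>
</configuration>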
vim /etc/profile
# SET HBASE environment
HBASE_HOME=/app/hbase-2.1.1
export PATH=$PATH:$HBASE_HOME/bin
source /etc/profile
start-hbase.sh
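A quick check that HBase actually came up (not part of the original steps): jps should list an HMaster process.
jps
# expected to include a line like: 1234 HMaster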
Level 2: Creating Tables
hbase shell
# press Enter after each of the following commands
create 'test','data'
create 'dept','data'
create 'emp','data'
list
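If the tables were created, list should print something close to the following (the exact footer varies by HBase version):
TABLE
dept
emp
test
3 row(s)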
Submit for evaluation.
Level 3: Adding and Deleting Data
# Start HBase (one command per line)
start-hbase.sh
# Enter the hbase shell
hbase shell
create 'mytable','data'
put 'mytable','row1','data:1','zhangsan'
put 'mytable','row2','data:2','zhangsanfeng'
put 'mytable','row3','data:3','zhangwuji'
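# Optional sanity check (not required by the task): each put above
# should show up as one cell in the scan output
scan 'mytable'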
# Exit
exit
Code file
package step1;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task{
public void createTable()throws Exception{
/********* Begin *********/
Configuration config = HBaseConfiguration.create();
Connection connection = ConnectionFactory.createConnection(config);
try {
// Create table
Admin admin = connection.getAdmin();
try {
TableName tableName = TableName.valueOf("dept");
// New API: a TableDescriptor is built through TableDescriptorBuilder
TableDescriptorBuilder tableDescriptor =
TableDescriptorBuilder.newBuilder(tableName);
ColumnFamilyDescriptor family =
ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build();// 构建列族对象
tableDescriptor.setColumnFamily(family); // set the column family
admin.createTable(tableDescriptor.build()); // create the table
TableName emp = TableName.valueOf("emp");
// New API: build the emp table the same way
TableDescriptorBuilder empDescriptor =
TableDescriptorBuilder.newBuilder(emp);
ColumnFamilyDescriptor empfamily =
ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("emp")).build();// 构建列族对象
empDescriptor.setColumnFamily(empfamily); // 设置列族
admin.createTable(empDescriptor.build()); // 创建表
} finally {
admin.close();
}
} finally {
connection.close();
}
/********* End *********/
}
}
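One thing worth knowing: createTable throws TableExistsException when the table is already there. A minimal sketch that makes creation re-runnable, assuming the same imports and an open Admin as in the Task class above:
public void createIfAbsent(Admin admin) throws Exception {
    TableName dept = TableName.valueOf("dept");
    if (!admin.tableExists(dept)) { // only create when the table is missing
        admin.createTable(TableDescriptorBuilder.newBuilder(dept)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("data"))
            .build());
    }
}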
Command line (press Enter after each command):
start-dfs.sh
start-hbase.sh
Code file
package step2;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
public void insertInfo()throws Exception{
/********* Begin *********/
Configuration config = HBaseConfiguration.create();
Connection connection = ConnectionFactory.createConnection(config);
Admin admin = connection.getAdmin();
TableName tableName = TableName.valueOf("tb_step2");
TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build();// build the column family descriptor
tableDescriptor.setColumnFamily(family); // set the column family
admin.createTable(tableDescriptor.build()); // create the table
// Insert data
byte[] row1 = Bytes.toBytes("row1");
Put put1 = new Put(row1);
byte[] columnFamily1 = Bytes.toBytes("data"); // column family
byte[] qualifier1 = Bytes.toBytes(String.valueOf(1)); // column qualifier
byte[] value1 = Bytes.toBytes("张三丰"); // value
put1.addColumn(columnFamily1, qualifier1, value1);
byte[] row2 = Bytes.toBytes("row2");
Put put2 = new Put(row2);
byte[] columnFamily2 = Bytes.toBytes("data"); // column family
byte[] qualifier2 = Bytes.toBytes(String.valueOf(2)); // column qualifier
byte[] value2 = Bytes.toBytes("张无忌"); // value
put2.addColumn(columnFamily2, qualifier2, value2);
Table table = connection.getTable(tableName);
table.put(put1);
table.put(put2);
/********* End *********/
}
}
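To check the inserts from the hbase shell (optional, not part of the graded code):
scan 'tb_step2'
Both rows should come back, with 张三丰 under data:1 in row1 and 张无忌 under data:2 in row2.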
Command line (press Enter after each command):
start-dfs.sh
start-hbase.sh
Code file
package step3;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
public void queryTableInfo()throws Exception{
/********* Begin *********/
Configuration config = HBaseConfiguration.create();
Connection connection = ConnectionFactory.createConnection(config);
TableName tableName = TableName.valueOf("t_step3");
Table table = connection.getTable(tableName);
// Fetch a single row
Get get = new Get(Bytes.toBytes("row1")); // define the Get object
Result result = table.get(get); // fetch the row through the Table object
// Often only the value is needed; here we read column data:1 (family data, qualifier 1)
byte[] valueBytes = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")); // returns a byte array
// Convert the bytes to a String
String valueStr = new String(valueBytes,"utf-8");
System.out.println("value:" + valueStr);
TableName tableStep3Name = TableName.valueOf("table_step3");
Table step3Table = connection.getTable(tableStep3Name);
// Scan the whole table
Scan scan = new Scan();
ResultScanner scanner = step3Table.getScanner(scan);
try {
for (Result scannerResult: scanner) {
byte[] row = scannerResult.getRow();
System.out.println("rowName:" + new String(row,"utf-8"));
}
} finally {
scanner.close();
}
/********* End *********/
}
}
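On a large table, a bare full-table Scan is wasteful. A hedged sketch of narrowing it, using the same imports as above (the row keys here are illustrative):
Scan scan = new Scan();
scan.addFamily(Bytes.toBytes("data"));    // only return cells from the data family
scan.withStartRow(Bytes.toBytes("row1")); // inclusive start row
scan.withStopRow(Bytes.toBytes("row9"));  // exclusive stop row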
Command line (press Enter after each command):
start-dfs.sh
start-hbase.sh
Code file
package step4;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
public class Task {
public void deleteTable()throws Exception{
/********* Begin *********/
Configuration config = HBaseConfiguration.create();
Connection connection = ConnectionFactory.createConnection(config);
Admin admin = connection.getAdmin();
TableName tableName = TableName.valueOf("t_step4");
admin.disableTable(tableName); // a table must be disabled before it can be deleted
admin.deleteTable(tableName);
/********* End *********/
}
}
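deleteTable fails if the table is still enabled or does not exist. A slightly safer sketch under the same setup (table name as above):
TableName t = TableName.valueOf("t_step4");
if (admin.tableExists(t)) { // skip silently when the table is absent
    if (!admin.isTableDisabled(t)) {
        admin.disableTable(t); // must be disabled before deletion
    }
    admin.deleteTable(t);
}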
Command line (press Enter after each command):
start-dfs.sh
start-hbase.sh
Code file
package step1;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
public void batchGet() throws Exception {
/********* Begin *********/
Configuration config = HBaseConfiguration.create();
Connection connection = ConnectionFactory.createConnection(config);
List<String> rows = new ArrayList<>();
rows.add("2018");
//rows.add("2019");
rows.add("2020");
TableName tableName = TableName.valueOf(Bytes.toBytes("step1_student"));
Table table = connection.getTable(tableName);
getData(table,rows);
/********* End *********/
}
public List<String> getData(Table table, List<String> rows) throws Exception {
List<Get> gets = new ArrayList<>();
for (String str : rows) {
Get get = new Get(Bytes.toBytes(str));
gets.add(get);
}
List<String> values = new ArrayList<>();
Result[] results = table.get(gets);
for (Result result : results) {
System.out.println("Row:" + Bytes.toString(result.getRow()));
for (Cell kv : result.rawCells()) {
String family = Bytes.toString(CellUtil.cloneFamily(kv));
String qualifier = Bytes.toString(CellUtil.cloneQualifier(kv));
String value = Bytes.toString(CellUtil.cloneValue(kv));
values.add(value);
System.out.println(family + ":" + qualifier + "\t" + value);
}
}
return values;
}
}
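One detail of Table.get(List<Get>): it returns one Result per Get, in input order, and a missing row comes back as an empty Result rather than null. A sketch of guarding against that inside getData (the message text is illustrative):
for (Result result : results) {
    if (result.isEmpty()) { // the requested row does not exist
        System.out.println("row not found");
        continue;
    }
    // ... read cells with rawCells() as above
}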
Command line (press Enter after each command):
start-dfs.sh
start-hbase.sh
Code file
package step2;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
public void batchDelete()throws Exception{
/********* Begin *********/
Configuration conf = HBaseConfiguration.create();
Connection conn = ConnectionFactory.createConnection(conf);
TableName tableName = TableName.valueOf("step2_table");
Table table = conn.getTable(tableName);
List<String> rows1 = new ArrayList<>();
for(int i = 1; i<6;i++){
String row = "row" + i;
rows1.add(row);
}
delete(table,rows1);
List<String> rows2 = new ArrayList<>();
for(int i = 7;i<11;i++){
String row = "row" + i;
rows2.add(row);
}
delete(table,rows2);
/********* End *********/
}
public void delete(Table table,List<String> rows)throws IOException{
List<Delete> deletes = new ArrayList<>();
for(String str : rows){
byte[] row = Bytes.toBytes(str);
Delete delete = new Delete(row);
deletes.add(delete);
}
table.delete(deletes);
}
}
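A Delete built with only a row key removes the whole row; it can also be narrowed. A hedged sketch under the same imports (family and qualifier names are illustrative):
Delete d = new Delete(Bytes.toBytes("row1")); // with no further calls: delete the whole row
d.addFamily(Bytes.toBytes("data")); // narrows the delete to one column family
// d.addColumn(Bytes.toBytes("data"), Bytes.toBytes("1")); // or to a single column's newest version
table.delete(d);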
Command line (press Enter after each command):
start-dfs.sh
start-hbase.sh
Code file
package step3;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
public void batchPut()throws Exception{
/********* Begin *********/
Configuration config = HBaseConfiguration.create(); // new Configuration() would not load hbase-site.xml
Connection conn = ConnectionFactory.createConnection(config);
Admin admin = conn.getAdmin();
// Create the table
TableName tableName = TableName.valueOf(Bytes.toBytes("stu"));
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("basic_info")).build();
ColumnFamilyDescriptor family2 = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("school_info")).build();
builder.setColumnFamily(family);
builder.setColumnFamily(family2);
admin.createTable(builder.build());
List<Put> puts = new ArrayList<>();
String[] rows = {"20181122","20181123"};
String[][] basic_infos = {{"阿克蒙德","male","1987-05-23","tel:139********","HUNan-ChangSha"},{"萨格拉斯","male","1986-05-23","tel:187********","HUNan-ChangSha"}};
String[] basic_columns = {"name","gender","birthday","connect","address"};
String[][] school_infos = {{"ChengXing","class 1 grade 2","Software"},{"ChengXing","class 2 grade 2","Software"}};
String[] school_columns = {"college","class","object"};
for (int x = 0; x < rows.length; x++) {
// Build one Put per row
Put put = new Put(Bytes.toBytes(rows[x]));
for (int i = 0; i < basic_infos[x].length; i++) { // iterate the columns of this row, not the rows
byte[] columnFamily = Bytes.toBytes("basic_info");
byte[] qualifier = Bytes.toBytes(basic_columns[i]);
byte[] value = Bytes.toBytes(basic_infos[x][i]);
put.addColumn(columnFamily, qualifier, value);
}
for (int i = 0; i < school_infos[x].length; i++) { // iterate the columns of this row, not the rows
byte[] columnFamily = Bytes.toBytes("school_info");
byte[] qualifier = Bytes.toBytes(school_columns[i]);
byte[] value = Bytes.toBytes(school_infos[x][i]);
put.addColumn(columnFamily, qualifier, value);
}
puts.add(put);
}
}
Table table = conn.getTable(tableName);
table.put(puts);
/********* End *********/
}
}
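For imports much larger than two rows, a BufferedMutator batches the RPCs client-side instead of sending one per Put. A hedged sketch reusing the conn, tableName, and puts from above (BufferedMutator lives in org.apache.hadoop.hbase.client):
try (BufferedMutator mutator = conn.getBufferedMutator(tableName)) {
    for (Put put : puts) {
        mutator.mutate(put); // buffered; flushed to the server in batches
    }
    mutator.flush(); // push anything still sitting in the buffer
}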
Command line (press Enter after each command):
start-dfs.sh
start-hbase.sh
Code file
package step1;
import java.util.List;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.*;
public class Task {
public void showTableList() throws Exception {
/********* Begin *********/
Configuration conf = HBaseConfiguration.create(); //the static create() method returns a Configuration object
Connection conn = ConnectionFactory.createConnection(conf); //pass the configuration object created above
Admin admin = conn.getAdmin(); //obtain the Admin object from the connection
List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
for(TableDescriptor tableDescriptor: tableDescriptors){
System.out.println("Table:" + tableDescriptor.getTableName());
System.out.println("\texists:" + admin.tableExists(tableDescriptor.getTableName()));
System.out.println("\tenabled:" + admin.isTableEnabled(tableDescriptor.getTableName()));
}
/********* End *********/
}
}
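Assuming only the dept and emp tables exist, the console output would look roughly like:
Table:dept
	exists:true
	enabled:true
Table:emp
	exists:true
	enabled:true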
Code file
package step2;
import java.io.IOException;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.*;
public class Task {
public void updateTables()throws Exception{
/********* Begin *********/
Configuration conf = HBaseConfiguration.create(); //the static create() method returns a Configuration object
Connection conn = ConnectionFactory.createConnection(conf); //pass the configuration object created above
Admin admin = conn.getAdmin(); //obtain the Admin object from the connection
TableName tableName1 = TableName.valueOf("t_emp2");
TableName tableName2 = TableName.valueOf("t_dept2");
ColumnFamilyDescriptorBuilder buildFamily = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data"));//create the builder
buildFamily.setBlocksize(1024*1024);//set the HFile block size
buildFamily.setBlockCacheEnabled(false);//disable block caching (the default is true)
buildFamily.setCompressionType(Compression.Algorithm.GZ);//the default is NONE
ColumnFamilyDescriptor family = buildFamily.build();//build the ColumnFamilyDescriptor
admin.modifyColumnFamily(tableName1, family);//modifyColumnFamily takes a TableName and a ColumnFamilyDescriptor
admin.deleteColumnFamily(tableName1, Bytes.toBytes("data1"));//delete the column family named data1 from the table
ColumnFamilyDescriptorBuilder buildFamily1 = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data1"));//create the builder
buildFamily1.setMinVersions(2);
buildFamily1.setMaxVersions(5);
buildFamily1.setInMemory(true); //the default is false
buildFamily1.setTimeToLive(60*60*24); //TTL in seconds; expired cells are removed during the next major compaction
ColumnFamilyDescriptor family1 = buildFamily1.build();//build the ColumnFamilyDescriptor
admin.modifyColumnFamily(tableName2, family1);//modifyColumnFamily takes a TableName and a ColumnFamilyDescriptor
admin.deleteColumnFamily(tableName2, Bytes.toBytes("data"));//delete the column family named data from the table
/********* End *********/
}
}
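To confirm the schema changes from the hbase shell (optional; assumes both tables exist):
describe 't_emp2'
describe 't_dept2'
describe should show GZ compression and BLOCKCACHE => 'false' on t_emp2's data family, and the version and TTL settings on t_dept2's data1 family.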
Code file
package step3;
import java.io.IOException;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.*;
public class Task {
/**
* Delete a table.
* @param tableName the table name
* @throws Exception
*/
public void deleteTable(String tableName)throws Exception{
/********* Begin *********/
Configuration conf = HBaseConfiguration.create(); //the static create() method returns a Configuration object
Connection conn = ConnectionFactory.createConnection(conf); //pass the configuration object created above
Admin admin = conn.getAdmin(); //obtain the Admin object from the connection
TableName testName = TableName.valueOf(Bytes.toBytes(tableName));
admin.disableTable(testName);
admin.deleteTable(testName);
/********* End *********/
}
/**
* Create a table.
* @param tableName the table name
* @param columnNames varargs list of column family names
* @throws Exception
*/
public void createTable(String tableName,String... columnNames)throws Exception{
/********* Begin *********/
Configuration conf = HBaseConfiguration.create(); //the static create() method returns a Configuration object
Connection conn = ConnectionFactory.createConnection(conf); //pass the configuration object created above
Admin admin = conn.getAdmin(); //obtain the Admin object from the connection
// Build the table descriptor and add each requested column family
TableDescriptorBuilder test_teacher_info = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
for(String s: columnNames){
ColumnFamilyDescriptor of = ColumnFamilyDescriptorBuilder.of(s);
test_teacher_info.setColumnFamily(of);
}
// Build the descriptor and create the table
TableDescriptor build = test_teacher_info.build();
admin.createTable(build);
/********* End *********/
}
/**
* Enable a table.
* @param tableName
* @throws Exception
*/
public void enableTable(String tableName) throws Exception{
/********* Begin *********/
Configuration conf = HBaseConfiguration.create(); //the static create() method returns a Configuration object
Connection conn = ConnectionFactory.createConnection(conf); //pass the configuration object created above
Admin admin = conn.getAdmin(); //obtain the Admin object from the connection
TableName demoName = TableName.valueOf(Bytes.toBytes(tableName));
admin.enableTable(demoName);
/********* End *********/
}
/**
* Disable a table.
* @param tableName
*/
public void disableTable(String tableName)throws Exception{
/********* Begin *********/
Configuration conf = HBaseConfiguration.create(); //the static create() method returns a Configuration object
Connection conn = ConnectionFactory.createConnection(conf); //pass the configuration object created above
Admin admin = conn.getAdmin(); //obtain the Admin object from the connection
TableName testName = TableName.valueOf(Bytes.toBytes(tableName));
admin.disableTable(testName);
/********* End *********/
}
}
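A possible driver for the methods above (a sketch only; the table and family names are illustrative, not part of the graded task):
public static void main(String[] args) throws Exception {
    Task task = new Task();
    task.createTable("demo_table", "cf1", "cf2");
    task.disableTable("demo_table");
    task.enableTable("demo_table");
    task.deleteTable("demo_table"); // deleteTable disables the table again before dropping it
}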