1. Error message:
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
Exception in thread "main" org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException: Failed 1 action: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=zhangshan, scope=default:userzhangshan, family=base:original_data, params=[table=default:zhangshan,family=base:original_data],action=WRITE)
at org.apache.hadoop.hbase.security.access.AccessController.prePut(AccessController.java:1560)
at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$30.call(RegionCoprocessorHost.java:892)
at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1663)
at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1738)
at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1695)
at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.prePut(RegionCoprocessorHost.java:888)
at org.apache.hadoop.hbase.regionserver.HRegion.doPreMutationHook(HRegion.java:2783)
at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2758)
at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2697)
at org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2701)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.doBatchOp(RSRpcServices.java:677)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.doNonAtomicRegionMutation(RSRpcServices.java:639)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.multi(RSRpcServices.java:1931)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32213)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2034)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:107)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:744)
: 1 time,
at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.makeException(AsyncProcess.java:227)
at org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.access$1700(AsyncProcess.java:207)
at org.apache.hadoop.hbase.client.AsyncProcess.waitForAllPreviousOpsAndReset(AsyncProcess.java:1663)
at org.apache.hadoop.hbase.client.BufferedMutatorImpl.backgroundFlushCommits(BufferedMutatorImpl.java:208)
at org.apache.hadoop.hbase.client.BufferedMutatorImpl.flush(BufferedMutatorImpl.java:183)
at org.apache.hadoop.hbase.client.HTable.flushCommits(HTable.java:1513)
at org.apache.hadoop.hbase.client.HTable.put(HTable.java:1107)
at com.xxx.xxx.hadoop.HbaseUtil.add(HbaseUtil.java:199)
at com.xxx.xxx.hadoop.HbaseUtil.main(HbaseUtil.java:353)
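Note that the SLF4J lines at the top are only a warning that no logging binding is on the classpath (adding one, e.g. slf4j-log4j12, makes them go away); they are unrelated to the failure. The actual error is the AccessDeniedException raised in prePut by HBase's AccessController coprocessor: the client connected as the OS user zhangshan, who has no WRITE permission on the target table and column family. Besides switching to an authorized user as described below, an administrator could also grant the permission directly in the HBase shell (e.g. grant 'zhangshan', 'RW', 'zhangshan').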
2. Solution: make the client log in to the Hadoop environment as the Linux user under which the Hadoop environment is deployed; any user that can access the HBase database normally from Linux will do.
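The utility class below shows the fix in context. The key line is System.setProperty("HADOOP_USER_NAME", ...) in init(): on clusters using simple (non-Kerberos) authentication, the Hadoop client libraries read this property to decide which user to act as, so requests are authorized as hadoopUser rather than as the local OS user.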
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
public class HbaseUtil {
    private static String oozieUser = "hadoopUser"; // Linux user that is allowed to log in and connect to HBase
    private static String family = "base";
    private static String TABLENAME = "table3";
    private static Connection conn;
    private static String hbaseIp = "172.21.x.xxx,172.21.x.xxx,172.21.x.xxx";
    private static String hbasePort = "9501";

    public static void init() {
        // Make the current Windows/Linux client act as a user that may access HBase
        System.setProperty("HADOOP_USER_NAME", oozieUser);
        Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", hbaseIp); // HBase ZooKeeper quorum addresses
        config.set("hbase.zookeeper.property.clientPort", hbasePort); // ZooKeeper client port
        config.set("hadoop.user.name", oozieUser); // Linux user that can connect to HBase
        try {
            conn = ConnectionFactory.createConnection(config);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // Create a table with the given comma-separated list of column families
    public static void createTable(String tableName, String seriesStr) throws IllegalArgumentException, IOException {
        Admin admin = null;
        TableName table = TableName.valueOf(tableName);
        try {
            admin = conn.getAdmin();
            if (!admin.tableExists(table)) {
                System.out.println(tableName + " does not exist, creating it");
                HTableDescriptor descriptor = new HTableDescriptor(table);
                String[] series = seriesStr.split(",");
                for (String s : series) {
                    descriptor.addFamily(new HColumnDescriptor(s.getBytes()));
                }
                admin.createTable(descriptor);
            }
        } finally {
            IOUtils.closeQuietly(admin);
        }
    }
    // Insert one row: each map entry becomes one column (qualifier -> value) in the configured family
    public static void add(String rowKey, Map<String, String> columns) throws IOException {
        Table table = null;
        try {
            table = conn.getTable(TableName.valueOf(TABLENAME));
            Put put = new Put(Bytes.toBytes(rowKey));
            for (Map.Entry<String, String> entry : columns.entrySet()) {
                put.addColumn(family.getBytes(), Bytes.toBytes(entry.getKey()), Bytes.toBytes(entry.getValue()));
            }
            table.put(put);
        } finally {
            IOUtils.closeQuietly(table);
        }
    }
    // Fetch all columns of the configured family for one row key
    public static Map<String, String> getValueByKey(String rowKey) throws IOException {
        Table table = null;
        Map<String, String> resultMap = null;
        try {
            table = conn.getTable(TableName.valueOf(TABLENAME));
            Get get = new Get(Bytes.toBytes(rowKey));
            get.addFamily(family.getBytes());
            Result res = table.get(get);
            Map<byte[], byte[]> familyMap = res.getFamilyMap(family.getBytes());
            Iterator<Entry<byte[], byte[]>> it = familyMap.entrySet().iterator();
            resultMap = new HashMap<String, String>();
            while (it.hasNext()) {
                Entry<byte[], byte[]> entry = it.next();
                resultMap.put(Bytes.toString(entry.getKey()),
                        Bytes.toString(entry.getValue()));
            }
        } finally {
            IOUtils.closeQuietly(table);
        }
        return resultMap;
    }
    // Fetch a single column value by row key and column qualifier
    public static String getValueBySeries(String rowKey, String column) throws IllegalArgumentException, IOException {
        Table table = null;
        String resultStr = null;
        try {
            table = conn.getTable(TableName.valueOf(TABLENAME));
            Get get = new Get(Bytes.toBytes(rowKey));
            get.addColumn(Bytes.toBytes(family), Bytes.toBytes(column));
            Result res = table.get(get);
            byte[] result = res.getValue(Bytes.toBytes(family), Bytes.toBytes(column));
            resultStr = Bytes.toString(result);
        } finally {
            IOUtils.closeQuietly(table);
        }
        return resultStr;
    }
    // Scan the whole table and print every row
    public static void getValueByTable() throws Exception {
        Table table = null;
        try {
            table = conn.getTable(TableName.valueOf(TABLENAME));
            ResultScanner rs = table.getScanner(new Scan());
            for (Result r : rs) {
                System.out.println("row key: " + new String(r.getRow()));
                for (KeyValue keyValue : r.raw()) {
                    System.out.println("column family: " + new String(keyValue.getFamily())
                            + " ==== value: " + new String(keyValue.getValue()));
                }
            }
        } finally {
            IOUtils.closeQuietly(table);
        }
    }
    // Disable and drop a table
    public static void dropTable(String tableName) throws IOException {
        Admin admin = null;
        TableName table = TableName.valueOf(tableName);
        try {
            admin = conn.getAdmin();
            if (admin.tableExists(table)) {
                admin.disableTable(table);
                admin.deleteTable(table);
            }
        } finally {
            IOUtils.closeQuietly(admin);
        }
    }
    public static void main(String[] args) throws Exception {
        init();
        // Create the table
        createTable(TABLENAME, family);
        // Insert row 1
        String rowKey1 = "test01";
        Map<String, String> columns = new HashMap<String, String>();
        columns.put("original_data", "original_data_test01_1");
        columns.put("original_data", "original_data_test01_2"); // same qualifier, so this overwrites the line above
        add(rowKey1, columns);
        // Insert row 2
        String rowKey2 = "test02";
        Map<String, String> columns2 = new HashMap<String, String>();
        columns2.put("original_data", "original_data_test02_1");
        columns2.put("original_data", "original_data_test02_2"); // same qualifier, so this overwrites the line above
        add(rowKey2, columns2);
        // Query row 1 by row key
        Map<String, String> resultMap1 = getValueByKey(rowKey1);
        for (Map.Entry<String, String> entry : resultMap1.entrySet()) {
            System.out.println("map1-" + entry.getKey() + ":" + entry.getValue());
        }
        // Query row 2 by row key
        Map<String, String> resultMap2 = getValueByKey(rowKey2);
        for (Map.Entry<String, String> entry : resultMap2.entrySet()) {
            System.out.println("map2-" + entry.getKey() + ":" + entry.getValue());
        }
        // Query one column value directly
        String original_data_value = getValueBySeries(rowKey1, "original_data");
        System.out.println("original_data_value->" + original_data_value);
        // Print everything in the table
        getValueByTable();
    }
}
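One caveat: setting HADOOP_USER_NAME only works on clusters that use simple (non-Kerberos) authentication. On a Kerberos-secured cluster the identity comes from the Kerberos login, so init() would instead need a keytab login before creating the connection. A minimal sketch, assuming a hypothetical principal and keytab path:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosInit {
    public static void login() throws Exception {
        Configuration config = HBaseConfiguration.create();
        // Tell the Hadoop/HBase client libraries that Kerberos is in use
        config.set("hadoop.security.authentication", "kerberos");
        config.set("hbase.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(config);
        // Placeholder principal and keytab path - substitute your own
        UserGroupInformation.loginUserFromKeytab(
                "hadoopUser@EXAMPLE.COM", "/etc/security/keytabs/hadoopUser.keytab");
    }
}

After either fix, a quick scan 'table3' in the HBase shell confirms that the puts succeeded.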