HBase: Shell + Java API
- I. HBase shell operations
- II. HBase Java API operations
  - 1. Create a Maven project and import the dependencies
  - 2. DDL operations
  - 3. DML operations
    - 3.1 Get
    - 3.2 Put
    - 3.3 Delete
    - 3.4 Scan
  - 4. Filters
I. HBase shell operations
| Command | Description |
| --- | --- |
| Group name: general | |
| help 'command' | Show the usage description for a command |
| whoami | Show the current user |
| version | Show HBase version information |
| status | Show the status of the HBase cluster |
| table_help | Show the help document for table operations |
| Group name: namespace | |
| list_namespace | List all current namespaces |
| create_namespace | Create a namespace |
| drop_namespace | Drop a namespace |
| describe_namespace | Show information about a namespace |
| alter_namespace | Alter a namespace |
| list_namespace_tables | List all tables under a given namespace |
| Group name: DDL | |
| alter | Alter a column family |
| create | Create a table |
| describe | Show detailed information about a table |
| enable | Enable a table |
| disable | Disable a table |
| is_enabled | Check whether a table is enabled |
| is_disabled | Check whether a table is disabled |
| drop | Drop a table |
| exists | Check whether a table exists |
| list | List tables |
| Group name: DML | |
| put | Insert a single column (one cell) into a table |
| get | Fetch the data for one rowkey |
| scan | Fetch the data for multiple rowkeys |
| delete | Delete a piece of data |
- Do not end shell commands with a semicolon.
- By default the shell deletes forward from the cursor; Ctrl+Backspace deletes the character before the cursor.
- If you accidentally end up at a broken prompt (for example, after an unclosed quote), immediately enter a correct statement to get back to the normal prompt; otherwise you have to force-kill the process and re-enter the shell.
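As a quick reference, a minimal shell session exercising a command from each group might look like this (ns, tbname, cf, col, rowkey, and value are placeholder names, not from the original):

create_namespace 'ns'
create 'ns:tbname', 'cf'                        # table with one column family
put 'ns:tbname', 'rowkey', 'cf:col', 'value'    # insert one cell
get 'ns:tbname', 'rowkey'                       # read one row
scan 'ns:tbname'                                # read multiple rows
delete 'ns:tbname', 'rowkey', 'cf:col'          # delete one cell
disable 'ns:tbname'                             # a table must be disabled before drop
drop 'ns:tbname'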
II. HBase Java API operations
1. Create a Maven project and import the dependencies
<repositories>
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
</repositories>
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.6.0-mr1-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>1.2.0-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-server</artifactId>
        <version>1.2.0-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.testng</groupId>
        <artifactId>testng</artifactId>
        <version>6.14.3</version>
        <scope>test</scope>
    </dependency>
</dependencies>
<build>
    <plugins>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.0</version>
            <configuration>
                <source>1.8</source>
                <target>1.8</target>
                <encoding>UTF-8</encoding>
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>2.2</version>
            <executions>
                <execution>
                    <phase>package</phase>
                    <goals>
                        <goal>shade</goal>
                    </goals>
                    <configuration>
                        <filters>
                            <filter>
                                <artifact>*:*</artifact>
                                <excludes>
                                    <exclude>META-INF/*.SF</exclude>
                                    <exclude>META-INF/*.DSA</exclude>
                                    <exclude>META-INF/*.RSA</exclude>
                                </excludes>
                            </filter>
                        </filters>
                    </configuration>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
2. DDL operations
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

import java.io.IOException;

public class HbaseDDL {
    public static void main(String[] args) throws IOException {
        // Point the client at the ZooKeeper quorum of the HBase cluster
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node-1:2181,node-2:2181,node-3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin();

        // "hbase" and "default" are reserved namespaces, so create our own;
        // createNamespace throws NamespaceExistException if it already exists
        NamespaceDescriptor descriptor = NamespaceDescriptor.create("ns").build();
        admin.createNamespace(descriptor);

        // A table must be disabled before it can be deleted
        TableName tbName = TableName.valueOf("ns:tbname");
        if (admin.tableExists(tbName)) {
            admin.disableTable(tbName);
            admin.deleteTable(tbName);
        }

        // One column family "cf", keeping up to 3 versions per cell, with block cache on
        HTableDescriptor desc = new HTableDescriptor(tbName);
        HColumnDescriptor family = new HColumnDescriptor("cf".getBytes());
        family.setMaxVersions(3);
        family.setBlockCacheEnabled(true);
        desc.addFamily(family);
        admin.createTable(desc);

        admin.close();
        conn.close();
    }
}
- Note: in practice, DDL operations are usually done from the shell client rather than through the Java API; the shell equivalent of the code above is sketched below.
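A sketch of the same DDL in the shell, assuming the ns:tbname and cf placeholder names used above:

create_namespace 'ns'
create 'ns:tbname', {NAME => 'cf', VERSIONS => 3, BLOCKCACHE => true}
describe 'ns:tbname'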
3. DML operations
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

import java.io.IOException;

public class HbaseDML {
    // Keep the Connection that connect() opens so close() can release it;
    // calling connect().close() would open (and leak) a second connection
    private static Connection conn;

    private static Table connect() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node-1:2181,node-2:2181,node-3:2181");
        conn = ConnectionFactory.createConnection(conf);
        return conn.getTable(TableName.valueOf("ns:tbname"));
    }

    private void close(Table table) throws IOException {
        table.close();
        conn.close();
    }
}
3.1 Get
@Test
public void get() throws IOException {
    Table table = connect();
    // A Get reads a single row by its rowkey
    Get get = new Get("rowkey".getBytes());
    Result result = table.get(get);
    // Each Cell carries family, qualifier, value, and timestamp
    for (Cell cell : result.rawCells()) {
        System.out.println(
                Bytes.toString(CellUtil.cloneFamily(cell)) + "\t" +
                Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t" +
                Bytes.toString(CellUtil.cloneValue(cell)) + "\t" +
                cell.getTimestamp()
        );
    }
    close(table);
}
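A Get can also be narrowed before it is sent, which avoids transferring cells you do not need. A minimal sketch, assuming the same placeholder cf/col names as above:

@Test
public void getOneColumn() throws IOException {
    Table table = connect();
    Get get = new Get("rowkey".getBytes());
    get.addColumn("cf".getBytes(), "col".getBytes()); // fetch only this column
    get.setMaxVersions(3);                            // return up to 3 versions of the cell
    Result result = table.get(get);
    for (Cell cell : result.rawCells()) {
        System.out.println(Bytes.toString(CellUtil.cloneValue(cell)) + "\t" + cell.getTimestamp());
    }
    close(table);
}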
3.2 Put
@Test
public void put() throws IOException {
    Table table = connect();
    // A Put writes one cell: rowkey -> family:qualifier = value
    Put put = new Put("rowkey".getBytes());
    put.addColumn("cf".getBytes(), "col".getBytes(), "value".getBytes());
    table.put(put);
    close(table);
}
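Table.put also accepts a list of Puts, which lets the client batch several writes into one call. A minimal sketch with hypothetical rowkeys (needs java.util.ArrayList and java.util.List imports):

@Test
public void putBatch() throws IOException {
    Table table = connect();
    List<Put> puts = new ArrayList<>();
    for (int i = 1; i <= 3; i++) {
        Put put = new Put(("rowkey_" + i).getBytes());
        put.addColumn("cf".getBytes(), "col".getBytes(), ("value_" + i).getBytes());
        puts.add(put);
    }
    table.put(puts); // one client call instead of three
    close(table);
}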
3.3 Delete
@Test
public void delete() throws IOException {
    Table table = connect();
    Delete delete = new Delete("rowkey".getBytes());
    // addColumn deletes only the latest version of the column;
    // addColumns deletes all versions (shown side by side for comparison)
    delete.addColumn("cf".getBytes(), "col".getBytes());
    delete.addColumns("cf".getBytes(), "col".getBytes());
    table.delete(delete);
    close(table);
}
3.4 Scan
@Test
public void scan() throws IOException {
    Table table = connect();
    // An empty Scan reads the whole table
    Scan scan = new Scan();
    ResultScanner scanner = table.getScanner(scan);
    for (Result result : scanner) {
        // One Result per rowkey
        System.out.println(Bytes.toString(result.getRow()));
        for (Cell cell : result.rawCells()) {
            System.out.println(
                    Bytes.toString(CellUtil.cloneFamily(cell)) + "\t" +
                    Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t" +
                    Bytes.toString(CellUtil.cloneValue(cell)) + "\t" +
                    cell.getTimestamp()
            );
        }
        System.out.println();
    }
    scanner.close();
    close(table);
}
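A full-table scan can be expensive; bounding the row range and raising the scanner caching reduces the data scanned and the number of RPCs. A minimal sketch with hypothetical row boundaries:

@Test
public void scanRange() throws IOException {
    Table table = connect();
    Scan scan = new Scan();
    scan.setStartRow("rowkey_start".getBytes()); // inclusive
    scan.setStopRow("rowkey_stop".getBytes());   // exclusive
    scan.setCaching(100);                        // rows fetched per RPC
    ResultScanner scanner = table.getScanner(scan);
    for (Result result : scanner) {
        System.out.println(Bytes.toString(result.getRow()));
    }
    scanner.close();
    close(table);
}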
4. Filters
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

import java.io.IOException;

public class HbaseFilter {
    // Keep the Connection so close() can release the one connect() opened
    private static Connection conn;

    private static Table connect() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "cluster01:2181,cluster02:2181,cluster03:2181");
        conn = ConnectionFactory.createConnection(conf);
        return conn.getTable(TableName.valueOf("student:stu_info"));
    }

    @Test
    public void scan() throws IOException {
        Table table = connect();
        Scan scan = new Scan();
        // Row range: start is inclusive, stop is exclusive ("rowkey" is a placeholder)
        scan.setStartRow("rowkey".getBytes());
        scan.setStopRow("rowkey".getBytes());
        // Keep rows whose rowkey equals the given value
        Filter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator("20191027_001".getBytes()));
        // Keep cells whose column family is not "basic"
        Filter familyFilter = new FamilyFilter(CompareFilter.CompareOp.NOT_EQUAL, new BinaryComparator("basic".getBytes()));
        // Keep cells whose qualifier contains the substring "a"
        Filter columnFilter = new QualifierFilter(CompareFilter.CompareOp.EQUAL, new SubstringComparator("a"));
        // Keep cells whose value contains the substring "lao"
        ValueFilter valueFilter = new ValueFilter(CompareFilter.CompareOp.EQUAL, new SubstringComparator("lao"));
        // Keep rows where basic:name equals "lao", excluding the matched column from the result
        Filter singleColumnValueExcludeFilter = new SingleColumnValueExcludeFilter(
                "basic".getBytes(),
                "name".getBytes(),
                CompareFilter.CompareOp.EQUAL,
                "lao".getBytes()
        );
        // Keep columns whose qualifier starts with any of the given prefixes
        byte[][] prefixes = {
                "name".getBytes(),
                "age".getBytes()
        };
        Filter multipleColumnPrefixFilter = new MultipleColumnPrefixFilter(prefixes);
        // Keep rows whose rowkey starts with "201910"
        PrefixFilter prefixFilter = new PrefixFilter("201910".getBytes());
        scan.setStartRow("20191027_001".getBytes());
        // Return at most 3 rows per region server
        Filter pageFilter = new PageFilter(3);
        // A FilterList combines several filters (logical AND by default)
        FilterList filterList = new FilterList();
        filterList.addFilter(singleColumnValueExcludeFilter);
        filterList.addFilter(multipleColumnPrefixFilter);
        // Only one filter (or one FilterList) can be attached to a Scan;
        // rowFilter is applied here, swap in filterList to combine filters
        scan.setFilter(rowFilter);
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            System.out.println(Bytes.toString(result.getRow()));
            for (Cell cell : result.rawCells()) {
                System.out.println(
                        Bytes.toString(CellUtil.cloneFamily(cell)) + "\t" +
                        Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t" +
                        Bytes.toString(CellUtil.cloneValue(cell)) + "\t" +
                        cell.getTimestamp()
                );
            }
            System.out.println();
        }
        scanner.close();
        close(table);
    }

    private void close(Table table) throws IOException {
        table.close();
        conn.close();
    }
}
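A FilterList ANDs its members by default; the operator can be made explicit when constructing it. A minimal sketch that ORs two of the filters above (assumes the scan object from the example):

// MUST_PASS_ALL = logical AND, MUST_PASS_ONE = logical OR
FilterList combined = new FilterList(FilterList.Operator.MUST_PASS_ONE);
combined.addFilter(new PrefixFilter("201910".getBytes()));
combined.addFilter(new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator("20191027_001".getBytes())));
scan.setFilter(combined);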