Big Data (A Simple HBase Example)
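
This post walks through a minimal Java client for HBase: it connects to the cluster through the ZooKeeper quorum, creates a table named bl_test with a single column family info, and then demonstrates the basic CRUD operations: Put to insert and update, Get to read, and Delete to remove a column. The code targets the HBase 2.x client API (TableDescriptorBuilder / ColumnFamilyDescriptorBuilder) and assumes the hbase-client dependency is on the classpath and that the hosts hadoop01, hadoop02, hadoop03 are reachable.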

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;



public class HBaseCRUDExample {
    private static final String TABLE_NAME = "bl_test";
    private static final byte[] COLUMN_FAMILY = Bytes.toBytes("info");

    public static void main(String[] args) {
        Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.property.clientPort", "2181");
        config.set("hbase.zookeeper.quorum", "hadoop01,hadoop02,hadoop03");

        try (Connection connection = ConnectionFactory.createConnection(config)) {
            // Make sure the table exists before reading or writing
            try (Admin admin = connection.getAdmin()) {
                createTable(admin, TABLE_NAME, COLUMN_FAMILY);
            }

            try (Table table = connection.getTable(TableName.valueOf(TABLE_NAME))) {
                // Insert data
                putData(table, "row1", "name", "John");
                putData(table, "row1", "age", "25");

                // Read the data back
                getData(table, "row1", "name");
                getData(table, "row1", "age");

                // Update data (a Put on the same row/column writes a new version;
                // reads return the latest value by default)
                putData(table, "row1", "age", "26");

                // Read again after the update
                getData(table, "row1", "name");
                getData(table, "row1", "age");

                // Delete the "age" column
                deleteData(table, "row1", "age");

                // Read again after the delete
                getData(table, "row1", "name");
                getData(table, "row1", "age");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private static void createTable(Admin admin, String tableName, byte[] columnFamily) throws IOException {
        TableName table = TableName.valueOf(tableName);

        if (admin.tableExists(table)) {
            System.out.println("Table already exists: " + tableName);
        } else {
            TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(table);
            ColumnFamilyDescriptorBuilder columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(columnFamily);
            tableDescriptor.setColumnFamily(columnFamilyDescriptor.build());

            admin.createTable(tableDescriptor.build());
            System.out.println("Table created: " + tableName);
        }
    }

    private static void putData(Table table, String rowKey, String column, String value) throws IOException {
        Put put = new Put(Bytes.toBytes(rowKey));
        put.addColumn(COLUMN_FAMILY, Bytes.toBytes(column), Bytes.toBytes(value));
        table.put(put);
        System.out.println("Data inserted. Row: " + rowKey + ", Column: " + column + ", Value: " + value);
    }

    private static void getData(Table table, String rowKey, String column) throws IOException {
        Get get = new Get(Bytes.toBytes(rowKey));
        Result result = table.get(get);
        byte[] value = result.getValue(COLUMN_FAMILY, Bytes.toBytes(column));
        if (value != null) {
            String columnValue = Bytes.toString(value);
            System.out.println("Data retrieved. Row: " + rowKey + ", Column: " + column + ", Value: " + columnValue);
        } else {
            System.out.println("Data not found. Row: " + rowKey + ", Column: " + column);
        }
    }

    private static void deleteData(Table table, String rowKey, String column) throws IOException {
        Delete delete = new Delete(Bytes.toBytes(rowKey));
        // addColumns (plural) removes every version of the column.
        // addColumn would only mask the most recent version, so the older
        // value written earlier ("25") could still be returned by a later Get.
        delete.addColumns(COLUMN_FAMILY, Bytes.toBytes(column));
        table.delete(delete);
        System.out.println("Data deleted. Row: " + rowKey + ", Column: " + column);
    }
}
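
For reference, if bl_test does not already exist and the cluster is reachable, the println calls above produce output along these lines:

    Table created: bl_test
    Data inserted. Row: row1, Column: name, Value: John
    Data inserted. Row: row1, Column: age, Value: 25
    Data retrieved. Row: row1, Column: name, Value: John
    Data retrieved. Row: row1, Column: age, Value: 25
    Data inserted. Row: row1, Column: age, Value: 26
    Data retrieved. Row: row1, Column: name, Value: John
    Data retrieved. Row: row1, Column: age, Value: 26
    Data deleted. Row: row1, Column: age
    Data retrieved. Row: row1, Column: name, Value: John
    Data not found. Row: row1, Column: age

If you want to clean up afterwards, a minimal sketch using the same Admin API (not part of the original example; it reuses the config object built in main):

    // Optional cleanup: drop the test table. A table must be disabled before it can be deleted.
    try (Connection connection = ConnectionFactory.createConnection(config);
         Admin admin = connection.getAdmin()) {
        TableName tableName = TableName.valueOf(TABLE_NAME);
        if (admin.tableExists(tableName)) {
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        }
    }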
