Using a self-written utility class from Scala

Requirement: query HBase with a condition, from Scala code.

The Scala code that calls the self-written utility class:

object SparkHBase2 {
  def main(args: Array[String]): Unit = {
    // open the target table through the utility class
    val table = HBaseTools.openTable("t_prod_weixin_art")
    // scan the first 10 values of the info:content column and print them
    val rows = HBaseTools.scanValueDatas(table, "info", "content", 10).toArray()
    rows.foreach(println)
    // release resources
    HBaseTools.closeTable(table)
    HBaseTools.closeConn()
  }
}
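
Since scanValueDatas returns a java.util.List, the .toArray() call above yields an untyped Array[AnyRef]. A slightly more idiomatic variant on Scala 2.11/2.12 (a minimal sketch only; same table and column as above, and the object name SparkHBase2Idiomatic is made up) converts the Java list with JavaConverters:

import scala.collection.JavaConverters._

object SparkHBase2Idiomatic {
  def main(args: Array[String]): Unit = {
    val table = HBaseTools.openTable("t_prod_weixin_art")
    // asScala wraps the java.util.List as a Scala Buffer[String]
    val rows = HBaseTools.scanValueDatas(table, "info", "content", 10).asScala
    // rows without the column come back as null, so drop them before printing
    rows.filter(_ != null).foreach(println)
    HBaseTools.closeTable(table)
    HBaseTools.closeConn()
  }
}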


The self-written utility class HBaseTools is shown below.

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseTools {

	// HBase client configuration; HBaseConfiguration.create() also picks up hbase-site.xml if present
	public static Configuration conf = HBaseConfiguration.create();
	private static Connection conn = null;
	// shared thread pool used for Table operations
	private static ExecutorService pool = Executors.newFixedThreadPool(200);

	static {
		conf.set("hbase.zookeeper.property.clientPort", "2181");
		conf.set("hbase.zookeeper.quorum", "dmp01,dmp02,dmp03,dmp04,dmp05");
		conf.setLong(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 120000);
		try {
			conn = ConnectionFactory.createConnection(conf);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public static Table openTable(String tableName) {
		Table table = null;
		try {
			table = conn.getTable(TableName.valueOf(tableName), pool);
		} catch (IOException e) {
			e.printStackTrace();
		}
		return table;
	}

	public static void closeTable(Table table) {
		if (table != null) {
			try {
				table.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}
	
	public static void closeConn() {
		if (conn != null) {
			try {
				conn.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}
	
	public static void putColumnDatas(Table table, String rowKey,
			String familyName, Map<String, byte[]> columnDatas) {
		Put put = new Put(rowKey.getBytes());
		for (Map.Entry<String, byte[]> columnData : columnDatas.entrySet()) {
			put.addColumn(familyName.getBytes(),
					columnData.getKey().getBytes(), columnData.getValue());
		}
		try {
			table.put(put);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public static void putFamilyDatas(Table table, String rowKey,
			Map<String, Map<String, byte[]>> familyDatas) {
		Put put = new Put(rowKey.getBytes());
		for (Map.Entry<String, Map<String, byte[]>> familyData : familyDatas
				.entrySet()) {
			String familyName = familyData.getKey();
			Map<String, byte[]> columnDatas = familyData.getValue();
			for (Map.Entry<String, byte[]> columnData : columnDatas.entrySet()) {
				put.addColumn(familyName.getBytes(), columnData.getKey()
						.getBytes(), columnData.getValue());
			}
		}
		try {
			table.put(put);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public static void putRowDatas(Table table,
			Map<String, Map<String, Map<String, byte[]>>> rowDatas) {
		List<Put> puts = new ArrayList<Put>();
		for (Map.Entry<String, Map<String, Map<String, byte[]>>> rowData : rowDatas
				.entrySet()) {
			String rowKey = rowData.getKey();
			if (rowKey != null) {
				Map<String, Map<String, byte[]>> familyDatas = rowData
						.getValue();
				Put put = new Put(rowKey.getBytes());
				for (Map.Entry<String, Map<String, byte[]>> familyData : familyDatas
						.entrySet()) {
					String familyName = familyData.getKey();
					Map<String, byte[]> columnDatas = familyData.getValue();
					for (Map.Entry<String, byte[]> columnData : columnDatas
							.entrySet()) {
						put.addColumn(familyName.getBytes(), columnData
								.getKey().getBytes(), columnData.getValue());
					}
				}
				puts.add(put);
			}
		}
		try {
			table.put(puts);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public static Map<String, String> getFamilyDatas(Table table, String rowkey, String family) {
		Map<String, String> datas = new HashMap<String, String>();
		Get get = new Get(rowkey.getBytes());
		get.addFamily(family.getBytes());
		try {
			Result result = table.get(get);
			List<Cell> cells = result.listCells();
			for (Cell cell : cells) {
				String key = new String(CellUtil.cloneQualifier(cell));
				String value = new String(CellUtil.cloneValue(cell), "UTF-8");
				datas.put(key, value);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return datas;
	}
	
	public static String getValueData(Table table, String rowkey, String family, String key) {
		String value = "";
		Get get = new Get(rowkey.getBytes());
		get.addFamily(family.getBytes());
		try {
			Result result = table.get(get);
			value = Bytes.toString(result.getValue(family.getBytes(), key.getBytes()));
		} catch (IOException e) {
			e.printStackTrace();
		}
		return value;
	}
	
	public static List<String> scanValueDatas(Table table, String family, String key, int limit) {
		List<String> values = new ArrayList<String>();
		Scan scan = new Scan();
		Filter filter = new PageFilter(limit);
		scan.setFilter(filter);
		try {
			ResultScanner results = table.getScanner(scan);
			for (Result res : results) {
				values.add(Bytes.toString(res.getValue(family.getBytes(), key.getBytes())));
			}
			results.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
		
		return values;
	}

}
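
Note that scanValueDatas only caps the number of rows with a PageFilter; it does not actually apply a condition on the value. To run a real conditional query, one option is to issue a Scan with a SingleColumnValueFilter directly against the Table returned by openTable. The sketch below uses the HBase 1.x filter API that matches the client API used in HBaseTools; the column info:content is taken from the example above, while the target value "some-content" and the object name ConditionalScanExample are made-up placeholders.

import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.filter.{CompareFilter, FilterList, PageFilter, SingleColumnValueFilter}
import org.apache.hadoop.hbase.util.Bytes
import scala.collection.JavaConverters._

object ConditionalScanExample {
  def main(args: Array[String]): Unit = {
    val table = HBaseTools.openTable("t_prod_weixin_art")

    // keep only rows whose info:content equals the given value,
    // and cap the result at 10 rows
    val valueFilter = new SingleColumnValueFilter(
      Bytes.toBytes("info"), Bytes.toBytes("content"),
      CompareFilter.CompareOp.EQUAL, Bytes.toBytes("some-content"))
    valueFilter.setFilterIfMissing(true) // skip rows that lack the column

    val filters = new FilterList(FilterList.Operator.MUST_PASS_ALL,
      valueFilter, new PageFilter(10))

    val scan = new Scan()
    scan.setFilter(filters)

    val scanner = table.getScanner(scan)
    try {
      for (result <- scanner.asScala) {
        val content = Bytes.toString(
          result.getValue(Bytes.toBytes("info"), Bytes.toBytes("content")))
        println(s"${Bytes.toString(result.getRow)} -> $content")
      }
    } finally {
      scanner.close()
      HBaseTools.closeTable(table)
      HBaseTools.closeConn()
    }
  }
}

setFilterIfMissing(true) makes the filter drop rows that do not contain the info:content column at all; without it, such rows would still be returned.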

