Storm+HBase

0、Hbase中创建表‘ns1:wordcount’

$hbase shell>create 'ns1:wordcount' , 'f1'

表结构为:

rowkey就是word

f1列族下有:word列和count列

1 、引入依赖


		<?xml version="1.0" encoding="UTF-8"?>
		<project xmlns="http://maven.apache.org/POM/4.0.0"
				 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
				 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
			<modelVersion>4.0.0</modelVersion>

			<groupId>com.XXXX</groupId>
			<artifactId>StormDemo</artifactId>
			<version>1.0-SNAPSHOT</version>

			<dependencies>
				<dependency>
					<groupId>org.apache.storm</groupId>
					<artifactId>storm-core</artifactId>
					<version>1.0.3</version>
				</dependency>
				<dependency>
					<groupId>junit</groupId>
					<artifactId>junit</artifactId>
					<version>4.11</version>
				</dependency>
				<dependency>
					<groupId>org.apache.storm</groupId>
					<artifactId>storm-kafka</artifactId>
					<version>1.0.2</version>
				</dependency>
				<dependency>
					<groupId>log4j</groupId>
					<artifactId>log4j</artifactId>
					<version>1.2.17</version>
				</dependency>
				<dependency>
					<groupId>org.apache.kafka</groupId>
					<artifactId>kafka_2.10</artifactId>
					<version>0.8.1.1</version>
					<exclusions>
						<exclusion>
							<groupId>org.apache.zookeeper</groupId>
							<artifactId>zookeeper</artifactId>
						</exclusion>
						<exclusion>
							<groupId>log4j</groupId>
							<artifactId>log4j</artifactId>
						</exclusion>
					</exclusions>
				</dependency>
				<dependency>
					<groupId>org.apache.storm</groupId>
					<artifactId>storm-hbase</artifactId>
					<version>1.0.3</version>
				</dependency>
				<dependency>
					<groupId>org.apache.hbase</groupId>
					<artifactId>hbase-client</artifactId>
					<version>1.2.3</version>
				</dependency>
				<dependency>
					<groupId>org.apache.hadoop</groupId>
					<artifactId>hadoop-common</artifactId>
					<version>2.7.3</version>
				</dependency>
			</dependencies>

		</project>

2、HbaseBolt

package com.XXX.stormdemo.hbase;

		import org.apache.hadoop.conf.Configuration;
		import org.apache.hadoop.hbase.HBaseConfiguration;
		import org.apache.hadoop.hbase.TableName;
		import org.apache.hadoop.hbase.client.Connection;
		import org.apache.hadoop.hbase.client.ConnectionFactory;
		import org.apache.hadoop.hbase.client.Table;
		import org.apache.hadoop.hbase.util.Bytes;
		import org.apache.storm.shade.org.apache.http.conn.HttpConnectionFactory;
		import org.apache.storm.task.OutputCollector;
		import org.apache.storm.task.TopologyContext;
		import org.apache.storm.topology.IRichBolt;
		import org.apache.storm.topology.OutputFieldsDeclarer;
		import org.apache.storm.tuple.Tuple;

		import java.io.IOException;
		import java.util.Map;

		/**
		 * HBase bolt: writes word counts into the HBase table {@code ns1:wordcount}.
		 *
		 * <p>Each incoming tuple is expected to carry (word, count). The count is
		 * applied via HBase's atomic server-side increment, using the word as the
		 * row key and {@code f1:count} as the running total. This is a terminal
		 * bolt — it declares no output fields and emits nothing downstream.
		 */
		public class HbaseBolot implements IRichBolt {

			// HBase handles are created in prepare() on the worker and must not be
			// serialized with the topology (they are not Serializable) — hence transient.
			private transient Connection conn;
			private transient Table t;

			/**
			 * Opens the HBase connection and the target table.
			 * Fails fast on I/O errors: continuing with a null table would only
			 * defer the failure to an NPE in execute().
			 */
			public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
				try {
					Configuration conf = HBaseConfiguration.create();
					// Keep the connection as a field so cleanup() can close it
					// (holding it only as a local would leak it).
					conn = ConnectionFactory.createConnection(conf);
					t = conn.getTable(TableName.valueOf("ns1:wordcount"));
				} catch (IOException e) {
					throw new RuntimeException("failed to open HBase table ns1:wordcount", e);
				}
			}

			/**
			 * Applies one (word, count) tuple as an atomic increment of
			 * f1:count in the row keyed by the word.
			 */
			public void execute(Tuple tuple) {
				String word = tuple.getString(0);
				Integer count = tuple.getInteger(1);
				byte[] rowkey = Bytes.toBytes(word);
				byte[] f = Bytes.toBytes("f1");
				byte[] c = Bytes.toBytes("count");
				try {
					// Server-side atomic increment — no read-modify-write race.
					t.incrementColumnValue(rowkey, f, c, count);
				} catch (IOException e) {
					// Do not swallow the failure: surface it so Storm can fail
					// the worker / replay instead of silently dropping counts.
					throw new RuntimeException("HBase increment failed for word: " + word, e);
				}
			}

			/** Releases the HBase resources acquired in prepare(). */
			public void cleanup() {
				try {
					if (t != null) {
						t.close();
					}
					if (conn != null) {
						conn.close();
					}
				} catch (IOException ignored) {
					// best-effort shutdown; nothing useful left to do
				}
			}

			/** Terminal bolt: no output stream is declared. */
			public void declareOutputFields(OutputFieldsDeclarer declarer) {
			}

			/** No component-specific configuration. */
			public Map getComponentConfiguration() {
				return null;
			}
		}

3、把配置文件复制到resources文件夹下

[resources]
        hbase-site.xml
        hdfs-site.xml

4、查看Hbase中提前创建好的表‘ns1:wordcount’

 scan 'ns1:wordcount'

你可能感兴趣的:(Storm+HBase)