Dependency coordinates can be looked up at https://search.maven.org. The pom.xml for the project:

<project>
    <modelVersion>4.0.0</modelVersion>
    <groupId>cn.tzb.com</groupId>
    <artifactId>stormwordcount</artifactId>
    <version>1.0-SNAPSHOT</version>
    <dependencies>
        <dependency>
            <groupId>org.apache.storm</groupId>
            <artifactId>storm-core</artifactId>
            <version>1.0.6</version>
        </dependency>
        <dependency>
            <groupId>org.clojure</groupId>
            <artifactId>clojure</artifactId>
            <version>1.7.0</version>
        </dependency>
    </dependencies>
</project>
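One detail worth checking before cluster submission: the Storm cluster already provides storm-core on the worker classpath, so when the jar is built for storm jar the dependency is usually marked as provided. A minimal sketch of that change, assuming the pom above:

        <dependency>
            <groupId>org.apache.storm</groupId>
            <artifactId>storm-core</artifactId>
            <version>1.0.6</version>
            <!-- provided: the cluster supplies storm-core at runtime; shipping it
                 inside the topology jar can cause classpath conflicts on the workers -->
            <scope>provided</scope>
        </dependency>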
package wordcount;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;

public class WordCountTopologyMain {
    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException {
        // 1. Prepare a TopologyBuilder: spout -> split bolt -> count bolt
        TopologyBuilder topologyBuilder = new TopologyBuilder();
        topologyBuilder.setSpout("mySpout", new MySpout(), 1);
        topologyBuilder.setBolt("mybolt1", new MySplitBolt(), 10).shuffleGrouping("mySpout");
        topologyBuilder.setBolt("mybolt2", new MyCountBolt(), 2).fieldsGrouping("mybolt1", new Fields("word"));

        // 2. Create the configuration and set the number of workers the topology needs
        Config config = new Config();
        config.setNumWorkers(2);

        // 3. Submit the topology, in either local mode or cluster mode
        // StormSubmitter.submitTopologyWithProgressBar("mywordcount", config, topologyBuilder.createTopology());
        LocalCluster localCluster = new LocalCluster();
        localCluster.submitTopology("mywordcount", config, topologyBuilder.createTopology());
    }
}
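When the topology is submitted to a LocalCluster, the JVM keeps running until it is killed externally. For a quick local test it can help to let the topology run for a while and then shut the cluster down explicitly; a minimal sketch at the end of the main method above, using the fully qualified Utils class to avoid an extra import:

        // Let the local topology run for ~10 seconds, then tear it down.
        org.apache.storm.utils.Utils.sleep(10000);
        localCluster.killTopology("mywordcount");
        localCluster.shutdown();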
package wordcount;

import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

import java.util.Map;

public class MySpout extends BaseRichSpout {
    SpoutOutputCollector collector;

    /*
     * Initialization method, called once when the spout task starts.
     */
    public void open(Map map, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    /*
     * The Storm framework calls nextTuple in a while(true) loop.
     */
    public void nextTuple() {
        collector.emit(new Values("Hello World I love China"));
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("firstStorm"));
    }
}
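As written, nextTuple emits the same sentence as fast as the framework can call it, which floods the downstream bolts during a local test. An optional tweak is to throttle the emission; a small sketch assuming the MySpout above:

    public void nextTuple() {
        collector.emit(new Values("Hello World I love China"));
        // Throttle to roughly one tuple per second so the console output stays readable.
        org.apache.storm.utils.Utils.sleep(1000);
    }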
package wordcount;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.Map;

public class MySplitBolt extends BaseRichBolt {
    OutputCollector outputCollector;

    /*
     * Initialization method, called once when the bolt task starts.
     */
    public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
        this.outputCollector = outputCollector;
    }

    /*
     * Called by the Storm framework in a while(true) loop, once per incoming tuple.
     */
    public void execute(Tuple tuple) {
        String line = tuple.getString(0);
        String[] words = line.split(" ");
        for (String word : words) {
            outputCollector.emit(new Values(word, 1));
        }
    }

    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("word", "num"));
    }
}
package wordcount;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

import java.util.HashMap;
import java.util.Map;

public class MyCountBolt extends BaseRichBolt {
    OutputCollector collector;
    // Running word counts, kept in memory per bolt task.
    Map<String, Integer> map = new HashMap<String, Integer>();

    public void prepare(Map stormConf, TopologyContext topologyContext, OutputCollector outputCollector) {
        this.collector = outputCollector;
    }

    public void execute(Tuple tuple) {
        String word = tuple.getString(0);
        Integer num = tuple.getInteger(1);
        if (map.containsKey(word)) {
            Integer count = map.get(word);
            map.put(word, count + num);
        } else {
            map.put(word, num);
        }
        System.out.println("count:" + map);
    }

    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        // Terminal bolt: nothing is emitted downstream, so no output fields are declared.
    }
}
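Because the counts only go to System.out, there is no record of the final totals when the topology stops. In local mode, overriding cleanup() is a simple way to dump the final map on shutdown; a sketch assuming the MyCountBolt above (note that cleanup is not guaranteed to be called on a real cluster):

    @Override
    public void cleanup() {
        // Called when the bolt is shut down in local mode; print the final counts.
        System.out.println("final counts: " + map);
    }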
To switch to cluster mode, submit the topology through StormSubmitter instead of LocalCluster in the main method:

        // 3. Submit the topology, in either local mode or cluster mode
        StormSubmitter.submitTopologyWithProgressBar("mywordcount", config, topologyBuilder.createTopology());
        // LocalCluster localCluster = new LocalCluster();
        // localCluster.submitTopology("mywordcount", config, topologyBuilder.createTopology());
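Instead of commenting code in and out, a common pattern is to pick the mode from the command-line arguments; a minimal sketch under the same topology setup (the argument convention here is just one possible choice):

        // 3. Submit the topology: cluster mode if a topology name is passed, local mode otherwise
        if (args != null && args.length > 0) {
            StormSubmitter.submitTopologyWithProgressBar(args[0], config, topologyBuilder.createTopology());
        } else {
            LocalCluster localCluster = new LocalCluster();
            localCluster.submitTopology("mywordcount", config, topologyBuilder.createTopology());
        }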
[INFO] Scanning for projects...
[INFO]
[INFO] ------------------------------------------------------------------------
[INFO] Building stormwordcount 1.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO]
[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ stormwordcount ---
[WARNING] Using platform encoding (UTF-8 actually) to copy filtered resources, i.e. build is platform dependent!
[INFO] Copying 0 resource
[INFO]
[INFO] --- maven-compiler-plugin:3.1:compile (default-compile) @ stormwordcount ---
[INFO] Nothing to compile - all classes are up to date
[INFO]
[INFO] --- maven-resources-plugin:2.6:testResources (default-testResources) @ stormwordcount ---
[WARNING] Using platform encoding (UTF-8 actually) to copy filtered resources, i.e. build is platform dependent!
[INFO] skip non existing resourceDirectory D:\Data\JavaProject\storm\src\test\resources
[INFO]
[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ stormwordcount ---
[INFO] Nothing to compile - all classes are up to date
[INFO]
[INFO] --- maven-surefire-plugin:2.12.4:test (default-test) @ stormwordcount ---
[INFO] No tests to run.
[INFO]
[INFO] --- maven-jar-plugin:2.4:jar (default-jar) @ stormwordcount ---
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1.302 s
[INFO] Finished at: 2018-09-21T16:36:02+08:00
[INFO] Final Memory: 11M/241M
[INFO] ------------------------------------------------------------------------
Process finished with exit code 0
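The build above was triggered from the IDE on Windows; the same jar can also be produced from a shell in the project root with the standard Maven command:

mvn clean package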
[hadoop@node1 ~]$ storm jar stormwordcount.jar wordcount.WordCountTopologyMain
Running: /usr/apps/jdk1.8.0_181-amd64/bin/java -client -Ddaemon.name= -Dstorm.options= -Dstorm.home=/export/servers/apache-storm-1.0.6 -Dstorm.log.dir=/export/servers/apache-storm-1.0.6/logs -Djava.library.path=/usr/local/lib:/opt/local/lib:/usr/lib -Dstorm.conf.file= -cp /export/servers/apache-storm-1.0.6/lib/storm-core-1.0.6.jar:/export/servers/apache-storm-1.0.6/lib/kryo-3.0.3.jar:/export/servers/apache-storm-1.0.6/lib/reflectasm-1.10.1.jar:/export/servers/apache-storm-1.0.6/lib/asm-5.0.3.jar:/export/servers/apache-storm-1.0.6/lib/minlog-1.3.0.jar:/export/servers/apache-storm-1.0.6/lib/objenesis-2.1.jar:/export/servers/apache-storm-1.0.6/lib/clojure-1.7.0.jar:/export/servers/apache-storm-1.0.6/lib/disruptor-3.3.2.jar:/export/servers/apache-storm-1.0.6/lib/log4j-api-2.8.jar:/export/servers/apache-storm-1.0.6/lib/log4j-core-2.8.jar:/export/servers/apache-storm-1.0.6/lib/log4j-slf4j-impl-2.8.jar:/export/servers/apache-storm-1.0.6/lib/slf4j-api-1.7.21.jar:/export/servers/apache-storm-1.0.6/lib/log4j-over-slf4j-1.6.6.jar:/export/servers/apache-storm-1.0.6/lib/servlet-api-2.5.jar:/export/servers/apache-storm-1.0.6/lib/storm-rename-hack-1.0.6.jar:stormwordcount.jar:/export/servers/storm/conf:/export/servers/apache-storm-1.0.6/bin -Dstorm.jar=stormwordcount.jar wordcount.WordCountTopologyMain
678 [main] WARN o.a.s.u.Utils - STORM-VERSION new 1.0.6 old null
758 [main] INFO o.a.s.StormSubmitter - Generated ZooKeeper secret payload for MD5-digest: -8904365196210987616:-7053950946314153300
850 [main] WARN o.a.s.u.Utils - STORM-VERSION new 1.0.6 old 1.0.6
866 [main] INFO o.a.s.s.a.AuthUtils - Got AutoCreds []
870 [main] WARN o.a.s.u.NimbusClient - Using deprecated config nimbus.host for backward compatibility. Please update your storm.yaml so it only has config nimbus.seeds
942 [main] INFO o.a.s.u.NimbusClient - Found leader nimbus : node1:6627
946 [main] WARN o.a.s.u.NimbusClient - Using deprecated config nimbus.host for backward compatibility. Please update your storm.yaml so it only has config nimbus.seeds
956 [main] INFO o.a.s.u.NimbusClient - Found leader nimbus : node1:6627
967 [main] INFO o.a.s.StormSubmitter - Uploading topology jar stormwordcount.jar to assigned location: /export/data/storm/workdir/nimbus/inbox/stormjar-0e45d4c0-daa1-4a6f-aaf8-d8c69e00f63f.jar
Start uploading file 'stormwordcount.jar' to '/export/data/storm/workdir/nimbus/inbox/stormjar-0e45d4c0-daa1-4a6f-aaf8-d8c69e00f63f.jar' (5667 bytes)
[==================================================] 5667 / 5667
File 'stormwordcount.jar' uploaded to '/export/data/storm/workdir/nimbus/inbox/stormjar-0e45d4c0-daa1-4a6f-aaf8-d8c69e00f63f.jar' (5667 bytes)
996 [main] INFO o.a.s.StormSubmitter - Successfully uploaded topology jar to assigned location: /export/data/storm/workdir/nimbus/inbox/stormjar-0e45d4c0-daa1-4a6f-aaf8-d8c69e00f63f.jar
996 [main] WARN o.a.s.u.NimbusClient - Using deprecated config nimbus.host for backward compatibility. Please update your storm.yaml so it only has config nimbus.seeds
1005 [main] INFO o.a.s.u.NimbusClient - Found leader nimbus : node1:6627
1007 [main] INFO o.a.s.StormSubmitter - Submitting topology mywordcount in distributed mode with conf {"topology.workers":2,"storm.zookeeper.topology.auth.scheme":"digest","storm.zookeeper.topology.auth.payload":"-8904365196210987616:-7053950946314153300"}
1424 [main] INFO o.a.s.StormSubmitter - Finished submitting topology: mywordcount
[hadoop@node1 ~]$
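Once submitted, the topology keeps running until it is explicitly killed. Its status can be checked in the Storm UI, and it can be stopped from the shell with the kill command:

[hadoop@node1 ~]$ storm kill mywordcount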