Steps to run a C++ program on Hadoop

1. Write the following program and save it as wordcount-simple.cpp:

#include"hadoop/Pipes.hh"
#include "hadoop/TemplateFactory.hh"
#include "hadoop/StringUtils.hh"

const std::string WORDCOUNT = "WORDCOUNT";
const std::string INPUT_WORDS = "INPUT_WORDS";
const std::string OUTPUT_WORDS = "OUTPUT_WORDS";

class WordCountMap: public HadoopPipes::Mapper { // Mapper class
public:
  HadoopPipes::TaskContext::Counter* inputWords;

  WordCountMap(HadoopPipes::TaskContext& context) {
    inputWords = context.getCounter(WORDCOUNT, INPUT_WORDS);
  }

  void map(HadoopPipes::MapContext& context) {
    std::vector<std::string> words =
      HadoopUtils::splitString(context.getInputValue(), " "); // split the input line on spaces
    for (unsigned int i = 0; i < words.size(); ++i) {
      context.emit(words[i], "1"); // emit each word as the key, with a value of "1"
    }
    context.incrementCounter(inputWords, words.size()); // update the counter, which also reports progress to the framework
  }
};

class WordCountReduce: public HadoopPipes::Reducer { // Reducer class
public:
  HadoopPipes::TaskContext::Counter* outputWords;

  WordCountReduce(HadoopPipes::TaskContext& context) {
    outputWords = context.getCounter(WORDCOUNT, OUTPUT_WORDS);
  }

  void reduce(HadoopPipes::ReduceContext& context) {
    int sum = 0;
    while (context.nextValue()) {
      sum += HadoopUtils::toInt(context.getInputValue()); // accumulate this word's occurrence count
    }
    context.emit(context.getInputKey(), HadoopUtils::toString(sum)); // emit the final (word, count) pair
    context.incrementCounter(outputWords, 1);
  }
};

int main(int argc, char *argv[]) {
  // Run the task, wiring our Mapper and Reducer into the Pipes framework
  return HadoopPipes::runTask(
      HadoopPipes::TemplateFactory<WordCountMap, WordCountReduce>());
}
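If you want to sanity-check the word-count logic without a cluster, the following standalone sketch mirrors what the Mapper and Reducer above compute (plain C++ only; std::istringstream stands in for HadoopUtils::splitString and a std::map stands in for the shuffle/reduce phase, both assumptions for illustration):

#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Standalone stand-in for the Pipes job above: counts the words in one
// line of input, the same computation the Mapper/Reducer pair performs.
int main() {
    std::string line = "hello world";
    std::istringstream iss(line);       // whitespace split, like splitString(value, " ")
    std::map<std::string, int> counts;  // plays the role of shuffle + reduce
    std::string word;
    while (iss >> word) {
        ++counts[word];                 // equivalent to emit(word, "1") plus summing
    }
    for (std::map<std::string, int>::const_iterator it = counts.begin();
         it != counts.end(); ++it) {
        std::cout << it->first << "\t" << it->second << "\n"; // e.g. "hello<TAB>1"
    }
    return 0;
}

Compiled with plain g++ and run, it prints hello 1 and world 1, which is what the Pipes job should produce for the same input line.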

2. Create a Makefile:

CC = g++
HADOOP_INSTALL = $(HADOOP_HOME)
PLATFORM = Linux-i386-32
CPPFLAGS = -O2 -m32 -I$(HADOOP_INSTALL)/c++/$(PLATFORM)/include
LDFLAGS += -lcrypto -lhadooppipes -lhadooputils -lpthread

wordcount-simple: wordcount-simple.cpp
	$(CC) $(CPPFLAGS) $< -Wall -L$(HADOOP_INSTALL)/c++/$(PLATFORM)/lib $(LDFLAGS) -o $@
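Note: PLATFORM above assumes a 32-bit Linux build of Hadoop. If your machine and Hadoop install are 64-bit, the bundled Pipes libraries usually sit under c++/Linux-amd64-64 instead (an assumption based on the standard tarball layout):

PLATFORM = Linux-amd64-64   # assumption: stock 64-bit Hadoop tarball layout

In that case, also drop -m32 from CPPFLAGS.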

3. Run make to build the executable.

4. Upload the compiled executable to HDFS:

hadoop dfs -copyFromLocal ./wordcount-simple /home
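You can confirm the upload with:

hadoop dfs -ls /home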

5. Create a configuration file, word.xml:

<?xml version="1.0"?>
<configuration>
  <property>
    <!-- Set the binary path on DFS -->
    <name>hadoop.pipes.executable</name>
    <value>/home/wordcount-simple</value>
  </property>
  <property>
    <name>hadoop.pipes.java.recordreader</name>
    <value>true</value>
  </property>
  <property>
    <name>hadoop.pipes.java.recordwriter</name>
    <value>true</value>
  </property>
</configuration>

(hadoop.pipes.executable must point at the binary uploaded in step 4, i.e. /home/wordcount-simple.)
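As an alternative to word.xml, the same settings can usually be passed straight on the command line; a sketch, assuming your Hadoop version accepts generic -D options for pipes jobs (-program points at the executable's HDFS path, and the input file from step 7 must already be in place):

hadoop pipes -D hadoop.pipes.java.recordreader=true \
             -D hadoop.pipes.java.recordwriter=true \
             -program /home/wordcount-simple \
             -input /home/hello.txt -output /home/result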

6. Create a local file hello.txt with the content: hello world
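For example:

echo "hello world" > hello.txt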

7. Upload hello.txt to HDFS:

hadoop dfs -copyFromLocal ./hello.txt /home

8. Run the program:

hadoop pipes -conf ./word.xml -input /home/hello.txt -output /home/result

(The result directory is created automatically on HDFS to hold the job's output.)
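When the job finishes, you can inspect the output; part-00000 is the usual name of the first reducer's output file, though the exact name can vary:

hadoop dfs -cat /home/result/part-00000

For the hello.txt above, this should print hello and world with a count of 1 each.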


