提示:文章内容主要以案例为主
目录
前言
项目说明
一、程序需求
1.需求
2.数据
二、编码操作
1.项目建包目录
2.FriendsRecommend.java
3.FriendsRecommendMapper.java
4.FriendsRecommendReduce.java
三、Xshell运行的步骤
1.创建目录
2.上传程序
3.分布式文件系统上传测试数据
4.执行程序
5.查看结果
总结
提示:以下是本篇文章正文内容,下面案例可供参考
xiaoming laowang renhua linzhiling
laowang xiaoming fengjie
renhua xiaoming ligang fengjie
linzhiling xiaoming ligang fengjie guomeimei
ligang renhua fengjie linzhiling
guomeimei fengjie linzhiling
fengjie renhua laowang linzhiling guomeimei
package org.hadoop.mr;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;

/**
 * Driver for the friend-recommendation MapReduce job.
 *
 * <p>Usage: {@code hadoop jar <jar> <input path> <output path>} where
 * args[0] is the HDFS input directory and args[1] is the HDFS output directory.
 */
public class FriendsRecommend {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: FriendsRecommend <input path> <output path>");
            System.exit(2);
        }
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "friends-recommend");
        job.setJarByClass(FriendsRecommend.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));

        Path output = new Path(args[1]);
        // If the output directory already exists, delete it so reruns during
        // debugging do not fail with "output directory already exists".
        if (output.getFileSystem(conf).exists(output)) {
            output.getFileSystem(conf).delete(output, true);
        }
        FileOutputFormat.setOutputPath(job, output);

        job.setMapperClass(FriendsRecommendMapper.class);
        job.setReducerClass(FriendsRecommendReduce.class);

        // Bug fix: the original set only the map output KEY class and the job
        // output VALUE class. Declare all four so the shuffle and the final
        // output both serialize Text/IntWritable correctly.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Propagate job success/failure to the shell as the process exit code.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
package org.hadoop.mr;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.StringUtils;
import java.io.IOException;

/**
 * Mapper for friend recommendation.
 *
 * <p>Each input line is: {@code owner friend1 friend2 ...}. For every line it
 * emits (pair, 0) for each DIRECT friendship (owner with each friend), and
 * (pair, 1) for each INDIRECT pair (two people who share the owner as a common
 * friend). The reducer drops pairs that were ever marked direct.
 *
 * <p>Bug fix: the original declared {@code extends Mapper} with raw types, so
 * {@code map(LongWritable, Text, Context)} did not override the erased
 * {@code map(Object, Object, Context)} and the {@code @Override} annotation
 * would not compile. The type parameters below restore the contract.
 */
public class FriendsRecommendMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    // Reused across map() calls to avoid per-record allocation (standard Hadoop idiom).
    private final Text mkey = new Text();
    private final IntWritable mval = new IntWritable();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Split on a single space character (char overload, not a String regex).
        String[] strs = StringUtils.split(value.toString(), ' ');
        for (int i = 1; i < strs.length; i++) {
            // Direct friendship: line owner (strs[0]) with each listed friend. Value 0.
            mkey.set(fof(strs[0], strs[i]));
            mval.set(0);
            context.write(mkey, mval);
            // Indirect relation: every pair of the owner's friends shares the
            // owner as a common friend. Value 1 (one shared friend).
            for (int j = i + 1; j < strs.length; j++) {
                mkey.set(fof(strs[i], strs[j]));
                mval.set(1);
                context.write(mkey, mval);
            }
        }
    }

    /**
     * Builds a canonical (lexicographically ordered) key for an unordered pair,
     * so that "B C" and "C B" aggregate under the same reducer key.
     *
     * @return {@code min:max} of the two names joined by a colon
     */
    private static String fof(String str1, String str2) {
        // compareTo > 0 means str1 sorts after str2, so swap them.
        if (str1.compareTo(str2) > 0) {
            return str2 + ":" + str1;
        }
        return str1 + ":" + str2;
    }
}
package org.hadoop.mr;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;

/**
 * Reducer for friend recommendation.
 *
 * <p>For each canonical pair key, values are 0 (the pair is directly friends)
 * or 1 (the pair shares one common friend). If ANY value is 0 the pair is
 * suppressed; otherwise the sum of 1s — the number of common friends — is
 * emitted as the recommendation strength.
 *
 * <p>Bug fix: the original declared {@code extends Reducer} with raw types, so
 * the typed {@code reduce(Text, Iterable, Context)} did not override the erased
 * base method and the {@code @Override} annotation would not compile. The type
 * parameters below restore the contract.
 */
public class FriendsRecommendReduce extends Reducer<Text, IntWritable, Text, IntWritable> {
    // Reused across reduce() calls to avoid per-key allocation.
    private final IntWritable mValue = new IntWritable();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int flg = 0; // becomes 1 when a direct friendship (value 0) is seen
        int sum = 0; // count of common friends (each indirect record contributes 1)
        for (IntWritable value : values) {
            if (value.get() == 0) { // direct relation marker
                flg = 1;
            }
            sum += value.get(); // direct markers add 0, so sum stays the indirect count
        }
        // Only recommend pairs that are NOT already direct friends.
        if (flg == 0) {
            mValue.set(sum);
            context.write(key, mValue);
        }
    }
}
# --- 1. Create a local working directory on the server ---
cd /opt/
ls # if there is no testData directory here, create it manually first
cd testData/
mkdir friend
cd friend/
# --- 2. Upload the job jar via Xshell/lrzsz (rz opens the file-transfer dialog) ---
cd /opt/testData/friend/
rz
# --- 2b. Upload the test data file (friend.txt) the same way ---
cd /opt/testData/friend/
rz
# --- 3. Start HDFS and YARN, then put the test data into HDFS ---
start-dfs.sh
start-yarn.sh
hdfs dfs -mkdir /input
hdfs dfs -put friend.txt /input
hdfs dfs -ls /input
# --- 4. Run the job: arguments are the HDFS input and output directories ---
hadoop jar FriendsRecommend-1.0-SNAPSHOT.jar /input /output
# --- 5. Inspect the reducer output ---
hdfs dfs -cat /output/part-r-00000
本文主要介绍了Hadoop学习案例——MapReduce课程设计 好友推荐功能,过程中要注意jar包的打包。