Implementing a Secondary Sort in Hadoop

Requirement: sort the business data by upstream flow (upFlow) and total upstream flow (upCountFlow): ascending by upFlow, and for records with equal upFlow, ascending by upCountFlow.

Data description: fields 7, 8, 9, and 10 of each record (1-based) are upFlow, downFlow, upCountFlow, and downCountFlow respectively.

1363157985066 13726230503 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 游戏娱乐 24 27 2481 24681 200
1363157995052 13826544101 5C-0E-8B-C7-F1-E0:CMCC 120.197.40.4 jd.com 京东购物 4 0 264 0 200
1363157991076 13926435656 20-10-7A-28-CC-0A:CMCC 120.196.100.99 taobao.com 淘宝购物 2 4 132 1512 200
1363154400022 13926251106 5C-0E-8B-8B-B1-50:CMCC 120.197.40.4 cnblogs.com 技术门户 4 0 240 0 200
1363157993044 18211575961 94-71-AC-CD-E6-18:CMCC-EASY 120.196.100.99 iface.qiyi.com 视频网站 15 12 1527 2106 200
1363157995074 84138413 5C-0E-8B-8C-E8-20:7DaysInn 120.197.40.4 122.72.52.12 未知 20 16 4116 1432 200
1363157993055 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 sougou.com 综合门户 18 15 1116 954 200
1363157992093 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 zhilian.com 招聘门户 15 9 918 4938 200
1363157995033 15920133257 5C-0E-8B-C7-BA-20:CMCC 120.197.40.4 sug.so.360.cn 信息安全 20 20 3156 2936 200
1363157983019 13719199419 68-A1-B7-03-07-B1:CMCC-EASY 120.196.100.82 baidu.com 综合搜索 4 0 240 0 200
1363157984041 13660577991 5C-0E-8B-92-5C-20:CMCC-EASY 120.197.40.4 s19.cnzz.com 站点统计 24 9 6960 690 200
1363157973098 15013685858 5C-0E-8B-C7-F7-90:CMCC 120.197.40.4 rank.ie.sogou.com 搜索引擎 28 27 3659 3538 200
1363157986029 15989002119 E8-99-C4-4E-93-E0:CMCC-EASY 120.196.100.99 www.umeng.com 站点统计 3 3 1938 180 200
1363157986041 13480253104 5C-0E-8B-C7-FC-80:CMCC-EASY 120.197.40.4 csdn.net 技术门户 3 3 180 180 200
1363157984040 13602846565 5C-0E-8B-8B-B6-00:CMCC 120.197.40.4 2052.flash2-http.qq.com 综合门户 15 12 1938 2910 200
1363157995093 13922314466 00-FD-07-A2-EC-BA:CMCC 120.196.100.82 img.qfc.cn 图片大全 12 12 3008 3720 200
1363157982040 13502468823 5C-0A-5B-6A-0B-D4:CMCC-EASY 120.196.100.99 y0.ifengimg.com 综合门户 57 102 7335 110349 200
1363157986072 18320173382 84-25-DB-4F-10-1A:CMCC-EASY 120.196.100.99 input.shouji.sogou.com 搜索引擎 21 18 9531 2412 200
1363157990043 13925057413 00-1F-64-E1-E6-9A:CMCC 120.196.100.55 t3.baidu.com 搜索引擎 69 63 11058 48243 200
1363157988072 13760778710 00-FD-07-A4-7B-08:CMCC 120.196.100.82 http://youku.com/ 视频网站 2 2 120 120 200
1363157985079 13823070001 20-7C-8F-70-68-1F:CMCC 120.196.100.99 img.qfc.cn 图片浏览 6 3 360 180 200
1363157985069 13600217502 00-1F-64-E2-E8-B1:CMCC 120.196.100.55 www.baidu.com 综合门户 18 138 1080 186852 200

1. Write the bean class. It implements WritableComparable, which is where the comparison logic lives.

package com.xiaofei.mr.sort;

import org.apache.hadoop.io.WritableComparable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * @author yangquan
 * @date 2021.07.13
 */
public class FlowBeanCompare implements WritableComparable<FlowBeanCompare> {

    private Integer upFlow;
    private Integer downFlow;
    private Integer upCountFlow;
    private Integer downCountFlow;

    public FlowBeanCompare() {
    }

    public FlowBeanCompare(Integer upFlow, Integer downFlow, Integer upCountFlow, Integer downCountFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.upCountFlow = upCountFlow;
        this.downCountFlow = downCountFlow;
    }

    @Override
    public int compareTo(FlowBeanCompare o) {
        // Primary sort key: upFlow, ascending.
        int result = this.upFlow.compareTo(o.upFlow);
        if (result == 0) {
            // Secondary sort key: upCountFlow, ascending, as the requirement states.
            return this.upCountFlow.compareTo(o.upCountFlow);
        }
        return result;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        // Serialize the fields for the shuffle; the order must match readFields().
        out.writeInt(upFlow);
        out.writeInt(downFlow);
        out.writeInt(upCountFlow);
        out.writeInt(downCountFlow);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // Deserialize in exactly the order written by write().
        this.upFlow = in.readInt();
        this.downFlow = in.readInt();
        this.upCountFlow = in.readInt();
        this.downCountFlow = in.readInt();
    }

    public Integer getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(Integer upFlow) {
        this.upFlow = upFlow;
    }

    public Integer getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(Integer downFlow) {
        this.downFlow = downFlow;
    }

    public Integer getUpCountFlow() {
        return upCountFlow;
    }

    public void setUpCountFlow(Integer upCountFlow) {
        this.upCountFlow = upCountFlow;
    }

    public Integer getDownCountFlow() {
        return downCountFlow;
    }

    public void setDownCountFlow(Integer downCountFlow) {
        this.downCountFlow = downCountFlow;
    }

    @Override
    public String toString() {
        return "FlowBean{" +
                "upFlow=" + upFlow +
                ", downFlow=" + downFlow +
                ", upCountFlow=" + upCountFlow +
                ", downCountFlow=" + downCountFlow +
                '}';
    }

}
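Before wiring the bean into a job, the comparison logic can be sanity-checked with a plain Java sort. This is a minimal local sketch; the check class is illustrative and not part of the original code, and the sample values are taken from the data above:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Hypothetical local check of FlowBeanCompare's ordering, outside Hadoop.
public class FlowBeanCompareCheck {
    public static void main(String[] args) {
        List<FlowBeanCompare> beans = new ArrayList<>();
        beans.add(new FlowBeanCompare(4, 0, 264, 0));
        beans.add(new FlowBeanCompare(2, 4, 132, 1512));
        beans.add(new FlowBeanCompare(4, 0, 240, 0));

        // Collections.sort uses compareTo: upFlow ascending, then upCountFlow ascending.
        Collections.sort(beans);
        for (FlowBeanCompare bean : beans) {
            System.out.println(bean);
        }
        // Expected order: the upFlow=2 bean first, then the two upFlow=4 beans
        // ordered by upCountFlow (240 before 264).
    }
}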
2. Write the driver class that runs the job.
package com.xiaofei.mr.sort;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;

/**
 * @author yangquan
 * @date 2021.07.13
 */
public class FlowBeanSort extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(super.getConf(), FlowBeanSort.class.getSimpleName());
        job.setJarByClass(FlowBeanSort.class);

        // Input: one flow record per line.
        job.setInputFormatClass(TextInputFormat.class);
        Path inpath = new Path("D:\\数据资料\\hadoop数据资料\\SortFlow\\intput");
        TextInputFormat.addInputPath(job, inpath);

        // The bean is the map output key, so the shuffle sorts records by its compareTo().
        job.setMapperClass(FlowMapper.class);
        job.setMapOutputKeyClass(FlowBeanCompare.class);
        job.setMapOutputValueClass(NullWritable.class);

        job.setOutputFormatClass(TextOutputFormat.class);

        // Delete the output directory if it already exists; otherwise the job fails.
        Path outpath = new Path("D:\\数据资料\\hadoop数据资料\\SortFlow\\out");
        FileSystem fileSystem = FileSystem.get(super.getConf());
        if (fileSystem.exists(outpath)) {
            fileSystem.delete(outpath, true);
        }
        TextOutputFormat.setOutputPath(job, outpath);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        int run = ToolRunner.run(new Configuration(), new FlowBeanSort(), args);
        System.exit(run);
    }

    public static class FlowMapper extends Mapper<LongWritable, Text, FlowBeanCompare, NullWritable> {

        private FlowBeanCompare flowBeanCompare;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Reuse a single bean instance; context.write() serializes it immediately.
            flowBeanCompare = new FlowBeanCompare();
        }

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] splits = value.toString().split("\t");
            int upFlow = Integer.parseInt(splits[6]);
            int downFlow = Integer.parseInt(splits[7]);
            int upCountFlow = Integer.parseInt(splits[8]);
            int downCountFlow = Integer.parseInt(splits[9]);

            flowBeanCompare.setUpFlow(upFlow);
            flowBeanCompare.setDownFlow(downFlow);
            flowBeanCompare.setUpCountFlow(upCountFlow);
            flowBeanCompare.setDownCountFlow(downCountFlow);

            context.write(flowBeanCompare, NullWritable.get());
            // Note: the bean must be the map output key for the custom compareTo()
            // to drive the shuffle sort. If a Text key were emitted instead (as in
            // the commented-out lines below), records would be sorted lexicographically
            // by Text's default comparator rather than by the numeric flow values.

            /*String line = splits[6] + "," + splits[7] + "," + splits[8] + "," + splits[9];
            context.write(new Text(line), NullWritable.get());*/
        }
    }

}
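One detail worth noting: the driver never calls job.setNumReduceTasks(), so Hadoop runs a single reduce task, and that single task is what yields one globally sorted output file. If more reducers were used, a Partitioner that keeps key ranges together would be needed to preserve global order. The sketch below is illustrative only; the class name UpFlowPartitioner and the threshold of 20 are assumptions based on the sample data, not part of the original code:

import org.apache.hadoop.mapreduce.Partitioner;

// Hypothetical range partitioner for a two-reducer setup: low upFlow values
// go to reducer 0, the rest to reducer 1, so the concatenated part files
// remain globally sorted.
public static class UpFlowPartitioner extends Partitioner<FlowBeanCompare, NullWritable> {
    @Override
    public int getPartition(FlowBeanCompare key, NullWritable value, int numPartitions) {
        // The split point 20 is an assumed threshold for this sample data.
        return key.getUpFlow() < 20 ? 0 : Math.min(1, numPartitions - 1);
    }
}

It would be registered with job.setPartitionerClass(UpFlowPartitioner.class) together with job.setNumReduceTasks(2).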
3. Run result: because no custom reducer is set, Hadoop falls back to the default identity reducer, so each output line is simply the bean's toString(). With the sample data above, the first line would be FlowBean{upFlow=2, downFlow=2, upCountFlow=120, downCountFlow=120}.

4. Improve the formatted output. If the output is handed to other consumers, the toString-style records would force them to do their own parsing, so we can reformat each record before it is written by adding an explicit reducer.

public static class FlowBeanReducer extends Reducer<FlowBeanCompare, NullWritable, Text, NullWritable> {
    @Override
    protected void reduce(FlowBeanCompare key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {
        // Iterate the values so duplicate records are preserved: the framework
        // refreshes the key instance for every value in the group.
        for (NullWritable ignored : values) {
            // Flatten the sorted bean into a plain comma-separated line.
            String line = key.getUpFlow() + "," + key.getDownFlow() + ","
                    + key.getUpCountFlow() + "," + key.getDownCountFlow();
            context.write(new Text(line), NullWritable.get());
        }
    }
}
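For this reducer to take effect, it also has to be registered in run(); the original driver never sets a reducer class. Roughly the following lines would be added (along with an import of org.apache.hadoop.mapreduce.Reducer), consistent with the code above:

// In run(): register the reducer and declare the final output types.
job.setReducerClass(FlowBeanReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(NullWritable.class);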

5. Run result
