数据文件：取每行第三列的价格，求其中最大的 N 个值（Top N）
[root@master IMFdatatest]#hadoop dfs -cat /library/TopN1.txt
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.
16/02/15 06:10:39 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
1,9819,100,121
2,8918,2000,111
3,2813,1234,22
4,9100,10,1101
5,3210,490,111
6,1298,28,1211
7,1010,281,90
8,1818,9000,20
[root@master IMFdatatest]#hadoop dfs -cat /library/TopN2.txt
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.
16/02/15 06:10:47 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
10,3333,10,100
11,9321,1000,293
12,3881,701,20
13,6791,910,30
14,8888,11,39
package com.dtspark.hadoop.hellomapreduce;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.StringTokenizer;
/**
 * TopNSorted2: computes the global Top-N values of the 3rd comma-separated
 * column ("price") across all input files.
 *
 * N is read from the configuration key "topn" (set to 3 in main; default 5).
 * Each mapper keeps a local Top-N in an int array of size N+1, where slot 0
 * is a scratch slot: every candidate overwrites slot 0, the array is sorted,
 * and the smallest value ends up back in slot 0 to be discarded next time.
 * Mappers emit their survivors in cleanup(); a single reducer merges them
 * with the same scheme and writes rank -> value in cleanup().
 */
public class TopNSorted2 {

  /** Mapper: retains a per-split Top-N of column 3; emits it in cleanup(). */
  public static class DataMapper
      extends Mapper<LongWritable, Text, Text, Text> {

    // topN[0] = scratch slot; topN[1..length] = current top values,
    // ascending after each Arrays.sort.
    int[] topN;
    int length;

    @Override
    protected void setup(Context context)
        throws IOException, InterruptedException {
      length = context.getConfiguration().getInt("topn", 5);
      topN = new int[length + 1];
      // FIX: bound was hard-coded to 4, overrunning the array when topn < 3.
      for (int i = 0; i < length + 1; i++) {
        System.out.println("map setup: " + i + " =i,topN " + topN[i]);
      }
    }

    @Override
    public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      System.out.println("Map Methond Invoked!!!");
      String[] data = value.toString().split(",");
      // Only well-formed 4-column records are considered.
      if (4 == data.length) {
        int cost = Integer.parseInt(data[2]);
        // Overwrite the scratch slot, then re-sort: the smallest of the
        // N+1 values lands in slot 0 and is dropped on the next record.
        topN[0] = cost;
        System.out.println("map: topN[0]" + topN[0]);
        Arrays.sort(topN);
        // FIX: bound was hard-coded to 4 (same overrun risk as in setup).
        for (int i = 0; i < length + 1; i++) {
          System.out.println("map: " + i + " =i,topN " + topN[i]);
        }
      }
    }

    @Override
    protected void cleanup(Context context)
        throws IOException, InterruptedException {
      // Emit each retained candidate (slots 1..length); value doubles as key
      // so the reducer can read it from either side.
      for (int i = 1; i < length + 1; i++) {
        Text out = new Text(String.valueOf(topN[i]));
        context.write(out, out);
      }
    }
  }

  /** Reducer: merges the mappers' candidates into the final global Top-N. */
  public static class DataReducer
      extends Reducer<Text, Text, Text, Text> {

    // Same layout as the mapper: slot 0 is scratch, 1..length hold the top.
    int[] topN;
    int length;

    @Override
    protected void setup(Context context)
        throws IOException, InterruptedException {
      length = context.getConfiguration().getInt("topn", 5);
      topN = new int[length + 1];
      // FIX: bound was hard-coded to 4, overrunning the array when topn < 3.
      for (int i = 0; i < length + 1; i++) {
        System.out.println("Reducer setup: " + i + " =i,topN " + topN[i]);
      }
    }

    @Override
    public void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
      System.out.println("Reduce Methond Invoked!!!");
      // FIX: the original consumed the key once per group and ignored
      // `values`, so a cost emitted by several mappers occupied only one
      // slot. Iterate the values so each duplicate competes for a slot.
      int cost = Integer.parseInt(key.toString());
      for (Text ignored : values) {
        topN[0] = cost;
        System.out.println("reduce: topN[0]" + topN[0]);
        Arrays.sort(topN);
        for (int i = 0; i < length; i++) {
          System.out.println("reduce: " + i + " =i,topN " + topN[i]);
        }
      }
    }

    @Override
    protected void cleanup(Context context)
        throws IOException, InterruptedException {
      // Walk from the largest slot down, writing rank (1-based) -> value.
      for (int i = length; i > 0; i--) {
        System.out.println("reduce cleanup: " + i + " =i,topN " + topN[i]);
        context.write(new Text(String.valueOf(length - i + 1)),
            new Text(String.valueOf(topN[i])));
      }
    }
  }

  /**
   * Job driver: accepts one or more input paths followed by an output path.
   * Sets "topn" = 3 before parsing generic options, so -Dtopn=... can still
   * override it on the command line.
   */
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setInt("topn", 3);
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
      // FIX: usage message previously named the wrong class (TopNSorted).
      System.err.println("Usage: TopNSorted2 <in> [<in>...] <out>");
      System.exit(2);
    }
    Job job = Job.getInstance(conf, "TopNSorted");
    job.setJarByClass(TopNSorted2.class);
    job.setMapperClass(DataMapper.class);
    job.setReducerClass(DataReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    // All arguments except the last are input paths.
    for (int i = 0; i < otherArgs.length - 1; ++i) {
      FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job,
        new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
运行结果:
[root@master IMFdatatest]#hadoop dfs -cat /library/outputTopN/part-r-00000
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.
16/02/15 06:37:06 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
1 9000
2 2000
3 1234
打印的日志：
INFO [main] (org.apache.hadoop.conf.Configuration.deprecation:1049) 2016-02-15 20:26:09,600 ---- session.id is deprecated. Instead, use dfs.metrics.session-id
INFO [main] (org.apache.hadoop.metrics.jvm.JvmMetrics:76) 2016-02-15 20:26:09,606 ---- Initializing JVM Metrics with processName=JobTracker, sessionId=
WARN [main] (org.apache.hadoop.mapreduce.JobSubmitter:261) 2016-02-15 20:26:10,088 ---- No job jar file set. User classes may not be found. See Job or Job#setJar(String).
INFO [main] (org.apache.hadoop.mapreduce.lib.input.FileInputFormat:281) 2016-02-15 20:26:10,164 ---- Total input paths to process : 2
INFO [main] (org.apache.hadoop.mapreduce.JobSubmitter:494) 2016-02-15 20:26:10,206 ---- number of splits:2
INFO [main] (org.apache.hadoop.mapreduce.JobSubmitter:583) 2016-02-15 20:26:10,301 ---- Submitting tokens for job: job_local295993511_0001
INFO [main] (org.apache.hadoop.mapreduce.Job:1300) 2016-02-15 20:26:10,460 ---- The url to track the job:http://localhost:8080/
INFO [main] (org.apache.hadoop.mapreduce.Job:1345) 2016-02-15 20:26:10,461 ---- Running job: job_local295993511_0001
INFO [Thread-3] (org.apache.hadoop.mapred.LocalJobRunner:471) 2016-02-15 20:26:10,463 ---- OutputCommitter set in config null
INFO [Thread-3] (org.apache.hadoop.mapred.LocalJobRunner:489) 2016-02-15 20:26:10,469 ---- OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
INFO [Thread-3] (org.apache.hadoop.mapred.LocalJobRunner:448) 2016-02-15 20:26:10,521 ---- Waiting for map tasks
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:224) 2016-02-15 20:26:10,522 ---- Starting task: attempt_local295993511_0001_m_000000_0
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.yarn.util.ProcfsBasedProcessTree:181) 2016-02-15 20:26:10,553 ---- ProcfsBasedProcessTree currently is supported only on Linux.
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.Task:587) 2016-02-15 20:26:10,699 ---- Using ResourceCalculatorProcessTree :org.apache.hadoop.yarn.util.WindowsBasedProcessTree@199cb24
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:753) 2016-02-15 20:26:10,705 ---- Processing split: hdfs://192.168.2.100:9000/library/TopN/TopN1.txt:0+120
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1202) 2016-02-15 20:26:10,780 ---- (EQUATOR) 0 kvi 26214396(104857584)
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:995) 2016-02-15 20:26:10,781 ---- mapreduce.task.io.sort.mb: 100
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:996) 2016-02-15 20:26:10,782 ---- soft limit at 83886080
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:997) 2016-02-15 20:26:10,782 ---- bufstart = 0; bufvoid = 104857600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:998) 2016-02-15 20:26:10,782 ---- kvstart = 26214396; length = 6553600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:402) 2016-02-15 20:26:10,786 ---- Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
map setup: 0 =i,topN 0
map setup: 1 =i,topN 0
map setup: 2 =i,topN 0
map setup: 3 =i,topN 0
INFO [main] (org.apache.hadoop.mapreduce.Job:1366) 2016-02-15 20:26:11,463 ---- Job job_local295993511_0001 running in uber mode : false
INFO [main] (org.apache.hadoop.mapreduce.Job:1373) 2016-02-15 20:26:11,465 ---- map 0% reduce 0%
Map Methond Invoked!!!
map: topN[0]100
map: 0 =i,topN 0
map: 1 =i,topN 0
map: 2 =i,topN 0
map: 3 =i,topN 100
Map Methond Invoked!!!
map: topN[0]2000
map: 0 =i,topN 0
map: 1 =i,topN 0
map: 2 =i,topN 100
map: 3 =i,topN 2000
Map Methond Invoked!!!
map: topN[0]1234
map: 0 =i,topN 0
map: 1 =i,topN 100
map: 2 =i,topN 1234
map: 3 =i,topN 2000
Map Methond Invoked!!!
map: topN[0]10
map: 0 =i,topN 10
map: 1 =i,topN 100
map: 2 =i,topN 1234
map: 3 =i,topN 2000
Map Methond Invoked!!!
map: topN[0]490
map: 0 =i,topN 100
map: 1 =i,topN 490
map: 2 =i,topN 1234
map: 3 =i,topN 2000
Map Methond Invoked!!!
map: topN[0]28
map: 0 =i,topN 28
map: 1 =i,topN 490
map: 2 =i,topN 1234
map: 3 =i,topN 2000
Map Methond Invoked!!!
map: topN[0]281
map: 0 =i,topN 281
map: 1 =i,topN 490
map: 2 =i,topN 1234
map: 3 =i,topN 2000
Map Methond Invoked!!!
map: topN[0]9000
map: 0 =i,topN 490
map: 1 =i,topN 1234
map: 2 =i,topN 2000
map: 3 =i,topN 9000
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:12,049 ----
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1457) 2016-02-15 20:26:12,052 ---- Starting flush of map output
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1475) 2016-02-15 20:26:12,052 ---- Spilling map output
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1476) 2016-02-15 20:26:12,052 ---- bufstart = 0; bufend = 30; bufvoid = 104857600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1478) 2016-02-15 20:26:12,053 ---- kvstart = 26214396(104857584); kvend = 26214388(104857552); length = 9/6553600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1660) 2016-02-15 20:26:12,074 ---- Finished spill 0
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.Task:1001) 2016-02-15 20:26:12,084 ---- Task:attempt_local295993511_0001_m_000000_0 is done. And is in the process of committing
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:12,099 ---- map
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.Task:1121) 2016-02-15 20:26:12,100 ---- Task 'attempt_local295993511_0001_m_000000_0' done.
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:249) 2016-02-15 20:26:12,100 ---- Finishing task: attempt_local295993511_0001_m_000000_0
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:224) 2016-02-15 20:26:12,100 ---- Starting task: attempt_local295993511_0001_m_000001_0
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.yarn.util.ProcfsBasedProcessTree:181) 2016-02-15 20:26:12,103 ---- ProcfsBasedProcessTree currently is supported only on Linux.
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.Task:587) 2016-02-15 20:26:12,221 ---- Using ResourceCalculatorProcessTree :org.apache.hadoop.yarn.util.WindowsBasedProcessTree@1c8fa85
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:753) 2016-02-15 20:26:12,225 ---- Processing split: hdfs://192.168.2.100:9000/library/TopN/TopN2.txt:0+76
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1202) 2016-02-15 20:26:12,255 ---- (EQUATOR) 0 kvi 26214396(104857584)
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:995) 2016-02-15 20:26:12,255 ---- mapreduce.task.io.sort.mb: 100
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:996) 2016-02-15 20:26:12,255 ---- soft limit at 83886080
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:997) 2016-02-15 20:26:12,256 ---- bufstart = 0; bufvoid = 104857600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:998) 2016-02-15 20:26:12,256 ---- kvstart = 26214396; length = 6553600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:402) 2016-02-15 20:26:12,257 ---- Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
map setup: 0 =i,topN 0
map setup: 1 =i,topN 0
map setup: 2 =i,topN 0
map setup: 3 =i,topN 0
Map Methond Invoked!!!
map: topN[0]10
map: 0 =i,topN 0
map: 1 =i,topN 0
map: 2 =i,topN 0
map: 3 =i,topN 10
Map Methond Invoked!!!
map: topN[0]1000
map: 0 =i,topN 0
map: 1 =i,topN 0
map: 2 =i,topN 10
map: 3 =i,topN 1000
Map Methond Invoked!!!
map: topN[0]701
map: 0 =i,topN 0
map: 1 =i,topN 10
map: 2 =i,topN 701
map: 3 =i,topN 1000
Map Methond Invoked!!!
map: topN[0]910
map: 0 =i,topN 10
map: 1 =i,topN 701
map: 2 =i,topN 910
map: 3 =i,topN 1000
Map Methond Invoked!!!
map: topN[0]11
map: 0 =i,topN 11
map: 1 =i,topN 701
map: 2 =i,topN 910
map: 3 =i,topN 1000
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:12,267 ----
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1457) 2016-02-15 20:26:12,267 ---- Starting flush of map output
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1475) 2016-02-15 20:26:12,267 ---- Spilling map output
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1476) 2016-02-15 20:26:12,267 ---- bufstart = 0; bufend = 26; bufvoid = 104857600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1478) 2016-02-15 20:26:12,267 ---- kvstart = 26214396(104857584); kvend = 26214388(104857552); length = 9/6553600
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.MapTask:1660) 2016-02-15 20:26:12,283 ---- Finished spill 0
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.Task:1001) 2016-02-15 20:26:12,288 ---- Task:attempt_local295993511_0001_m_000001_0 is done. And is in the process of committing
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:12,292 ---- map
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.Task:1121) 2016-02-15 20:26:12,292 ---- Task 'attempt_local295993511_0001_m_000001_0' done.
INFO [LocalJobRunner Map Task Executor #0] (org.apache.hadoop.mapred.LocalJobRunner:249) 2016-02-15 20:26:12,292 ---- Finishing task: attempt_local295993511_0001_m_000001_0
INFO [Thread-3] (org.apache.hadoop.mapred.LocalJobRunner:456) 2016-02-15 20:26:12,293 ---- map task executor complete.
INFO [Thread-3] (org.apache.hadoop.mapred.LocalJobRunner:448) 2016-02-15 20:26:12,295 ---- Waiting for reduce tasks
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.LocalJobRunner:302) 2016-02-15 20:26:12,295 ---- Starting task: attempt_local295993511_0001_r_000000_0
INFO [pool-6-thread-1] (org.apache.hadoop.yarn.util.ProcfsBasedProcessTree:181) 2016-02-15 20:26:12,305 ---- ProcfsBasedProcessTree currently is supported only on Linux.
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Task:587) 2016-02-15 20:26:12,421 ---- Using ResourceCalculatorProcessTree :org.apache.hadoop.yarn.util.WindowsBasedProcessTree@c6c5de
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.ReduceTask:362) 2016-02-15 20:26:12,425 ---- Using ShuffleConsumerPlugin:org.apache.hadoop.mapreduce.task.reduce.Shuffle@1dfdd90
INFO [pool-6-thread-1] (org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl:196) 2016-02-15 20:26:12,447 ---- MergerManager: memoryLimit=363285696, maxSingleShuffleLimit=90821424, mergeThreshold=239768576, ioSortFactor=10, memToMemMergeOutputsThreshold=10
INFO [EventFetcher for fetching Map Completion Events] (org.apache.hadoop.mapreduce.task.reduce.EventFetcher:61) 2016-02-15 20:26:12,451 ---- attempt_local295993511_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
INFO [main] (org.apache.hadoop.mapreduce.Job:1373) 2016-02-15 20:26:12,479 ---- map 100% reduce 0%
INFO [localfetcher#1] (org.apache.hadoop.mapreduce.task.reduce.LocalFetcher:141) 2016-02-15 20:26:12,508 ---- localfetcher#1 about to shuffle output of map attempt_local295993511_0001_m_000001_0 decomp: 34 len: 38 to MEMORY
INFO [localfetcher#1] (org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput:100) 2016-02-15 20:26:12,517 ---- Read 34 bytes from map-output for attempt_local295993511_0001_m_000001_0
INFO [localfetcher#1] (org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl:314) 2016-02-15 20:26:12,522 ---- closeInMemoryFile -> map-output of size: 34, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->34
INFO [localfetcher#1] (org.apache.hadoop.mapreduce.task.reduce.LocalFetcher:141) 2016-02-15 20:26:12,529 ---- localfetcher#1 about to shuffle output of map attempt_local295993511_0001_m_000000_0 decomp: 38 len: 42 to MEMORY
INFO [localfetcher#1] (org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput:100) 2016-02-15 20:26:12,531 ---- Read 38 bytes from map-output for attempt_local295993511_0001_m_000000_0
INFO [localfetcher#1] (org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl:314) 2016-02-15 20:26:12,531 ---- closeInMemoryFile -> map-output of size: 38, inMemoryMapOutputs.size() -> 2, commitMemory -> 34, usedMemory ->72
INFO [EventFetcher for fetching Map Completion Events] (org.apache.hadoop.mapreduce.task.reduce.EventFetcher:76) 2016-02-15 20:26:12,532 ---- EventFetcher is interrupted.. Returning
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:12,533 ---- 2 / 2 copied.
INFO [pool-6-thread-1] (org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl:674) 2016-02-15 20:26:12,533 ---- finalMerge called with 2 in-memory map-outputs and 0 on-disk map-outputs
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Merger:597) 2016-02-15 20:26:12,648 ---- Merging 2 sorted segments
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Merger:696) 2016-02-15 20:26:12,649 ---- Down to the last merge-pass, with 2 segments left of total size: 58 bytes
INFO [pool-6-thread-1] (org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl:751) 2016-02-15 20:26:12,651 ---- Merged 2 segments, 72 bytes to disk to satisfy reduce memory limit
INFO [pool-6-thread-1] (org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl:781) 2016-02-15 20:26:12,652 ---- Merging 1 files, 74 bytes from disk
INFO [pool-6-thread-1] (org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl:796) 2016-02-15 20:26:12,653 ---- Merging 0 segments, 0 bytes from memory into reduce
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Merger:597) 2016-02-15 20:26:12,653 ---- Merging 1 sorted segments
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Merger:696) 2016-02-15 20:26:12,657 ---- Down to the last merge-pass, with 1 segments left of total size: 63 bytes
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:12,658 ---- 2 / 2 copied.
INFO [pool-6-thread-1] (org.apache.hadoop.conf.Configuration.deprecation:1049) 2016-02-15 20:26:12,982 ---- mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
Reducer setup: 0 =i,topN 0
Reducer setup: 1 =i,topN 0
Reducer setup: 2 =i,topN 0
Reducer setup: 3 =i,topN 0
Reduce Methond Invoked!!!
reduce: topN[0]1000
reduce: 0 =i,topN 0
reduce: 1 =i,topN 0
reduce: 2 =i,topN 0
Reduce Methond Invoked!!!
reduce: topN[0]1234
reduce: 0 =i,topN 0
reduce: 1 =i,topN 0
reduce: 2 =i,topN 1000
Reduce Methond Invoked!!!
reduce: topN[0]2000
reduce: 0 =i,topN 0
reduce: 1 =i,topN 1000
reduce: 2 =i,topN 1234
Reduce Methond Invoked!!!
reduce: topN[0]701
reduce: 0 =i,topN 701
reduce: 1 =i,topN 1000
reduce: 2 =i,topN 1234
Reduce Methond Invoked!!!
reduce: topN[0]9000
reduce: 0 =i,topN 1000
reduce: 1 =i,topN 1234
reduce: 2 =i,topN 2000
Reduce Methond Invoked!!!
reduce: topN[0]910
reduce: 0 =i,topN 910
reduce: 1 =i,topN 1234
reduce: 2 =i,topN 2000
reduce cleanup: 3 =i,topN 9000
reduce cleanup: 2 =i,topN 2000
reduce cleanup: 1 =i,topN 1234
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Task:1001) 2016-02-15 20:26:13,603 ---- Task:attempt_local295993511_0001_r_000000_0 is done. And is in the process of committing
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:13,607 ---- 2 / 2 copied.
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Task:1162) 2016-02-15 20:26:13,608 ---- Task attempt_local295993511_0001_r_000000_0 is allowed to commit now
INFO [pool-6-thread-1] (org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter:439) 2016-02-15 20:26:13,855 ---- Saved output of task 'attempt_local295993511_0001_r_000000_0' to hdfs://192.168.2.100:9000/library/outputTopN7/_temporary/0/task_local295993511_0001_r_000000
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.LocalJobRunner:591) 2016-02-15 20:26:13,856 ---- reduce > reduce
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.Task:1121) 2016-02-15 20:26:13,856 ---- Task 'attempt_local295993511_0001_r_000000_0' done.
INFO [pool-6-thread-1] (org.apache.hadoop.mapred.LocalJobRunner:325) 2016-02-15 20:26:13,857 ---- Finishing task: attempt_local295993511_0001_r_000000_0
INFO [Thread-3] (org.apache.hadoop.mapred.LocalJobRunner:456) 2016-02-15 20:26:13,857 ---- reduce task executor complete.
INFO [main] (org.apache.hadoop.mapreduce.Job:1373) 2016-02-15 20:26:14,502 ---- map 100% reduce 100%
INFO [main] (org.apache.hadoop.mapreduce.Job:1384) 2016-02-15 20:26:14,505 ---- Job job_local295993511_0001 completed successfully
INFO [main] (org.apache.hadoop.mapreduce.Job:1391) 2016-02-15 20:26:14,541 ---- Counters: 38
File System Counters
FILE: Number of bytes read=1569
FILE: Number of bytes written=754423
FILE: Number of read operations=0
FILE: Number of large read operations=0
FILE: Number of write operations=0
HDFS: Number of bytes read=512
HDFS: Number of bytes written=21
HDFS: Number of read operations=25
HDFS: Number of large read operations=0
HDFS: Number of write operations=5
Map-Reduce Framework
Map input records=13
Map output records=6
Map output bytes=56
Map output materialized bytes=80
Input split bytes=226
Combine input records=0
Combine output records=0
Reduce input groups=6
Reduce shuffle bytes=80
Reduce input records=6
Reduce output records=3
Spilled Records=12
Shuffled Maps =2
Failed Shuffles=0
Merged Map outputs=2
GC time elapsed (ms)=42
CPU time spent (ms)=0
Physical memory (bytes) snapshot=0
Virtual memory (bytes) snapshot=0
Total committed heap usage (bytes)=798633984
Shuffle Errors
BAD_ID=0
CONNECTION=0
IO_ERROR=0
WRONG_LENGTH=0
WRONG_MAP=0
WRONG_REDUCE=0
File Input Format Counters
Bytes Read=196
File Output Format Counters
Bytes Written=21