Solving the topN problem in MapReduce
The topN problem:
The topN problem is to output the largest one or several values in each group. Why "one or several"? Because the algorithm for outputting one value is the same as for outputting several. Below we demonstrate with outputting one value per group.
The six order records are as follows:
o1,p2,250.0
o2,p3,500.0
o2,p4,100.0
o2,p5,700.0
o3,p1,150.0
o1,p1,200.0
Find the highest amount in each order (the top-two case works the same way); expected output:
o1 250.0
o2 700.0
o3 150.0
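After the map phase emits (orderId, amount) pairs, the shuffle groups all values that share a key, so each reduce call sees one order's amounts together:

o1 -> [250.0, 200.0]
o2 -> [500.0, 100.0, 700.0]
o3 -> [150.0]

The reducer then needs only a single pass over the grouped values to keep the maximum. The complete program: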
package com;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TopNTestMark_to_win {
    public static class TokenizerMapper extends Mapper<Object, Text, Text, DoubleWritable> {
        /* sample input:
        o1,p2,250.0
        o2,p3,500.0
        o2,p4,100.0
        o2,p5,700.0
        o3,p1,150.0
        o1,p1,200.0
        */
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            System.out.println("key is " + key.toString() + " value is " + value.toString());
            // each line has the form "orderId,productId,amount"
            String line = value.toString();
            String[] fields = line.split(",");
            String orderId = fields[0];
            double amount = Double.parseDouble(fields[2]);
            DoubleWritable amountDouble = new DoubleWritable(amount);
            // emit (orderId, amount); the shuffle groups all amounts of one order together
            context.write(new Text(orderId), amountDouble);
        }
    }

    public static class MaxReducer extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {
        DoubleWritable resultDouble = new DoubleWritable(0.0);

        public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
                throws IOException, InterruptedException {
            System.out.println("reduce key is " + key.toString());
            /* note: Double.MIN_VALUE is the smallest POSITIVE double, so it would
               break on negative amounts; -Double.MAX_VALUE is a safe initial maximum */
            double max = -Double.MAX_VALUE;
            for (DoubleWritable v2 : values) {
                if (v2.get() > max) {
                    max = v2.get();
                }
            }
            resultDouble.set(max);
            context.write(key, resultDouble);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "top n");
        job.setJarByClass(TopNTestMark_to_win.class);
        job.setMapperClass(TokenizerMapper.class);
        // job.setCombinerClass(MaxReducer.class);
        job.setReducerClass(MaxReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);
        // the output directory must not exist when the job starts, so remove leftovers
        File file = new File("e:/temp/output");
        if (file.exists() && file.isDirectory()) {
            deleteFile(file);
        }
        FileInputFormat.setInputPaths(job, new Path("e:/temp/input/serial.txt"));
        FileOutputFormat.setOutputPath(job, new Path("e:/temp/output"));
        boolean success = job.waitForCompletion(true);
        System.out.println("mytest hadoop " + (success ? "successful" : "failed"));
        System.exit(success ? 0 : 1);
    }

    public static boolean deleteFile(File dirFile) {
        if (!dirFile.exists()) {
            return false;
        }
        if (dirFile.isFile()) {
            return dirFile.delete();
        } else { /* an empty directory skips the for loop and falls through to dirFile.delete() below */
            for (File file : dirFile.listFiles()) {
                deleteFile(file);
            }
        }
        return dirFile.delete();
    }
}
Output:
o1 250.0
o2 700.0
o3 150.0
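As stated above, returning the top two per order uses the same structure; only the reducer body changes. Below is a minimal sketch of such a reducer (Top2Reducer is a hypothetical name, not part of the program above), assuming it is added as another static nested class with the java.util imports moved to the top of the file: it keeps a size-two min-heap while scanning the grouped values, then emits the survivors largest first.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.PriorityQueue;

public static class Top2Reducer extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {
    public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
            throws IOException, InterruptedException {
        // min-heap holding at most the two largest amounts seen so far
        PriorityQueue<Double> top = new PriorityQueue<Double>();
        for (DoubleWritable v : values) {
            top.offer(v.get());
            if (top.size() > 2) {
                top.poll(); // evict the smallest, keeping the two largest
            }
        }
        // the heap holds ascending values; sort descending before writing
        List<Double> result = new ArrayList<Double>(top);
        Collections.sort(result, Collections.reverseOrder());
        for (Double d : result) {
            context.write(key, new DoubleWritable(d));
        }
    }
}

To use it, replace job.setReducerClass(MaxReducer.class) with job.setReducerClass(Top2Reducer.class); on the sample data, order o2 would then output 700.0 followed by 500.0.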