import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
// Mapper: each input line is one person followed by that person's friend list,
// e.g. "tom hello hadoop cat" means tom's friends are hello, hadoop, and cat.
class PersonMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        // Split on whitespace; "\\s+" also tolerates repeated spaces between names.
        String[] split = line.split("\\s+");
        // Emit every pair of names on the line. A pair containing the first
        // token (the person) is a direct friendship, tagged 1; a pair of two
        // of the person's friends is only a potential indirect friendship,
        // tagged -1. The reducer combines the tags for each pair.
        for (int i = 0; i < split.length - 1; i++) {
            for (int j = i + 1; j < split.length; j++) {
                String res = compare(split[i], split[j]);
                if (i == 0) {
                    context.write(new Text(res), new LongWritable(1L));
                } else {
                    context.write(new Text(res), new LongWritable(-1L));
                }
            }
        }
    }

    // Normalize a pair of names into a canonical "smaller-larger" key so the
    // same pair always lands on the same reducer regardless of emit order.
    public static String compare(String s1, String s2) {
        if (s1.compareTo(s2) < 0) {
            return s1 + "-" + s2;
        } else {
            return s2 + "-" + s1;
        }
    }
}
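// Worked shuffle trace for the sample line "tom hello hadoop cat":
//   hello-tom -> 1, hadoop-tom -> 1, cat-tom -> 1          (direct pairs)
//   hadoop-hello -> -1, cat-hello -> -1, cat-hadoop -> -1  (indirect candidates)
// If another person's line also lists two of these names, all tags for that
// pair meet at the same reducer, where a single 1 marks the pair as direct.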
// Reducer: a pair is a direct friendship if any mapper tagged it 1; if every
// tag is -1, the two people only share at least one common friend.
class PersonReducer extends Reducer<Text, LongWritable, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
        boolean indirect = true;
        for (LongWritable value : values) {
            if (value.get() == 1) {
                context.write(key, new Text("direct friends"));
                indirect = false;
                break;
            }
        }
        if (indirect) {
            context.write(key, new Text("indirect friends"));
        }
    }
}
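// Note: if the input lists friendships symmetrically (tom's line contains
// hello and hello's line contains tom), the tag 1 arrives more than once for
// that pair; the early break above still yields a single output line per pair.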
public class PersonDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");

        Job job = Job.getInstance(conf);
        job.setJarByClass(PersonDemo.class);
        job.setJobName("friend relationship statistics");
        job.setMapperClass(PersonMapper.class);
        job.setReducerClass(PersonReducer.class);

        // The map output value type (LongWritable) differs from the final
        // output value type (Text), so both pairs must be declared explicitly.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        boolean b = job.waitForCompletion(true);
        if (b) {
            System.out.println("Friend relationship MapReduce job completed successfully!");
        } else {
            System.out.println("Friend relationship MapReduce job failed.");
        }
    }
}
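To try the job end to end, a minimal sketch (the jar name friend.jar and the HDFS paths below are hypothetical, not part of the original post):

hadoop jar friend.jar PersonDemo /friend/input /friend/output

With the single sample line tom hello hadoop cat as input, the default TextOutputFormat writes one tab-separated line per pair:

cat-hadoop	indirect friends
cat-hello	indirect friends
cat-tom	direct friends
hadoop-hello	indirect friends
hadoop-tom	direct friends
hello-tom	direct friends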