mapReduce共同好友案例(hadoop)

4.mapReduce共同好友案例

思路:

A: B,C

B:A,D

C:A,D

D:B,C

第一步

B A -----B是A的朋友

C A

A B

D B

A C

D C

B D

C D

 

A: B,C ----A是BC的朋友

B:A,D

C:A,D

D:B,C

第二步

B:C   A ----BC的共同好友A

A:D  B

A:D   C

B:C   D

 

B:C  A,D

A:D  B,C

第一步:

import java.io.IOException;

 

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;

import org.apache.hadoop.mapreduce.Mapper;

import org.apache.hadoop.mapreduce.Reducer;

import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

 

public class SameFriendOne {

 

/**

 * 给定的文件内容样式:A:B,C,D,F,E,O

 * A的好友是B,C,D,F,E,O

 * 同理其他的行也是这样

 * 我们就组合成一个(friend,person)的形式输出

 * 主要是求某一个是谁的共同好友

 * @author Administrator

 *

 */

public static class MyMapper extends Mapper<LongWritable, Text, Text, Text>{

@Override

protected void map(LongWritable key, Text value,Context context)throws IOException, InterruptedException {

String[] lines = value.toString().split(":");

String person = lines[0];

for (String friend : lines[1].split(",")) {

context.write(new Text(friend), new Text(person));

}

}

}

//B A

//C A

 

public static class MyReducer extends Reducer<Text, Text, Text, Text>{

@Override

protected void reduce(Text friend, Iterable<Text> values,Context context)

throws IOException, InterruptedException {

StringBuilder persons = new StringBuilder();

for (Text t : values) {

persons.append(t).append(",");

}

context.write(new Text(friend), new Text(persons.toString()));

}

}

 

public static void main(String[] args) throws Exception {

 

Configuration conf = new Configuration();

Job job = Job.getInstance(conf,"");

job.setJarByClass(SameFriendOne.class);

 

 

job.setMapperClass(MyMapper.class);

job.setOutputKeyClass(Text.class);

job.setOutputValueClass(Text.class);

 

job.setReducerClass(MyReducer.class);

job.setOutputKeyClass(Text.class);

job.setOutputValueClass(Text.class);

//FileInputFormat.addInputPath(job, new Path(args[0]));

FileInputFormat.setInputPaths(job, new Path("E:/files/input"));

FileOutputFormat.setOutputPath(job,new Path("E:/files/output") );

 

boolean isDone = job.waitForCompletion(true);

System.exit(isDone ? 0:1);

}

public static void outwindows(String path) {

 

 

}

 

}

 

 

A: B,C ----A是BC的朋友

B:A,D

C:A,D

D:B,C

 

 

第二步:

 

import java.io.IOException;

import java.util.Arrays;

 

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;

import org.apache.hadoop.mapreduce.Mapper;

import org.apache.hadoop.mapreduce.Reducer;

import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

 

public class SameFriendTwo {

 

 

 

public static class MyMapper extends Mapper<LongWritable, Text, Text, Text>{

@Override

protected void map(LongWritable key, Text value,Context context)throws IOException, InterruptedException {

String[] lines = value.toString().split("\t");

String friend = lines[0];

String[] persons = lines[1].split(",");

 

Arrays.sort(persons);

 

for (int i = 0; i < persons.length-1; i++) {

for (int j = i+1; j < persons.length; j++) {

context.write(new Text(persons[i]+"-"+persons[j]), new Text(friend));

}

}

 

}

}

 

public static class MyReducer extends Reducer<Text, Text, Text, Text>{

@Override

protected void reduce(Text per_per, Iterable<Text> friends,Context context)

throws IOException, InterruptedException {

StringBuilder sb = new StringBuilder();

for (Text t : friends) {

sb.append(t).append(",");

}

context.write(per_per, new Text(sb.toString().substring(0, sb.toString().length())));

}

}

 

public static void main(String[] args) throws Exception {

 

Configuration conf = new Configuration();

Job job = Job.getInstance(conf);

job.setJarByClass(SameFriendTwo.class);

 

job.setMapperClass(MyMapper.class);

 

 

job.setReducerClass(MyReducer.class);

job.setOutputKeyClass(Text.class);

job.setOutputValueClass(Text.class);

 

FileInputFormat.setInputPaths(job, new Path("E:/files/output"));

FileOutputFormat.setOutputPath(job,new Path("E:/files/output2") );

 

boolean isDone = job.waitForCompletion(true);

System.exit(isDone ? 0:1);

}

 

}

 

B:C  A,D

A:D  B,C

 

猜你喜欢

转载自blog.csdn.net/qq_37001101/article/details/84174945