1. 程式人生 >>【Hadoop學習之十】MapReduce案例分析二-好友推薦

【Hadoop學習之十】MapReduce案例分析二-好友推薦

標籤:friends、IOException、間接關係、Java、Linux、return、except、new()

環境
  虛擬機:VMware 10
  Linux版本:CentOS-6.5-x86_64
  客戶端:Xshell4
  FTP:Xftp4
  jdk8
  hadoop-3.1.1

最應該推薦的好友TopN,如何排名?

技術分享圖片

tom hello hadoop cat
world hadoop hello hive
cat tom hive
mr hive hello
hive cat hadoop world hello mr
hadoop tom hive world
hello tom world hive mr
package test.mr.fof;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.GenericOptionsParser;
public class MyFOF { /** * 最應該推薦的好友TopN,如何排名? * @param args * @throws Exception */ public static void main(String[] args) throws Exception { Configuration conf = new Configuration(true); String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); conf.set(
"sleep", otherArgs[2]); Job job = Job.getInstance(conf,"FOF"); job.setJarByClass(MyFOF.class); //Map job.setMapperClass(FMapper.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); //Reduce job.setReducerClass(FReducer.class); //HDFS 輸入路徑 Path input = new Path(otherArgs[0]); FileInputFormat.addInputPath(job, input ); //HDFS 輸出路徑 Path output = new Path(otherArgs[1]); if(output.getFileSystem(conf).exists(output)){ output.getFileSystem(conf).delete(output,true); } FileOutputFormat.setOutputPath(job, output ); System.exit(job.waitForCompletion(true) ? 0 :1); } // tom hello hadoop cat // world hadoop hello hive // cat tom hive // mr hive hello // hive cat hadoop world hello mr // hadoop tom hive world // hello tom world hive mr }
package test.mr.fof;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.StringUtils;

public class FMapper extends Mapper<LongWritable, Text, Text, IntWritable>{

    Text mkey= new Text();
    IntWritable mval = new IntWritable();
    
    @Override
    protected void map(LongWritable key, Text value,Context context)
            throws IOException, InterruptedException {
        
        //value: 0-直接關系  1-間接關系
        //tom       hello hadoop cat   :   hello:hello  1
        //hello     tom world hive mr      hello:hello  0


        String[] strs = StringUtils.split(value.toString(), ‘ ‘);
        
        String user=strs[0];
        String user01=null;
        for(int i=1;i<strs.length;i++){
            //與好友清單中好友屬於直接關系
            mkey.set(fof(strs[0],strs[i]));  
            mval.set(0); 
            context.write(mkey, mval);  
            
            for (int j = i+1; j < strs.length; j++) {
                Thread.sleep(context.getConfiguration().getInt("sleep", 0));
                //好友列表內 成員之間是間接關系
                mkey.set(fof(strs[i],strs[j]));  
                mval.set(1);  
                context.write(mkey, mval);  
            }
        }
    }
    
    public static String fof(String str1  , String str2){
        
        
        if(str1.compareTo(str2) > 0){
            //hello,hadoop
            return str2+":"+str1;
        }
        //hadoop,hello
        return str1+":"+str2;
    }
    
}
package test.mr.fof;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Reducer for the FOF job. For each user pair it receives a stream of flags:
 * 0 means the pair are already direct friends, 1 means they share one common
 * friend. Pairs with any direct relationship are suppressed; the rest are
 * emitted with their common-friend count, which ranks recommendation strength.
 */
public class FReducer  extends  Reducer<Text, IntWritable, Text, Text> {
    
    Text rval = new Text();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> vals, Context context)
            throws IOException, InterruptedException 
    {
        // Example stream for one key:
        //   hadoop:hello -> 1, 0, 1, 1
        // Summing the flags counts common friends; any 0 marks the pair
        // as already being direct friends.
        int commonFriends = 0;
        boolean alreadyFriends = false;

        for (IntWritable v : vals) {
            int flag = v.get();
            if (flag == 0) {
                alreadyFriends = true;
            }
            commonFriends += flag;
        }

        // Only indirect-only pairs are candidates for recommendation.
        if (!alreadyFriends) {
            rval.set(String.valueOf(commonFriends));
            context.write(key, rval);
        }
    }
}

【Hadoop學習之十】MapReduce案例分析二-好友推薦