
Hadoop (21) - Data Cleaning (ETL), Simple Version

We start from a web access log whose fields are separated by spaces.

The goal is to filter out records whose field count is invalid or whose HTTP status code indicates an error (>= 400).
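As an illustration (this line is made up, not taken from the original data), a space-delimited, Nginx-style access-log record in the format the Mapper below expects looks like this:

192.168.1.1 - - [14/Dec/2018:15:27:00 +0800] "GET /index.html HTTP/1.1" 200 612 "-" "Mozilla/5.0 (compatible)"

Splitting it on single spaces yields 13 fields, which is what the parsing logic in the Mapper relies on.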

LogBean

package com.nty.elt;

/**
 * author nty
 * date time 2018-12-14 15:27
 */
public class Log {

    private String remote_addr;      // client IP address
    private String remote_user;      // client user name; "-" when not available
    private String time_local;       // access time and time zone
    private String request;          // requested URL and HTTP protocol
    private String status;           // request status; 200 means success
    private String body_bytes_sent;  // size of the body sent to the client
    private String http_referer;     // the page the request was linked from
    private String http_user_agent;  // client browser information
    private boolean valid = true;    // whether the record is valid

    public String getRemote_addr() {
        return remote_addr;
    }

    public Log setRemote_addr(String remote_addr) {
        this.remote_addr = remote_addr;
        return this;
    }

    public String getRemote_user() {
        return remote_user;
    }

    public Log setRemote_user(String remote_user) {
        this.remote_user = remote_user;
        return this;
    }

    public String getTime_local() {
        return time_local;
    }

    public Log setTime_local(String time_local) {
        this.time_local = time_local;
        return this;
    }

    public String getRequest() {
        return request;
    }

    public Log setRequest(String request) {
        this.request = request;
        return this;
    }

    public String getStatus() {
        return status;
    }

    public Log setStatus(String status) {
        this.status = status;
        return this;
    }

    public String getBody_bytes_sent() {
        return body_bytes_sent;
    }

    public Log setBody_bytes_sent(String body_bytes_sent) {
        this.body_bytes_sent = body_bytes_sent;
        return this;
    }

    public String getHttp_referer() {
        return http_referer;
    }

    public Log setHttp_referer(String http_referer) {
        this.http_referer = http_referer;
        return this;
    }

    public String getHttp_user_agent() {
        return http_user_agent;
    }

    public Log setHttp_user_agent(String http_user_agent) {
        this.http_user_agent = http_user_agent;
        return this;
    }

    public boolean isValid() {
        return valid;
    }

    public Log setValid(boolean valid) {
        this.valid = valid;
        return this;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(this.valid);
        sb.append("\001").append(this.remote_addr);
        sb.append("\001").append(this.remote_user);
        sb.append("\001").append(this.time_local);
        sb.append("\001").append(this.request);
        sb.append("\001").append(this.status);
        sb.append("\001").append(this.body_bytes_sent);
        sb.append("\001").append(this.http_referer);
        sb.append("\001").append(this.http_user_agent);
        return sb.toString();
    }
}
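For reference, here is a minimal sketch (not part of the original post; the class name and sample values are made up) of how the fluent setters and toString() are meant to be used. The \001 (Ctrl-A) separator practically never appears in log text, and it is also Hive's default field delimiter, which makes the cleaned output easy to load into a Hive table later.

package com.nty.elt;

public class LogDemo {

    public static void main(String[] args) {
        // build a record with the chained (fluent) setters
        Log log = new Log()
                .setRemote_addr("192.168.1.1")
                .setRequest("/index.html")
                .setStatus("200");

        // prints the \001-separated record, starting with the valid flag
        System.out.println(log.toString());
    }
}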

Mapper class

package com.nty.elt;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * author nty
 * date time 2018-12-14 15:28
 */
public class LogMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

    private Text logKey = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // split the input line on spaces
        String[] fields = value.toString().split(" ");

        Log result = parseLog(fields);

        if (!result.isValid()) {
            return;
        }

        logKey.set(result.toString());

        // output the valid record as the key
        context.write(logKey, NullWritable.get());
    }

    private Log parseLog(String[] fields) {

        Log log = new Log();

        // a usable record has at least 12 space-separated fields
        if (fields.length > 11) {
            log.setRemote_addr(fields[0])
               .setRemote_user(fields[1])
               .setTime_local(fields[3].substring(1)) // drop the leading '[' of the time field
               .setRequest(fields[6])
               .setStatus(fields[8])
               .setBody_bytes_sent(fields[9])
               .setHttp_referer(fields[10]);
            // a user agent containing a space is split into two fields
            if (fields.length > 12) {
                log.setHttp_user_agent(fields[11] + " " + fields[12]);
            } else {
                log.setHttp_user_agent(fields[11]);
            }
            // status codes of 400 and above are HTTP errors
            if (Integer.parseInt(log.getStatus()) >= 400) {
                log.setValid(false);
            }
        } else {
            log.setValid(false);
        }

        return log;
    }

}
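To see which token ends up in which bean field, here is a small standalone check. It is only a sketch, not part of the original post, and it reuses the made-up sample line from the beginning:

package com.nty.elt;

public class SplitCheck {

    public static void main(String[] args) {
        // the made-up sample line from the beginning of the post
        String line = "192.168.1.1 - - [14/Dec/2018:15:27:00 +0800] \"GET /index.html HTTP/1.1\" 200 612 \"-\" \"Mozilla/5.0 (compatible)\"";

        String[] fields = line.split(" ");

        System.out.println(fields.length); // 13 -> passes the length check (> 11)
        System.out.println(fields[6]);     // /index.html -> request
        System.out.println(fields[8]);     // 200         -> status
        System.out.println(fields[10]);    // "-"         -> http_referer
    }
}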

Driver

package com.nty.elt;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * author nty
 * date time 2018-12-14 15:40
 */
public class LogDriver {

    public static void main(String[] args) throws Exception {
        // 1 Get the job instance
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        // 2 Set the jar by class
        job.setJarByClass(LogDriver.class);

        // 3 Set the Mapper class
        job.setMapperClass(LogMapper.class);

        // 4 Set the final output key/value types
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        // 5 Set the input and output paths
        FileInputFormat.setInputPaths(job, new Path("d:\\Hadoop_test"));
        FileOutputFormat.setOutputPath(job, new Path("d:\\Hadoop_test_out"));

        // 6 Submit the job
        job.waitForCompletion(true);

    }
}
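Since no Reducer is defined, the job can optionally be made map-only, so the cleaned records are written straight from the map tasks. This is only a suggestion, not part of the original code; the one extra line would go right after step 3 in the Driver above:

        // optional: there is no reduce logic, so skip the shuffle/reduce phase
        job.setNumReduceTasks(0);

With the default single reduce task the job still works; the records simply pass through the identity Reducer and come out sorted by key.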

 

Result

After the job finishes, d:\Hadoop_test_out contains the cleaned data: one line per valid record, in the \001-separated form produced by Log.toString().