
Transferring data between HBase and HDFS

https://www.cnblogs.com/dongdone/p/5687786.html
###### Exporting HBase table data to HDFS (map-only job, no reduce phase)
package com.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class HbaseToHdfs {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		Job job = Job.getInstance(conf, HbaseToHdfs.class.getSimpleName());
		job.setJarByClass(HbaseToHdfs.class);
		job.setMapperClass(HbaseMapper.class);
		job.setNumReduceTasks(0); // map-only job: set the number of reducers to 0
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		// Scan the "t1" table and feed each row (Result) to HbaseMapper
		TableMapReduceUtil.initTableMapperJob(Bytes.toBytes("t1"), new Scan(), HbaseMapper.class, Text.class, Text.class, job);
		job.setOutputFormatClass(TextOutputFormat.class);
		FileOutputFormat.setOutputPath(job, new Path("hdfs://192.168.17.128:9000/t1"));
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

	public static class HbaseMapper extends TableMapper<Text, Text> {
		private Text outkey = new Text();
		private Text outvalue = new Text();

		// The ImmutableBytesWritable key is the row key; the Result carries that row's cells
		@Override
		protected void map(ImmutableBytesWritable key, Result value,
				Mapper<ImmutableBytesWritable, Result, Text, Text>.Context context)
				throws IOException, InterruptedException {
			byte[] name = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("name"));
			byte[] age = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("age"));
			byte[] gender = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("gender"));
			byte[] birthday = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("birthday"));
			outkey.set(key.get());
			// One tab-separated line per row; missing cells are written as "NULL"
			String temp = ((name == null || name.length == 0) ? "NULL" : new String(name)) + "\t"
					+ ((age == null || age.length == 0) ? "NULL" : new String(age)) + "\t"
					+ ((gender == null || gender.length == 0) ? "NULL" : new String(gender)) + "\t"
					+ ((birthday == null || birthday.length == 0) ? "NULL" : new String(birthday));
			System.out.println(temp);
			outvalue.set(temp);
			context.write(outkey, outvalue);
		}
	}

}
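
The job above assumes an HBase table named t1 with column family f1 and qualifiers name, age, gender and birthday. As a minimal sketch (not part of the original post; the class name CreateTestTable, the row key and the sample values are made up for illustration), the table and one test row can be created with the HBase 1.x client API:

package com.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTestTable {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		TableName t1 = TableName.valueOf("t1");
		try (Connection conn = ConnectionFactory.createConnection(conf);
				Admin admin = conn.getAdmin()) {
			// Create the table with family f1 if it does not exist yet
			if (!admin.tableExists(t1)) {
				HTableDescriptor desc = new HTableDescriptor(t1);
				desc.addFamily(new HColumnDescriptor("f1"));
				admin.createTable(desc);
			}
			try (Table table = conn.getTable(t1)) {
				// One sample row so the export job has something to scan
				Put put = new Put(Bytes.toBytes("row1"));
				put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"), Bytes.toBytes("Tom"));
				put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"), Bytes.toBytes("20"));
				put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("gender"), Bytes.toBytes("male"));
				put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("birthday"), Bytes.toBytes("1996-01-01"));
				table.put(put);
			}
		}
	}
}

After the export job finishes, the part files under hdfs://192.168.17.128:9000/t1 should contain one tab-separated line per scanned row.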
###### Importing data from HDFS into HBase
https://blog.csdn.net/qq_26091271/article/details/52586953
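
The linked post covers the reverse direction. As a rough, self-contained sketch of the same idea (not taken from the linked post; it assumes the tab-separated files produced by the export job above), a map-only job can parse each line into a Put and write it back into t1 through TableOutputFormat, which TableMapReduceUtil.initTableReducerJob wires up even when no reducer class is given:

package com.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class HdfsToHbase {

	public static class LineMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Expected line format: rowkey \t name \t age \t gender \t birthday
			String[] fields = value.toString().split("\t");
			if (fields.length < 5) {
				return; // skip malformed lines
			}
			byte[] rowKey = Bytes.toBytes(fields[0]);
			Put put = new Put(rowKey);
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"), Bytes.toBytes(fields[1]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"), Bytes.toBytes(fields[2]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("gender"), Bytes.toBytes(fields[3]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("birthday"), Bytes.toBytes(fields[4]));
			context.write(new ImmutableBytesWritable(rowKey), put);
		}
	}

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		Job job = Job.getInstance(conf, HdfsToHbase.class.getSimpleName());
		job.setJarByClass(HdfsToHbase.class);
		job.setMapperClass(LineMapper.class);
		job.setNumReduceTasks(0); // map-only: Puts go straight to TableOutputFormat
		FileInputFormat.addInputPath(job, new Path("hdfs://192.168.17.128:9000/t1"));
		// Sets TableOutputFormat and the target table; the reducer class is intentionally null
		TableMapReduceUtil.initTableReducerJob("t1", null, job);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}

With zero reduce tasks there is no shuffle or sort; each Put emitted by the mapper is written directly to the t1 table by TableOutputFormat.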