1. 程式人生 > >HBASE 使用java api 實現增刪改查例項

HBASE 使用java api 實現增刪改查例項

我們使用windows 本地myeclipse測試程式,具體搭建教程請看

首先建立maven 專案,並修改pom.xml,匯入hbase 第三方jar包,首次會自動下載

<dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>0.98.6-hadoop2</version>
 </dependency>
 <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-client</artifactId>
      <version>0.98.6-hadoop2</version>
</dependency>

建一個resource folder ,將hbase-site.xml 與 log4j.properties 放入該資料夾


hbase建立一個測試用的namespace


以下是java 程式碼

封裝工具類

package com.hadoop.hbaseTest.util;

/**
 * 封裝工具類
 */
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;

public class HbaseUtil {
	/**
	 * Opens an HTable handle for the given table name.
	 * A fresh HBaseConfiguration is built each call (it reads
	 * hbase-site.xml from the classpath). Returns null if the
	 * table cannot be opened.
	 */
	public static HTable getConf(String tableName) {
		Configuration conf = HBaseConfiguration.create();
		try {
			return new HTable(conf, tableName);
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Closes the table handle, logging (but not propagating)
	 * any I/O error raised during close.
	 */
	public static void closeHtable(HTable htbl) {
		try {
			htbl.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

}

dao層的介面

package com.hadoop.hbaseTest.dao;
/**
 * dao層介面
 * @author Administrator
 *
 */
public interface OperationDao {

	// Insert one row (fixed table/family/qualifier in the implementation).
	public void putData(String rowKey);

	// Delete the latest version of the cell at the given row key.
	public void deleteData(String rowKey);

	// Overwrite the cell at the given row key with a new value.
	public void updateData(String rowKey);

	// Read and print the cell at the given row key.
	public void getData(String rowKey);

	// Bulk-insert many rows; rowKey is used as the row-key prefix.
	public void putAllData(String rowKey);

	// Scan and print rows in [startRow, stopRow).
	public void scanData(String startRow, String stopRow);

	// Create a table with the implementation's default column family.
	public void createTable(String tableName);

	// Disable and then drop the given table.
	public void deleteTable(String tableName);

}

dao層介面實現類

package com.hadoop.hbaseTest.dao.daoImpl;
/**
 * dao層介面實現類
 */
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.common.IOUtils;
import com.hadoop.hbaseTest.dao.OperationDao;
import com.hadoop.hbaseTest.util.HbaseUtil;

public class OperationDaoImpl implements OperationDao {

	// Target table for all row operations.
	private static final String TABLE_NAME = "test:testApi";
	// Column family / qualifier every operation reads or writes.
	private static final byte[] FAMILY = Bytes.toBytes("info");
	private static final byte[] QUALIFIER = Bytes.toBytes("num");
	// Flush the bulk-load buffer every BATCH_SIZE puts.
	private static final int BATCH_SIZE = 100000;

	/**
	 * Inserts one row: info:num = "hadoop1".
	 * The table handle is always closed, even if the put fails
	 * (the original code leaked it on exception).
	 */
	@Override
	public void putData(String rowKey) {
		HTable htbl = HbaseUtil.getConf(TABLE_NAME);
		try {
			Put put = new Put(Bytes.toBytes(rowKey));
			put.add(FAMILY, QUALIFIER, Bytes.toBytes("hadoop" + 1));
			htbl.put(put);
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			HbaseUtil.closeHtable(htbl);
		}
	}

	/**
	 * Deletes the LATEST version of info:num at the given row key.
	 * Note: deleteColumn removes one version per call; older versions
	 * reappear until each is deleted (matches the observed behavior
	 * of needing two runs to fully clear the cell).
	 */
	@Override
	public void deleteData(String rowKey) {
		HTable htbl = HbaseUtil.getConf(TABLE_NAME);
		try {
			Delete del = new Delete(Bytes.toBytes(rowKey));
			del.deleteColumn(FAMILY, QUALIFIER);
			htbl.delete(del);
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			HbaseUtil.closeHtable(htbl);
		}
	}

	/**
	 * Overwrites info:num with "hadoop3". In HBase an update is just
	 * a put with a newer timestamp.
	 */
	@Override
	public void updateData(String rowKey) {
		HTable htbl = HbaseUtil.getConf(TABLE_NAME);
		try {
			Put put = new Put(Bytes.toBytes(rowKey));
			put.add(FAMILY, QUALIFIER, Bytes.toBytes("hadoop" + 3));
			htbl.put(put);
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			HbaseUtil.closeHtable(htbl);
		}
	}

	/**
	 * Fetches info:num at the given row key and prints each cell as
	 * "family:qualifier->value" on a single line.
	 */
	@Override
	public void getData(String rowKey) {
		HTable htbl = HbaseUtil.getConf(TABLE_NAME);
		try {
			Get get = new Get(Bytes.toBytes(rowKey));
			get.addColumn(FAMILY, QUALIFIER);
			Result rs = htbl.get(get);
			for (Cell cell : rs.rawCells()) {
				System.out.print(Bytes.toString(CellUtil.cloneFamily(cell)) + ":");
				System.out.print(Bytes.toString(CellUtil.cloneQualifier(cell)) + "->");
				System.out.print(Bytes.toString(CellUtil.cloneValue(cell)));
			}
			System.out.println();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			HbaseUtil.closeHtable(htbl);
		}
	}

	/**
	 * Bulk-inserts 1,000,000 rows keyed rowKey+1 .. rowKey+1000000,
	 * flushing to the server every BATCH_SIZE puts. The final flush
	 * is skipped when the buffer is empty (the original always issued
	 * one extra, possibly-empty put).
	 */
	@Override
	public void putAllData(String rowKey) {
		HTable htbl = HbaseUtil.getConf(TABLE_NAME);
		try {
			List<Put> buffer = new ArrayList<Put>(BATCH_SIZE);
			for (long i = 1; i <= 1000000; i++) {
				Put put = new Put(Bytes.toBytes(rowKey + i));
				put.add(FAMILY, QUALIFIER, Bytes.toBytes("hadoop" + i));
				buffer.add(put);
				// Flush a full batch and start a new buffer.
				if (i % BATCH_SIZE == 0) {
					htbl.put(buffer);
					buffer = new ArrayList<Put>(BATCH_SIZE);
				}
			}
			// Flush the remainder only if anything is left over.
			if (!buffer.isEmpty()) {
				htbl.put(buffer);
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			HbaseUtil.closeHtable(htbl);
		}
	}

	/**
	 * Scans info:num over [startRow, stopRow) and prints each row as
	 * "rowkey<TAB>family:qualifier->value".
	 */
	@Override
	public void scanData(String startRow, String stopRow) {
		HTable htbl = null;
		ResultScanner rss = null;
		try {
			htbl = HbaseUtil.getConf(TABLE_NAME);
			Scan scan = new Scan(Bytes.toBytes(startRow), Bytes.toBytes(stopRow));
			scan.addColumn(FAMILY, QUALIFIER);
			rss = htbl.getScanner(scan);
			for (Result rs : rss) {
				System.out.print(Bytes.toString(rs.getRow()) + "\t");
				for (Cell cell : rs.rawCells()) {
					System.out.print(Bytes.toString(CellUtil.cloneFamily(cell)) + ":");
					System.out.print(Bytes.toString(CellUtil.cloneQualifier(cell)) + "->");
					System.out.print(Bytes.toString(CellUtil.cloneValue(cell)));
				}
				System.out.println();
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// Close scanner before table; both tolerate null.
			IOUtils.closeStream(rss);
			IOUtils.closeStream(htbl);
		}
	}

	/**
	 * Creates the table with a single "info" column family.
	 * The admin handle is closed in finally (the original leaked it
	 * when createTable threw).
	 */
	@Override
	public void createTable(String tableName) {
		Configuration conf = HBaseConfiguration.create();
		HBaseAdmin hba = null;
		try {
			hba = new HBaseAdmin(conf);
			HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
			HColumnDescriptor info = new HColumnDescriptor("info");
			info.setValue("num", "003");
			htd.addFamily(info);
			hba.createTable(htd);
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			IOUtils.closeStream(hba);
		}
	}

	/**
	 * Disables then drops the table. A table must be disabled before
	 * HBase allows it to be deleted.
	 */
	@Override
	public void deleteTable(String tableName) {
		Configuration conf = HBaseConfiguration.create();
		HBaseAdmin hba = null;
		try {
			hba = new HBaseAdmin(conf);
			hba.disableTable(tableName);
			hba.deleteTable(tableName);
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			IOUtils.closeStream(hba);
		}
	}
}

測試類

package com.hadoop.hbaseTest.service;
/**
 * 測試類
 */

import com.hadoop.hbaseTest.dao.daoImpl.OperationDaoImpl;

public class Test {
	/**
	 * Smoke-tests every DAO operation in order: create the table,
	 * run single-row CRUD, bulk-load, scan a key range, then drop
	 * the table again.
	 */
	public static void main(String[] args) {
		final String tableName = "test:testApi";
		final String rowKey = "rk";
		OperationDaoImpl dao = new OperationDaoImpl();

		dao.createTable(tableName);
		dao.putData(rowKey);
		dao.getData(rowKey);
		dao.updateData(rowKey);
		dao.deleteData(rowKey);
		dao.putAllData(rowKey);
		dao.scanData("rk1", "rk1200");
		dao.deleteTable(tableName);
	}
}

下面是測試結果

建立表


執行後,建立表成功


插入資料


執行插入成功


查詢資料


執行查詢成功


修改資料


執行修改成功,hadoop1 變成hadoop3


刪除資料


執行一次,刪除最近時間戳版本,回到hadoop1


再次執行,刪除前一時間戳版本,刪除成功,資料清空


批量匯入資料


執行,去hbase 查詢資料,插入成功


掃描表


執行掃描成功


刪除表


執行刪除成功