
Spark with HBase in Scala: table operations (create, read, update, delete)

Original source: http://www.ithao123.cn/content-2523824.html



Configure the dependencies in build.sbt (a blank line is required between settings):

name := "test2"


scalaVersion := "2.10.4"


libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "1.0.0",
  "org.apache.hbase" % "hbase" % "0.98.9-hadoop1",
  "org.apache.hbase" % "hbase-client" % "0.98.9-hadoop1",
  "org.apache.hbase" % "hbase-common" % "0.98.9-hadoop1",
  "org.apache.hbase" % "hbase-server" % "0.98.9-hadoop1"
)


version := "1.0"

Set an environment variable named SPARK_TEST_JAR pointing to the JAR this project produces; the SparkContext created below picks it up via System.getenv.

————————————————————————————————————————

import org.apache.hadoop.hbase.client._

import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase._
import org.apache.spark._
/**
 * Created by gongxuan on 2/3/15.
 * Environment: hadoop 1.2.1, scala 2.10.4, hbase 0.98.9, spark 1.0.0
 */
object HBaseTest {
  def main(args: Array[String]) {
    //create table "test1"
    val table_name = "test1"
    val conf = HBaseConfiguration.create
    val admin = new HBaseAdmin(conf)
    if (admin.tableExists(table_name))
    {
      admin.disableTable(table_name)

      admin.deleteTable(table_name)
    }
    val htd = new HTableDescriptor(table_name)
    val hcd = new HColumnDescriptor("id")
    //add a column family to the table
    htd.addFamily(hcd)
    admin.createTable(htd)


    //put data to HBase table
    val tablename = htd.getName
    val table = new HTable(conf, tablename)
    val databytes = Bytes.toBytes("id")
    for (c <- 1 to 10) {
      val row = Bytes.toBytes("row" + c.toString)
      val p1 = new Put(row)
      p1.add(databytes, Bytes.toBytes(c.toString), Bytes.toBytes("value" + c.toString))
      table.put(p1)
    }
    for (c <- 1 to 10) {
      val g = new Get(Bytes.toBytes("row" + c.toString))
      println("Get:" + table.get(g))
    }
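
    // Optional sanity check (a minimal sketch): read the rows back with a plain
    // HBase client Scan before handing the scan over to Spark below.
    val scanner = table.getScanner(new Scan())
    var scanResult = scanner.next()
    while (scanResult != null) {
      println("Scan:" + scanResult)
      scanResult = scanner.next()
    }
    scanner.close()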


    //query the table with Spark
    val config = HBaseConfiguration.create
    val sc = new SparkContext("local", "HBaseTest",
      System.getenv("SPARK_HOME"), Seq(System.getenv("SPARK_TEST_JAR")))


    config.set(TableInputFormat.INPUT_TABLE, table_name)

    // Create an RDD from the HBase table through the Hadoop InputFormat API

    val hbaseRDD = sc.newAPIHadoopRDD(config, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])


    val count = hbaseRDD.count()
    println("HbaseRDD Count:" + count)

    hbaseRDD.cache()

    // take() returns Array[(org.apache.hadoop.hbase.io.ImmutableBytesWritable, org.apache.hadoop.hbase.client.Result)]
    // the key is an ImmutableBytesWritable (the row key), the value is the HBase Result
    // res(0)._2 is the second element of the pair, i.e. the Result

    val res = hbaseRDD.take(count.toInt)
    for (j <- 1 to count.toInt) {

      val rs = res(j - 1)._2

      // rs.raw returns every cell of the row as Array[org.apache.hadoop.hbase.KeyValue]
      val kvs = rs.raw
      for (kv <- kvs) // iterate over the cells of the row
        println("row:" + new String(kv.getRow()) +
          " cf:" + new String(kv.getFamily()) +
          " column:" + new String(kv.getQualifier()) +
          " value:" + new String(kv.getValue()))
    }
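
    // Alternative sketch: the same extraction expressed as RDD transformations
    // instead of take(); each record in hbaseRDD is an (ImmutableBytesWritable, Result) pair.
    hbaseRDD.flatMap { case (_, result) =>
      result.raw.map(kv => "row:" + new String(kv.getRow()) +
        " cf:" + new String(kv.getFamily()) +
        " column:" + new String(kv.getQualifier()) +
        " value:" + new String(kv.getValue()))
    }.collect().foreach(println)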
    //drop table
    admin.disableTable(table_name)
    admin.deleteTable(table_name)
  }
}
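
The title promises insert, delete, update and query, but the program above only creates a table, writes rows, reads them back and drops the whole table. A minimal sketch of a row-level update and delete with the same HBase client API could look like the following (it reuses the imports above; the object name UpdateDeleteSketch and the "new_value1" literal are only for illustration):

object UpdateDeleteSketch {
  def main(args: Array[String]) {
    val conf = HBaseConfiguration.create
    val table = new HTable(conf, "test1")

    // Update: a Put on an existing row key/column simply overwrites the cell
    // (HBase stores it as the newest version).
    val p = new Put(Bytes.toBytes("row1"))
    p.add(Bytes.toBytes("id"), Bytes.toBytes("1"), Bytes.toBytes("new_value1"))
    table.put(p)

    // Delete: remove a whole row by its key (a column family or a single
    // column can also be targeted via deleteFamily / deleteColumn).
    val d = new Delete(Bytes.toBytes("row1"))
    table.delete(d)

    table.close()
  }
}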