
SparkSQL Study (3): Two Ways for SparkSQL to Read MySQL
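Both examples run with a local SparkContext and a HiveContext (the Spark 1.x SQL entry point) and need the MySQL connector jar (mysql-connector-java) on the classpath. The first passes the connection properties through a java.util.Properties object to read.jdbc; the second sets each parameter with option(...) on the generic jdbc data source.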

Method 1: pass the connection properties in a java.util.Properties object to read.jdbc

package Mysql

import java.util.Properties

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

object SparkSQL1 {

  def main(args: Array[String]): Unit = {

    // Local Spark context plus a HiveContext (the Spark 1.x SQL entry point).
    val conf = new SparkConf().setAppName(s"${this.getClass.getSimpleName}").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new HiveContext(sc)

    // JDBC connection settings passed as a java.util.Properties object.
    val properties = new Properties()
    properties.put("user", "root")
    properties.put("password", "root")
    val url = "jdbc:mysql://hadoop02:3306/hivedb"

    // Read the CDS table from MySQL into a DataFrame and print it.
    val df: DataFrame = sqlContext.read.jdbc(url, "CDS", properties)
    df.show()
  }
}
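For a large table, the same read.jdbc call has an overload that splits the read across several partitions on a numeric column, so Spark opens parallel JDBC connections instead of a single one. The sketch below reuses the connection settings from SparkSQL1; the partition column id and the bounds are assumptions about the CDS table, not part of the original example.

package Mysql

import java.util.Properties

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

object SparkSQL1Partitioned {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName(s"${this.getClass.getSimpleName}").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new HiveContext(sc)

    // Same connection settings as in SparkSQL1.
    val properties = new Properties()
    properties.put("user", "root")
    properties.put("password", "root")
    val url = "jdbc:mysql://hadoop02:3306/hivedb"

    // Partitioned read: Spark issues one JDBC query per partition, splitting the
    // range [lowerBound, upperBound] of the given numeric column.
    // The column name "id" and the bounds are assumptions about the CDS table.
    val df: DataFrame = sqlContext.read.jdbc(
      url,
      "CDS",
      "id",       // numeric partition column (assumed)
      1L,         // lower bound (assumed)
      100000L,    // upper bound (assumed)
      4,          // number of partitions / parallel JDBC connections
      properties)

    println(df.rdd.partitions.length)  // should print 4
    df.show()
  }
}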

Method 2: set each connection parameter with option(...) on read.format("jdbc") and call load()

package Mysql

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

object SparkSQL2 {

  def main(args: Array[String]): Unit = {

    // Same local Spark context and HiveContext as in the first example.
    val conf = new SparkConf().setAppName(s"${this.getClass.getSimpleName}").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new HiveContext(sc)

    // Every connection parameter is supplied through option(...) on the
    // generic "jdbc" data source, then load() executes the read.
    val frame: DataFrame = sqlContext.read.format("jdbc")
      .option("url", "jdbc:mysql://hadoop02:3306/hivedb")
      .option("user", "root")
      .option("password", "root")
      .option("dbtable", "CDS")
      .option("driver", "com.mysql.jdbc.Driver")
      .load()
    frame.show()
  }
}
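Whichever way the DataFrame is loaded, it can then be queried with plain SQL through the same HiveContext. A minimal sketch, continuing inside the main method of SparkSQL2 right after load(); the temporary table name cds and the LIMIT clause are only for illustration:

    // Register the MySQL-backed DataFrame as a temporary table
    // (registerTempTable is the Spark 1.x API; Spark 2.x+ uses createOrReplaceTempView).
    frame.registerTempTable("cds")

    // Run SQL against it and print the first rows.
    val top: DataFrame = sqlContext.sql("SELECT * FROM cds LIMIT 10")
    top.show()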