spark用scala讀取hive表資料
阿新 • 發佈:2019-02-04
spark1.6寫法:
// Spark 1.6 style: build a HiveContext on top of a plain SparkContext
// to run Hive SQL. (HiveContext is deprecated in Spark 2.x in favor of
// SparkSession with Hive support — see the second snippet.)
val conf = new SparkConf()
val sc = new SparkContext(conf)
val hiveContext = new HiveContext(sc)
// Switch the session to the target Hive database.
hiveContext.sql("use abc_hive_db")
// Run the Hive query.
// NOTE(review): hiveContext.sql returns a DataFrame, not an RDD —
// the name `rdd` is misleading (kept as-is in case later code uses it).
val rdd = hiveContext.sql("select name,productInfo from pro_table where date>='20170110' ")
spark2.x寫法:
// Spark 2.x style: a single SparkSession with Hive support replaces the
// old SparkContext + HiveContext pair.
val sparkConf = new SparkConf()
  .setAppName("adver")
  .setMaster("local[*]")

val sparkSession = SparkSession
  .builder()
  .config(sparkConf)
  .enableHiveSupport() // required to query Hive-managed tables
  .getOrCreate()

// Switch the session to the target Hive database.
sparkSession.sql("use abc_hive_db")

// Run the Hive query; despite the name, `rdd` holds a DataFrame.
val rdd = sparkSession.sql("select name,productInfo from pro_table where date>='20170110' ")