package com.baidu.buildtable
import org.apache.spark.sql.SparkSession
//import org.apache.hadoop.hive.ql.exec.spark.session.SparkSession
object SparHiv {

  /**
   * Reads a JSON file of people records, shows the full dataset, then queries
   * it via the DataFrame DSL and via Spark SQL.
   *
   * @param args optional; args(0) overrides the input JSON path
   *             (defaults to the original hard-coded desktop path).
   */
  def main(args: Array[String]): Unit = {
    // Input path is parameterized but keeps the original default for
    // backward compatibility.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "/Users/zhaojing/Desktop/peopleCP.json"

    val spark = SparkSession.builder
      .appName("my spark application")
      .master("local[2]")
      .getOrCreate()

    // Ensure the session is stopped even if reading/querying fails.
    try {
      // Read the data (Spark infers the schema from the JSON records).
      val df = spark.read.json(inputPath)

      // Show all rows.
      df.show()

      // DSL-style query.
      df.select("name").show()

      // SQL-style query. createOrReplaceTempView is idempotent, so rerunning
      // in the same session does not throw AnalysisException.
      df.createOrReplaceTempView("peopleCP")
      spark.sql("select * from peopleCP where age = 11").show()
    } finally {
      // Release the SparkSession and its resources.
      spark.stop()
    }
  }
}