// Required Maven dependencies:
//   org.apache.spark : spark-core_2.11      : 2.3.1
//   org.apache.spark : spark-sql_2.11       : 2.3.1
//   org.apache.spark : spark-hive_2.11      : 2.3.1
//   mysql            : mysql-connector-java : 5.1.38
// The Hadoop and Hive configuration files must be on the classpath:
//   core-site.xml, hdfs-site.xml, hive-site.xml
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
/**
 * Minimal smoke test for Spark-on-Hive connectivity: starts a local,
 * Hive-enabled [[SparkSession]] and prints the list of Hive databases.
 *
 * Requires hive-site.xml (and the Hadoop core/hdfs site files) on the
 * classpath so that `enableHiveSupport()` can locate the metastore.
 */
object SparkHiveStart {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .enableHiveSupport()
      .master("local[2]")
      // getSimpleName on a Scala object returns "SparkHiveStart$";
      // strip the trailing '$' for a clean application name.
      .appName(this.getClass.getSimpleName.stripSuffix("$"))
      .getOrCreate()
    try {
      session.sql("show databases").show()
    } finally {
      // Always release Spark resources, even if the query fails —
      // the original leaked the session on both paths.
      session.stop()
    }
  }
}



