// Spark demo code (代码)
package com.test
import org.apache.spark.sql.SparkSession
object scalaDemo1 {
  /** Entry point: builds a local SparkSession, reads back a Spark
    * configuration property set at build time, prints it, and shuts
    * the session down.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .config("spark.ui.port", "30101")
      // Values set here (or via --conf at spark-submit time) are readable
      // back from sparkContext.getConf below.
      .config("spark.executor.cores", "10")
      .getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    try {
      // For implicit conversions like converting RDDs to DataFrames
      import org.apache.spark.sql.functions._
      import spark.implicits._ // resolves "cannot overload method agg" errors
      import spark.sql
      import org.apache.spark.sql.types._ // for casts, e.g. df1.select('event.cast(IntegerType))

      // Read a Spark job parameter back from the runtime configuration.
      println(spark.sparkContext.getConf.get("spark.executor.cores").toInt)
    } finally {
      // Always release the session, even if the job body throws.
      spark.stop()
    }
  }
}



