Maven dependencies (pom.xml): org.scala-lang:scala-library:2.11.12, org.scala-lang:scala-compiler:2.11.12, org.scala-lang:scala-reflect:2.11.12, org.apache.spark:spark-core_2.11:2.4.5; build plugin: org.scala-tools:maven-scala-plugin:2.15.2 (goals: compile, testCompile)
package com.shujia.core
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
object Demo01WordCount {
  /**
   * Entry point: initializes a local Spark context, reads a text file
   * into an RDD of lines, and prints each line to stdout.
   *
   * Note: despite the "WordCount" name, this demo only reads and prints
   * lines — it is the first step of a word-count tutorial.
   */
  def main(args: Array[String]): Unit = {
    // Initialize the Spark environment.
    // Create the Spark configuration object.
    val conf: SparkConf = new SparkConf()
    // Set the application name shown in the Spark UI.
    conf.setAppName("Demo01WordCount")
    // Set the run mode; "local" means run locally in a single JVM.
    conf.setMaster("local")
    // 1. Create the Spark context — the entry point for all Spark operations.
    val sc: SparkContext = new SparkContext(conf)
    try {
      // 2. Read the file, building it into an RDD of lines.
      val linesRDD: RDD[String] = sc.textFile("Spark/data/words.txt")
      // Print every line. On a "local" master this runs in-process,
      // so output appears on the driver's stdout.
      linesRDD.foreach(println)
    } finally {
      // Fix: the original never stopped the context. Always release
      // Spark resources (executors, UI, shuffle files) on exit.
      sc.stop()
    }
  }
}
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop
java,spark,java,hadoop



