- updateStateByKey 与 reduceByKey 的区别
- 代码演示 updateStateByKey 的使用
虚拟机端执行 nc -lk 8888 作为测试数据源
代码在 IDEA 中运行，从虚拟机上 nc -lk 8888 命令的输入中接收数据
package sparkstreaming
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
/**
 * Demo of `updateStateByKey`: a stateful word count over a socket text stream.
 *
 * Unlike `reduceByKey`, which only aggregates values within the current
 * micro-batch, `updateStateByKey` keeps per-key state across batches
 * (persisted via the checkpoint directory), producing a running total
 * for every word ever seen.
 *
 * Test source (run on the VM side): `nc -lk 8888`
 */
object Demo2UpdateStateByKey {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
    conf.setMaster("local[2]")
    // Fixed: app name now matches the object name (was inconsistently "Demo1").
    conf.setAppName("Demo2UpdateStateByKey")

    val sc: SparkContext = new SparkContext(conf)
    // One micro-batch every 5 seconds.
    val ssc: StreamingContext = new StreamingContext(sc, Durations.seconds(5))
    // updateStateByKey requires checkpointing so state survives across batches.
    ssc.checkpoint("SparkLearning/src/main/data/checkpoint")

    // Receive lines from host "master", port 8888 (fed by `nc -lk 8888`).
    val linesDS: ReceiverInputDStream[String] = ssc.socketTextStream("master", 8888)
    val words: DStream[String] = linesDS.flatMap(_.split(","))
    val kvDS: DStream[(String, Int)] = words.map((_, 1))

    /**
     * State-update function, invoked once per key per batch.
     *
     * `seq` holds all values for this key in the current batch;
     * `opt` holds the previously accumulated count (None the first time
     * the key is seen). Returning None would remove the key's state,
     * so we always return Some of the new running total.
     */
    val updateFun: (Seq[Int], Option[Int]) => Option[Int] = (seq, opt) => {
      val currCount: Int = seq.sum         // count within the current batch
      val befCount: Int = opt.getOrElse(0) // previously accumulated total
      Option(currCount + befCount)         // new running total
    }

    // Note: reduceByKey would only count within each batch; updateStateByKey
    // accumulates across batches using the checkpointed per-key state.
    val countDS: DStream[(String, Int)] = kvDS.updateStateByKey(updateFun)
    countDS.print()

    ssc.start()
    // Blocks until the streaming context is stopped (externally or on error).
    ssc.awaitTermination()
    // Redundant safeguard: awaitTermination only returns after the context
    // has already stopped; kept for clarity.
    ssc.stop()
  }
}



