updateStateByKey can be used to create a stateful DStream based on incoming data. It requires an update function:
import org.apache.spark.util.StatCounter

object UpdateStateFunctions {
  // Fold the current batch's values into the running StatCounter,
  // starting a fresh counter for keys that have no previous state.
  def updateState(current: Seq[Double], previous: Option[StatCounter]): Option[StatCounter] = {
    previous.map(s => s.merge(current)).orElse(Some(StatCounter(current)))
  }
}
which takes a sequence of the current values and an Option of the previous state, and returns an Option of the updated state.
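Since updateState is an ordinary function, its merge semantics can be checked in isolation, for example in spark-shell after pasting in the object above (the input values here are illustrative):

val first = UpdateStateFunctions.updateState(Seq(5.0), None)
// first holds a StatCounter with count 1 and mean 5.0
val second = UpdateStateFunctions.updateState(Seq(1.0, 99.0), first)
// second reflects all three values: count 3, mean 35.0

Note that StatCounter.merge mutates and returns the existing counter rather than allocating a new one, which keeps state updates cheap. Putting this all together: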
import scala.collection.mutable.Queue

import org.apache.spark._
import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.util.StatCounter

object UpdateStateByKeyApp {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "updateStateByKey", new SparkConf())
    val ssc = new StreamingContext(sc, Seconds(10))
    // updateStateByKey requires a checkpoint directory to persist state.
    ssc.checkpoint("/tmp/chk")

    // Each queued RDD is consumed as one 10-second batch. The empty RDDs
    // demonstrate that per-key state survives batches with no new data.
    val queue = Queue(
      sc.parallelize(Seq(("foo", 5.0), ("bar", 1.0))),
      sc.parallelize(Seq(("foo", 1.0), ("foo", 99.0))),
      sc.parallelize(Seq(("bar", 22.0), ("foo", 1.0))),
      sc.emptyRDD[(String, Double)],
      sc.emptyRDD[(String, Double)],
      sc.emptyRDD[(String, Double)],
      sc.parallelize(Seq(("foo", 1.0), ("bar", 1.0)))
    )

    val inputStream: DStream[(String, Double)] = ssc.queueStream(queue)
    // Print the running per-key statistics after every batch.
    inputStream.updateStateByKey(UpdateStateFunctions.updateState _).print()

    ssc.start()
    ssc.awaitTermination()
    ssc.stop()
  }
}
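Running the app prints the merged statistics for each key once per batch interval; the first batch should produce output along these lines (timestamp illustrative, element formatting as given by StatCounter.toString):

-------------------------------------------
Time: 1469875280000 ms
-------------------------------------------
(foo,(count: 1, mean: 5.000000, stdev: 0.000000, max: 5.000000, min: 5.000000))
(bar,(count: 1, mean: 1.000000, stdev: 0.000000, max: 1.000000, min: 1.000000))

During the three empty batches both keys are printed again with unchanged statistics, because updateStateByKey keeps invoking the update function (with an empty value sequence) for every key that already has state.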