
Big Data Series: Spark Streaming window operations on stream data
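
The self-contained example below (a local word count over a socket text stream, with each input line treated as one word) demonstrates two ways of windowing a DStream: the plain window transformation followed by reduceByKey, and the incremental reduceByKeyAndWindow variant that uses an inverse function to avoid recomputing the overlapping portion of each window.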

package com.test

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

// Window operations on a DStream
object SparkStreamingWindow {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setAppName("SparkStreamingWindow").setMaster("local[*]")
    val streamingContext = new StreamingContext(sparkConf, Seconds(5) /* batch interval */)
    // Checkpointing is required by the inverse-function variant of reduceByKeyAndWindow used below.
    streamingContext.checkpoint("data/cpDir")

    val dstream: ReceiverInputDStream[String] = streamingContext.socketTextStream("localhost", 8600)
    val wordToMap = dstream.map((_, 1)) // each input line is treated as one word
    // The window duration must be an integer multiple of the batch interval, e.g. 10s = 5s * 2.
    // If no slide duration is given, it defaults to the batch interval, so consecutive windows overlap;
    // here the slide equals the window duration, so these windows tumble without overlapping.
    val windowDStream: DStream[(String, Int)] = wordToMap.window(Seconds(10) /* window duration */ , Seconds(10) /* slide duration */)
    windowDStream.reduceByKey(_ + _).print()
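
    // A sketch of the single-argument overload (an addition to the original example):
    // with no slide duration given, the slide defaults to the batch interval (5s here),
    // so successive 10-second windows overlap by 5 seconds.
    val overlapping: DStream[(String, Int)] = wordToMap.window(Seconds(10))
    overlapping.reduceByKey(_ + _).print()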

    // When the window duration exceeds the slide duration, reduceByKeyAndWindow with an
    // inverse function avoids recomputing the overlapping part of each window: values
    // entering the window are added, and values sliding out of the window are subtracted.
    wordToMap.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y, // add new values entering the window
      (x: Int, y: Int) => x - y, // inverse function: subtract old values leaving the window
      Seconds(10) /* window duration */ ,
      Seconds(5) /* slide duration */
    ).print()
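
    // A hedged sketch, not in the original: with the inverse function, keys whose count
    // has dropped to zero linger in the stream's state; the filterFunc parameter of this
    // reduceByKeyAndWindow overload can drop them. The partition count of 2 is an
    // arbitrary choice for local mode.
    wordToMap.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y,
      (x: Int, y: Int) => x - y,
      Seconds(10), // window duration
      Seconds(5), // slide duration
      2, // numPartitions
      (kv: (String, Int)) => kv._2 > 0 // keep only keys with a positive count
    ).print()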

    streamingContext.start()
    streamingContext.awaitTermination()

  }

}
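
To try the example, start a text source on the port used above before running the program, for example with netcat: nc -lk 8600 (netcat as the source is an assumption for local testing). Type words into the netcat session, one per line, and each windowed word count is printed every slide interval.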
