package com.bawei.stream
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
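// HasOffsetRanges is used only by the offset-logging sketch added below.
import org.apache.spark.streaming.kafka010.HasOffsetRanges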
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
object StreamFromKafka {

  /**
   * State update function for updateStateByKey: sums the counts for a key in the
   * current batch (a) with the key's previous running total (b).
   */
  def updateFunc(a: Seq[Int], b: Option[Int]): Option[Int] = {
    Some(a.sum + b.getOrElse(0))
  }
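  // Worked example (illustrative): if a batch contains ("spark", 1) twice and the
  // previous state for "spark" is Some(3), updateFunc receives a = Seq(1, 1) and
  // b = Some(3), returning Some(5) as the new running total.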
  def main(args: Array[String]): Unit = {
    val checkpointPath = "./kafka-direct"
    // Recover the StreamingContext from the checkpoint directory if one exists;
    // otherwise build a fresh context with createFunc.
    val ssc = StreamingContext.getOrCreate(checkpointPath, () => {
      createFunc(checkpointPath)
    })
    ssc.start()
    ssc.awaitTermination()
  }
  def createFunc(checkpointPath: String): StreamingContext = {
    // todo: 1. Create the SparkConf
    val sparkConf: SparkConf = new SparkConf()
      .setAppName("SparkStreamingKafka_direct_checkpoint")
      .setMaster("local[4]")
    // todo: 2. Create the SparkContext
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")
    // todo: 3. Create the StreamingContext with a 5-second batch interval
    val ssc = new StreamingContext(sc, Seconds(5))
    ssc.checkpoint(checkpointPath)
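    // The checkpoint directory holds both metadata (configuration, DStream graph,
    // pending batches) and the running word counts kept by updateStateByKey, which
    // is what lets StreamingContext.getOrCreate resume after a restart.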
    // todo: 4. Kafka consumer configuration
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "192.168.182.147:9092,192.168.182.148:9092,192.168.182.149:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "group1",
      // Start from the latest offsets when the group has no committed offsets.
      "auto.offset.reset" -> "latest",
      // Progress is tracked through the checkpoint, so disable Kafka's auto-commit.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    // todo: 5. Define the set of topics to subscribe to (multiple topics are allowed)
    val topics = Set("test")
    // todo: 6. Build the DStream with KafkaUtils.createDirectStream (kafka-0-10 direct API)
    val kafkaTopicDS: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )
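    // Minimal sketch (an addition, not part of the original flow): every RDD produced
    // by the direct stream carries its Kafka offset ranges, which can be logged for
    // monitoring without consuming the records.
    kafkaTopicDS.foreachRDD { rdd =>
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      offsetRanges.foreach(o =>
        println(s"${o.topic} partition ${o.partition}: offsets ${o.fromOffset} to ${o.untilOffset}"))
    }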
    // todo: 7. Pull the message values out of the Kafka records
    val kafkaData: DStream[String] = kafkaTopicDS.map(x => x.value())
    // todo: 8. Split each line into words and map each word to a count of 1
    val wordAndOne: DStream[(String, Int)] = kafkaData.flatMap(_.split(" ")).map((_, 1))
    // todo: 9. Accumulate the occurrence count for each word across batches
    val result: DStream[(String, Int)] = wordAndOne.updateStateByKey(updateFunc)
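    // Alternative sketch (commented out): mapWithState, available since Spark 1.6,
    // keeps the same running totals but only touches keys present in the batch.
    // It would need: import org.apache.spark.streaming.{State, StateSpec}
    // val spec = StateSpec.function((word: String, one: Option[Int], state: State[Int]) => {
    //   val sum = one.getOrElse(0) + state.getOption.getOrElse(0)
    //   state.update(sum)
    //   (word, sum)
    // })
    // val stateful: DStream[(String, Int)] = wordAndOne.mapWithState(spec)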
    // todo: Print the first ten elements of each batch
    result.print()
    ssc
  }
}