/**
 * Spark Streaming word count over a Kafka topic.
 *
 * Reads lines from the Kafka topic "wordcount1" via the direct-stream API
 * (broker list "myhadoop1:9092"), splits each message on spaces, and keeps a
 * running per-word count across batches with `updateStateByKey`. State is
 * checkpointed to HDFS so counts survive driver restarts. Batch interval: 5s.
 *
 * NOTE(review): `extends App` has delayed-initialization pitfalls for
 * non-trivial entry points; kept here to preserve the existing interface.
 */
object H extends App {
  // Local two-thread master: one thread for the receiver-less direct stream,
  // one for processing.
  val conf = new SparkConf().setMaster("local[2]").setAppName("hello")
  val ss = new StreamingContext(conf, Seconds(5))

  // Direct-stream API takes the broker list directly (no ZooKeeper receiver).
  val kafkaParams = Map[String, String]("metadata.broker.list" -> "myhadoop1:9092")

  // updateStateByKey requires checkpointing for stateful recovery.
  ss.checkpoint("hdfs://myhadoop1:8020/data")

  val topic = Set[String]("wordcount1")

  // kafka: DStream of (key, message) pairs; only the message value is used.
  val lines = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ss, kafkaParams, topic)

  lines
    .flatMap(_._2.split(" "))
    .map((_, 1))
    // New batch counts for a word are summed onto the previous total
    // (None -> 0 on first sight of the word). Equivalent to the original
    // var + for-loop accumulation, expressed immutably.
    .updateStateByKey((seqs: Seq[Int], option: Option[Int]) => Some(seqs.sum + option.getOrElse(0)))
    .print()

  ss.start()
  ss.awaitTermination()
}
售后响应及时
7×24小时客服热线数据备份
更安全、更高效、更稳定价格公道精准
项目经理精准报价不弄虚作假合作无风险
重合同讲信誉,无效全额退款