1 Adding Event-Time Watermarks to the Data
- Handles out-of-order data
https://ci.apache.org/projects/flink/flink-docs-release-1.6/dev/event_timestamps_watermarks.html
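The idea in isolation: a periodic watermark trails the largest event timestamp seen so far by a fixed out-of-orderness bound, so elements up to that much out of order still land in the right window. A minimal, self-contained sketch of just that arithmetic (the 10-second bound matches the job below; the timestamps are made up for illustration):

// Minimal sketch of the periodic-watermark arithmetic used in the job below
object WatermarkSketch extends App {
  val maxOutOfOrderness = 10000L // 10 seconds, same bound as the job
  var currentMaxTimestamp = 0L

  def onElement(ts: Long): Unit = currentMaxTimestamp = math.max(ts, currentMaxTimestamp)
  def watermark: Long = currentMaxTimestamp - maxOutOfOrderness

  onElement(30000L)  // element at t = 30 s
  onElement(25000L)  // a late element does not move the max back
  println(watermark) // 20000: everything up to t = 20 s is considered complete
}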
package test.flink.scala.scalaproject

import java.text.SimpleDateFormat
import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.watermark.Watermark
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011
import org.slf4j.LoggerFactory

object LogAnalysis {

  // In production, log through SLF4J like this rather than with println
  val logger = LoggerFactory.getLogger("LogAnalysis")

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val topic = "tzbtest"
    val prop = new Properties()
    prop.setProperty("bootstrap.servers", "master:9092")
    prop.setProperty("group.id", "test-tzb-group")

    // Consume the data from Kafka
    val consumer = new FlinkKafkaConsumer011[String](topic, new SimpleStringSchema(), prop)
    val data = env.addSource(consumer)

    val logData = data.map(x => {
      val splits = x.split("\t")
      val level = splits(2)
      val timeStr = splits(3)
      var time = 0L
      try {
        val sourceFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        time = sourceFormat.parse(timeStr).getTime
      } catch {
        case e: Exception =>
          logger.error(s"time parse error: $timeStr", e)
      }
      val domain = splits(5)
      val traffic = splits(6).toLong
      // Return a tuple of (level, time, domain, traffic)
      (level, time, domain, traffic)
    }).filter(_._2 != 0).filter(_._1 == "E")
      .map(x => {
        // Drop field 1 (level); keep (time, domain, traffic)
        (x._2, x._3, x._4)
      })

    //logData.print().setParallelism(1)

    // Assign timestamps and watermarks to the stream.
    // Note: assignTimestampsAndWatermarks returns a new stream rather than
    // mutating logData, so use the result; printing it also gives the job a sink.
    logData.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks[(Long, String, Long)] {

      val maxOutOfOrderness = 10000L // 10 seconds
      var currentMaxTimestamp: Long = _ // defaults to 0

      override def getCurrentWatermark: Watermark = {
        // The watermark is the highest timestamp seen so far minus the out-of-orderness bound
        new Watermark(currentMaxTimestamp - maxOutOfOrderness)
      }

      override def extractTimestamp(element: (Long, String, Long), previousElementTimestamp: Long): Long = {
        val timestamp = element._1
        currentMaxTimestamp = Math.max(timestamp, currentMaxTimestamp)
        timestamp
      }
    }).print().setParallelism(1)

    env.execute("LogAnalysis")
  }
}
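The field indices above imply a tab-separated log format, but the exact layout is not shown in the source. A hypothetical line consistent with splits(2)/(3)/(5)/(6), as a Scala script fragment (f0, f1 and f4 stand in for the unused fields):

// Hypothetical sample line; only fields 2 (level), 3 (time), 5 (domain), 6 (traffic) are used
val line = "f0\tf1\tE\t2020-10-01 10:10:30\tf4\twww.example.com\t1024"
val splits = line.split("\t")
// splits(2) = "E", splits(3) = "2020-10-01 10:10:30",
// splits(5) = "www.example.com", splits(6) = "1024"
// -> after the two maps: (<epoch millis of 10:10:30>, "www.example.com", 1024L)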
2 Business Logic
Compute the traffic of each domain within every 1-minute window.
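Before the full program: a tumbling event-time window of 60 seconds groups elements by minute-aligned window start, and a window fires only once the watermark passes its end. A small sketch of that arithmetic (values assumed for illustration, as a Scala script fragment):

// Which 60 s window an event-time timestamp falls into (epoch millis)
val windowSize = 60000L
val ts = 37000L                          // an event at t = 37 s
val windowStart = ts - (ts % windowSize) // 0 -> window [0 s, 60 s)
// The window fires once the watermark reaches 60 s; with the 10 s
// out-of-orderness bound below, that needs an element at t >= 70 s.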
package test.flink.scala.scalaproject

import java.text.SimpleDateFormat
import java.util.{Date, Properties}

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.java.tuple.Tuple
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.watermark.Watermark
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011
import org.apache.flink.util.Collector
import org.slf4j.LoggerFactory

import scala.collection.mutable.ArrayBuffer

object LogAnalysis {

  // In production, log through SLF4J like this rather than with println
  val logger = LoggerFactory.getLogger("LogAnalysis")

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val topic = "tzbtest"
    val prop = new Properties()
    prop.setProperty("bootstrap.servers", "master:9092")
    prop.setProperty("group.id", "test-tzb-group")

    // Consume the data from Kafka
    val consumer = new FlinkKafkaConsumer011[String](topic, new SimpleStringSchema(), prop)
    val data = env.addSource(consumer)

    val logData = data.map(x => {
      val splits = x.split("\t")
      val level = splits(2)
      val timeStr = splits(3)
      var time = 0L
      try {
        val sourceFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        time = sourceFormat.parse(timeStr).getTime
      } catch {
        case e: Exception =>
          logger.error(s"time parse error: $timeStr", e)
      }
      val domain = splits(5)
      val traffic = splits(6).toLong
      // Return a tuple of (level, time, domain, traffic)
      (level, time, domain, traffic)
    }).filter(_._2 != 0).filter(_._1 == "E")
      .map(x => {
        // Drop field 1 (level); keep (time, domain, traffic)
        (x._2, x._3, x._4)
      })

    //logData.print().setParallelism(1)

    // Assign timestamps and watermarks to the stream
    logData.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks[(Long, String, Long)] {

      val maxOutOfOrderness = 10000L // 10 seconds
      var currentMaxTimestamp: Long = _ // defaults to 0

      override def getCurrentWatermark: Watermark = {
        // The watermark is the highest timestamp seen so far minus the out-of-orderness bound
        new Watermark(currentMaxTimestamp - maxOutOfOrderness)
      }

      override def extractTimestamp(element: (Long, String, Long), previousElementTimestamp: Long): Long = {
        val timestamp = element._1
        currentMaxTimestamp = Math.max(timestamp, currentMaxTimestamp)
        timestamp
      }
    }).keyBy(1) // key by domain (tuple field 1)
      .window(TumblingEventTimeWindows.of(Time.seconds(60)))
      .apply(new WindowFunction[(Long, String, Long), (String, String, Long), Tuple, TimeWindow] {
        override def apply(key: Tuple, window: TimeWindow, input: Iterable[(Long, String, Long)], out: Collector[(String, String, Long)]): Unit = {
          // keyBy(1) produces a Tuple1 key, so field 0 of the key is the domain
          val domain = key.getField(0).toString
          var sum = 0L
          val times = ArrayBuffer[Long]()
          val iterator = input.iterator
          while (iterator.hasNext) {
            val next = iterator.next()
            sum += next._3 // sum up the traffic
            // TODO: the event times inside the window are available via next._1
            times.append(next._1)
          }
          /*
          Field 1: the window's minute, e.g. 2020-10-01 10:10
          Field 2: the domain
          Field 3: the sum of the traffic
          */
          val time = new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date(times.max))
          out.collect((time, domain, sum))
        }
      }).print().setParallelism(1)

    env.execute("LogAnalysis")
  }
}
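With input like the hypothetical line shown earlier, once the watermark passes the end of the 10:10 window the job would print one tuple per domain, e.g.:

(2020-10-01 10:10,www.example.com,1024)

As a design note, collecting every event time just to format times.max is not strictly needed here: since the window is minute-aligned, formatting window.getStart would yield the same minute without buffering the timestamps.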