Flink: consuming Kafka data starting from a specified time

Start consuming Kafka data from a given timestamp. Flink's built-in OffsetsInitializer.timestamp(...) applies a single timestamp to every subscribed topic; the custom OffsetsInitializer below supports a different starting timestamp per topic and also takes committed offsets into account.

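For reference, a minimal sketch (with a hypothetical topic name and start time) of building the per-topic timestamp map that the initializer below expects:

java
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;

public class StartingTimestampsExample {
    public static void main(String[] args) {
        // parse a human-readable start time into epoch milliseconds
        long startMillis = LocalDateTime
                .parse("2024-01-01 00:00:00", DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();

        // one starting timestamp per topic; topics missing from the map
        // fall back to the job start time inside the initializer
        Map<String, Long> topicStartingTimestamps = new HashMap<>();
        topicStartingTimestamps.put("demo-topic", startMillis);
        System.out.println(topicStartingTimestamps);
    }
}

The initializer itself: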
java
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.kafka.shaded.org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.flink.kafka.shaded.org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * OffsetsInitializer that supports a per-topic starting consumption timestamp.
 */
public class KafkaOffsetsInitializer implements OffsetsInitializer {

    private static final Logger logger = LoggerFactory.getLogger(KafkaOffsetsInitializer.class);

    private static final long serialVersionUID = 1L;
    /**
     * key: topic, value: starting consumption timestamp (epoch millis)
     */
    private Map<String, Long> topicStartingTimestamps;
    private ParameterTool parameters;

    /**
     * @param topicStartingTimestamps per-topic starting timestamps (epoch millis)
     * @param parameters              job parameters
     */
    public KafkaOffsetsInitializer(Map<String, Long> topicStartingTimestamps, ParameterTool parameters) {
        this.topicStartingTimestamps = topicStartingTimestamps;
        this.parameters = parameters;
    }

    @Override
    public Map<TopicPartition, Long> getPartitionOffsets(Collection<TopicPartition> partitions,
                                                         PartitionOffsetsRetriever partitionOffsetsRetriever) {
        // starting timestamps per partition and the resulting initial offsets
        Map<TopicPartition, Long> startingTimestamps = new HashMap<>();
        Map<TopicPartition, Long> initialOffsets = new HashMap<>();

        // committedOffsets: the committed offsets for the given partitions
        Map<TopicPartition, Long> committedOffsets = partitionOffsetsRetriever.committedOffsets(partitions);

        // beginningOffsets: the first offset for each of the given partitions
        Map<TopicPartition, Long> beginningOffsets = partitionOffsetsRetriever.beginningOffsets(partitions);
        // endOffsets: the end offset for each of the given partitions
        Map<TopicPartition, Long> endOffsets = partitionOffsetsRetriever.endOffsets(partitions);

        final long now = System.currentTimeMillis();
        partitions.forEach(tp -> {
            // starting timestamp for this topic, as loaded (e.g. from Redis) into the map
            Long startingTimestamp = topicStartingTimestamps.get(tp.topic());
            if (startingTimestamp == null) {
                // no start time configured for this topic: consume from the job start time
                startingTimestamp = now;
                logger.info("no timestamp found in Redis, topic:{}, partition:{}, using current time:{},{}", tp.topic(), tp.partition(), now, new Date(now));
            }
            logger.info("resolved starting timestamp, topic:{}, partition:{}, timestamp:{},{}", tp.topic(), tp.partition(), startingTimestamp, new Date(startingTimestamp));
            startingTimestamps.put(tp, startingTimestamp);
        });
        partitionOffsetsRetriever.offsetsForTimes(startingTimestamps).forEach((tp, offsetMetadata) -> {
            // default both candidates to the partition's beginning offset
            long offsetForTime = beginningOffsets.get(tp);
            long offsetForCommit = beginningOffsets.get(tp);
            if (offsetMetadata != null) {
                offsetForTime = offsetMetadata.offset();
                logger.info("offset resolved from timestamp, topic:{}, partition:{}, offset:{}", tp.topic(), tp.partition(), offsetForTime);
            }

            Long committedOffset = committedOffsets.get(tp);
            if (committedOffset != null) {
                offsetForCommit = committedOffset;
                logger.info("offset resolved from committed offset, topic:{}, partition:{}, offset:{}", tp.topic(), tp.partition(), offsetForCommit);
            }
            logger.info("initial offset set, topic:{}, partition:{}, offset:{}, endOffset:{}", tp.topic(), tp.partition(), Math.max(offsetForTime, offsetForCommit), endOffsets.get(tp));
            // take the larger of the timestamp-based offset and the committed (checkpointed) offset
            initialOffsets.put(tp, Math.max(offsetForTime, offsetForCommit));
        });
        return initialOffsets;
    }

    @Override
    public OffsetResetStrategy getAutoOffsetResetStrategy() {
        // NONE: fail instead of silently resetting when no valid offset is available
        return OffsetResetStrategy.NONE;
    }
}
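
For completeness, a minimal sketch of wiring the initializer into a KafkaSource; the bootstrap servers, topic, and group id are placeholder assumptions:

java
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.HashMap;
import java.util.Map;

public class KafkaTimestampJob {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        ParameterTool parameters = ParameterTool.fromArgs(args);

        // per-topic starting timestamps (in practice these might be loaded from Redis)
        Map<String, Long> topicStartingTimestamps = new HashMap<>();
        topicStartingTimestamps.put("demo-topic", 1704067200000L); // 2024-01-01 00:00:00 UTC

        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("localhost:9092")
                .setTopics("demo-topic")
                .setGroupId("demo-group")
                // plug in the custom initializer instead of OffsetsInitializer.timestamp(...)
                .setStartingOffsets(new KafkaOffsetsInitializer(topicStartingTimestamps, parameters))
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        env.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-source").print();

        env.execute("consume-from-timestamp");
    }
}

Since getAutoOffsetResetStrategy() returns NONE, the job fails fast rather than silently resetting if no valid offset can be resolved for a partition.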