flink消费kafka数据,按照指定时间开始消费

kafka中根据时间戳开始消费数据

java 复制代码
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.kafka.shaded.org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.flink.kafka.shaded.org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.nodes.CollectionNode;

import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * 支持按topic指定开始消费时间戳
 *
 * @author 
 */
public class KafkaOffsetsInitializer implements OffsetsInitializer  {

    private Logger logger = LoggerFactory.getLogger(KafkaOffsetsInitializer.class);


    private static final long serialVersionUID = 1L;
    /**
     * key:topic,value:开始消费时间戳
     */
    private Map<String, Long> topicStartingTimestamps;
    private ParameterTool parameters;

    /**
     * @param topicStartingTimestamps
     * @param parameters
     */
    public KafkaOffsetsInitializer(Map<String, Long> topicStartingTimestamps, ParameterTool parameters) {
        this.topicStartingTimestamps = topicStartingTimestamps;
        this.parameters = parameters;
    }

    @Override
    public Map<TopicPartition, Long> getPartitionOffsets(Collection<TopicPartition> partitions,
                                                         PartitionOffsetsRetriever partitionOffsetsRetriever) {
        //定义起始时间,初始offset
        Map<TopicPartition, Long> startingTimestamps = new HashMap<>();
        Map<TopicPartition, Long> initialOffsets = new HashMap<>();

        //commited offset
        Map<TopicPartition, Long> committedOffsets = partitionOffsetsRetriever.committedOffsets(partitions);

        //beginningOffsets the first offset for the given partitions.
        Map<TopicPartition, Long> beginningOffsets = partitionOffsetsRetriever.beginningOffsets(partitions);
        //endOffsets the for the given partitions.
        Map<TopicPartition, Long> endOffsets = partitionOffsetsRetriever.endOffsets(partitions);

        final long now = System.currentTimeMillis();
        partitions.forEach(tp -> {
            //起始时间赋值为从redis中获取到相对应topic的时间
            Long startingTimestamp = topicStartingTimestamps.get(tp.topic());
            if (startingTimestamp == null) {
                //redis里没有取到消费开始时间从启动时间消费
                startingTimestamp = now;
                logger.info("从redis没有取到时间戳,topic:{},partition:{},使用当前时间:{},{}", tp.topic(), tp.partition(), now, new Date(now));
            }
            logger.info("读取时间戳,topic:{},partition:{},时间戳:{},{}", tp.topic(), tp.partition(), now, new Date(now));
            startingTimestamps.put(tp, startingTimestamp);
        });
        partitionOffsetsRetriever.offsetsForTimes(startingTimestamps).forEach((tp, offsetMetadata) -> {
            long offsetForTime = beginningOffsets.get(tp);
            long offsetForCommit = beginningOffsets.get(tp);
            if (offsetMetadata != null) {
                offsetForTime = offsetMetadata.offset();
                logger.info("根据时间戳取到offset,topic:{},partition:{},offset:{}", tp.topic(), tp.partition(), offsetForTime);
            }

            Long commitedOffset = committedOffsets.get(tp);
            if (commitedOffset != null) {
                offsetForCommit = commitedOffset.longValue();
                logger.info("根据已提交offset取到offset,topic:{},partition:{},offset:{}", tp.topic(), tp.partition(), offsetForCommit);
            }
            logger.info("设置读取offset,topic:{},partition:{},offset:{},endOffset:{}", tp.topic(), tp.partition(), Math.max(offsetForTime, offsetForCommit), endOffsets.get(tp));
            //对比时间戳对应的offset和checkpoint保存的offset,取较大值
            //initialOffsets.put(tp, Math.max(offsetForTime, offsetForCommit));
            initialOffsets.put(tp, offsetForCommit);
        });
        return initialOffsets;
    }

    @Override
    public OffsetResetStrategy getAutoOffsetResetStrategy() {
        return OffsetResetStrategy.NONE;
    }
}
相关推荐
Stringzhua4 小时前
【SpringCloud】Kafka消息中间件
spring·spring cloud·kafka
杨荧11 小时前
【JAVA毕业设计】基于Vue和SpringBoot的服装商城系统学科竞赛管理系统
java·开发语言·vue.js·spring boot·spring cloud·java-ee·kafka
zmd-zk12 小时前
kafka+zookeeper的搭建
大数据·分布式·zookeeper·中间件·kafka
激流丶12 小时前
【Kafka 实战】如何解决Kafka Topic数量过多带来的性能问题?
java·大数据·kafka·topic
筱源源12 小时前
Kafka-linux环境部署
linux·kafka
Mephisto.java12 小时前
【大数据学习 | kafka高级部分】kafka中的选举机制
大数据·学习·kafka
Mephisto.java12 小时前
【大数据学习 | kafka高级部分】kafka的优化参数整理
大数据·sql·oracle·kafka·json·database
Mephisto.java16 小时前
【大数据学习 | kafka高级部分】kafka的kraft集群
大数据·sql·oracle·kafka·json·hbase
Mephisto.java16 小时前
【大数据学习 | kafka高级部分】kafka的文件存储原理
大数据·sql·oracle·kafka·json
yx9o17 小时前
Kafka 源码 KRaft 模式本地运行
分布式·kafka