21 Spring Boot integration with Kafka

1. Configuration file (application.properties)
properties
spring.kafka.bootstrap-servers=CentOSA:9092,CentOSB:9092,CentOSC:9092

spring.kafka.producer.retries=5
spring.kafka.producer.acks=all
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.properties.enable.idempotence=true
spring.kafka.producer.transaction-id-prefix=transaction-id-


spring.kafka.consumer.group-id=group1
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval=100
spring.kafka.consumer.properties.isolation.level=read_committed
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer

spring.kafka.streams.application-id=wordcount_id
spring.kafka.streams.client-id=app1
spring.kafka.streams.auto-startup=true
spring.kafka.streams.state-dir=/Users/admin/Desktop/checkpoint
spring.kafka.streams.replication-factor=1
spring.kafka.streams.properties.processing.guarantee=exactly_once
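
The producer settings above enable an idempotent, transactional producer (spring.kafka.producer.transaction-id-prefix), and the consumer uses isolation.level=read_committed, so it only sees records from committed transactions. The topics used by the code below (topic01, topic02, topic04, topic05) must exist on the brokers; as a minimal sketch (assuming spring-kafka 2.3+ and its auto-configured KafkaAdmin; this class is not part of the original example), they could be declared as NewTopic beans:
java
package com.baizhi.config; // hypothetical configuration class, for illustration only

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

@Configuration
public class TopicConfig {

    // KafkaAdmin (built from spring.kafka.bootstrap-servers) creates these topics at startup if they are missing
    @Bean
    public NewTopic topic01() {
        return TopicBuilder.name("topic01").partitions(3).replicas(3).build();
    }

    @Bean
    public NewTopic topic02() {
        return TopicBuilder.name("topic02").partitions(3).replicas(3).build();
    }

    @Bean
    public NewTopic topic04() {
        return TopicBuilder.name("topic04").partitions(3).replicas(3).build();
    }

    @Bean
    public NewTopic topic05() {
        return TopicBuilder.name("topic05").partitions(3).replicas(3).build();
    }
}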
        
2. logback.xml
xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%p %d{yyyy-MM-dd HH:mm:ss} - %m%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Console log level -->
    <root level="ERROR">
        <appender-ref ref="STDOUT" />
    </root>

    <logger name="org.springframework.kafka" level="INFO"  additivity="false">
        <appender-ref ref="STDOUT" />
    </logger>
    <logger name="org.springframework.kafka.transaction" level="debug"  additivity="false">
         <appender-ref ref="STDOUT" />
    </logger>
</configuration>
3. Application code
java
package com.baizhi;

import org.apache.kafka.clients.consumer.ConsumerRecord;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.state.KeyValueStore;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.KafkaListeners;
import org.springframework.messaging.handler.annotation.SendTo;

import java.io.IOException;
import java.util.Arrays;
import java.util.stream.Collectors;



@SpringBootApplication
@EnableKafkaStreams
@EnableKafka
public class KafkaSpringBootApplication {
    public static void main(String[] args) throws IOException {
        SpringApplication.run(KafkaSpringBootApplication.class,args);
        System.in.read();
    }



    // Consume records from topic04 and forward the return value to topic05 via @SendTo
    @KafkaListeners(value = {@KafkaListener(topics = {"topic04"})})
    @SendTo(value = {"topic05"})
    public String listener(ConsumerRecord<?, ?> cr) {
        return cr.value() + " mashibing edu";
    }

    @Bean
    public KStream<String, String> kStream(StreamsBuilder kStreamBuilder) {
        // Build the word-count topology on the Boot-managed StreamsBuilder, reading from topic02
        KStream<String, String> stream = kStreamBuilder.stream(
                "topic02",
                Consumed.with(Serdes.String(), Serdes.String()));

        // Split each line into words, group by word, count into the "wordcount" store, and print the results
        stream.flatMapValues(new ValueMapper<String, Iterable<String>>() {
            @Override
            public Iterable<String> apply(String s) {
                return Arrays.stream(s.split(" ")).collect(Collectors.toList());
            }
        })
        .selectKey((k, v) -> v)
        // Serialized is deprecated in newer kafka-streams versions; Grouped.with(...) is its replacement
        .groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
        .count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("wordcount"))
        .toStream()
        .print(Printed.toSysOut());

        return stream;
    }

}
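
The listener above forwards its return value to topic05 via @SendTo. To observe those forwarded replies, a second listener on topic05 can be registered; the class below is a hypothetical addition, not part of the original code:
java
package com.baizhi; // hypothetical listener, for illustration only

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class Topic05Listener {

    // Prints the records that listener() forwarded from topic04 to topic05
    @KafkaListener(topics = {"topic05"})
    public void onReply(ConsumerRecord<String, String> cr) {
        System.out.println("reply on topic05: key=" + cr.key() + ", value=" + cr.value());
    }
}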
java
package com.baizhi.service;

public interface IOrderService {
    public void saveOrder(String id,Object message);
}
java
package com.baizhi.service.impl;

import com.baizhi.service.IOrderService;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Transactional
@Service
public class OrderService implements IOrderService {

    @Autowired
    private KafkaTemplate kafkaTemplate;

    @Override
    public void saveOrder(String id, Object message) {
        // Sent inside the Kafka transaction opened by @Transactional
        kafkaTemplate.send(new ProducerRecord("topic04", id, message));
    }
}
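
Because spring.kafka.producer.transaction-id-prefix is set, Spring Boot auto-configures a KafkaTransactionManager, so @Transactional wraps saveOrder() in a Kafka transaction: the record becomes visible to the read_committed consumer only after the transaction commits. The hypothetical variant below (not in the original code) sketches the rollback case:
java
package com.baizhi.service.impl; // hypothetical class, for illustration only

import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class FailingOrderService {

    @Autowired
    private KafkaTemplate kafkaTemplate;

    // The record is written to topic04, but the exception aborts the Kafka transaction,
    // so a read_committed consumer never receives it.
    @Transactional
    public void saveOrderThenFail(String id, Object message) {
        kafkaTemplate.send(new ProducerRecord("topic04", id, message));
        throw new IllegalStateException("simulated failure, the Kafka transaction is aborted");
    }
}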
java
package com.baizhi.tests;

import com.baizhi.KafkaSpringBootApplication;
import com.baizhi.service.IOrderService;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaOperations;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.test.context.junit4.SpringRunner;

@SpringBootTest(classes = {KafkaSpringBootApplication.class})
@RunWith(SpringRunner.class)
public class KafkaTemplateTests {
    @Autowired
    private KafkaTemplate kafkaTemplate;
    @Autowired
    private IOrderService orderService;

    @Test
    public void testOrderService() {
        // Runs in a Kafka transaction because OrderService.saveOrder() is @Transactional
        orderService.saveOrder("001", "baizhi edu ");
    }

    @Test
    public void testKafkaTemplate() {
        // executeInTransaction starts its own Kafka transaction on the transactional producer
        kafkaTemplate.executeInTransaction(new KafkaOperations.OperationsCallback() {
            @Override
            public Object doInOperations(KafkaOperations kafkaOperations) {
                return kafkaOperations.send(new ProducerRecord("topic01", "002", "this is a demo"));
            }
        });
    }

}
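
The word-count topology reads from topic02, which none of the tests above feed. Since the producer is transactional, a plain kafkaTemplate.send() outside a transaction is rejected by default, so input has to be sent through executeInTransaction (or a @Transactional method). A minimal test sketch, with a hypothetical class and method name:
java
package com.baizhi.tests; // hypothetical test class, for illustration only

import com.baizhi.KafkaSpringBootApplication;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaOperations;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.test.context.junit4.SpringRunner;

@SpringBootTest(classes = {KafkaSpringBootApplication.class})
@RunWith(SpringRunner.class)
public class WordCountInputTests {

    @Autowired
    private KafkaTemplate kafkaTemplate;

    @Test
    public void sendLinesToTopic02() {
        // Wrap the send in a Kafka transaction because the producer is transactional
        kafkaTemplate.executeInTransaction(new KafkaOperations.OperationsCallback() {
            @Override
            public Object doInOperations(KafkaOperations kafkaOperations) {
                return kafkaOperations.send("topic02", "line1", "this is a demo this is a demo");
            }
        });
    }
}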