Spring Boot Integration with Kafka

1. Download Kafka

Start ZooKeeper first, then the Kafka broker (run the Windows scripts from Kafka's bin\windows directory); stop them in the reverse order:

	1. zookeeper-server-start.bat ..\..\config\zookeeper.properties
	2. kafka-server-start.bat ..\..\config\server.properties
	3. kafka-server-stop.bat
	4. zookeeper-server-stop.bat
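
With the default broker configuration the topic is usually auto-created on first use, but myTopic can also be created explicitly. The command below is a sketch for Kafka 2.2 or newer (older releases take --zookeeper localhost:2181 instead of --bootstrap-server):

	kafka-topics.bat --create --topic myTopic --bootstrap-server localhost:9092 --partitions 1 --replication-factor 1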
 

2. Add the dependencies

        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
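
The spring-kafka version is managed by the Spring Boot parent, so no explicit version is needed. The producer in step 4 also serializes messages with fastjson, so that dependency must be added as well (the version below is only an example):

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.83</version>
        </dependency>

Then point Spring Boot at the broker in application.properties. This is a minimal sketch assuming a single local broker on the default port; the String serializers shown are also the Spring Boot defaults, listed here for clarity:

	spring.kafka.bootstrap-servers=localhost:9092
	spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
	spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
	spring.kafka.consumer.group-id=myGroup
	spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
	spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
	spring.kafka.consumer.auto-offset-reset=earliest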

3. Message entity

package com.example.kafka;

import java.util.Date;

public class KafkaMessage
{
	private long id;
	private String username;
	private String password;
	private Date date;

	public long getId()
	{
		return id;
	}

	public void setId(long id)
	{
		this.id = id;
	}

	public String getUsername()
	{
		return username;
	}

	public void setUsername(String username)
	{
		this.username = username;
	}

	public String getPassword()
	{
		return password;
	}

	public void setPassword(String password)
	{
		this.password = password;
	}

	public Date getDate()
	{
		return date;
	}

	public void setDate(Date date)
	{
		this.date = date;
	}
}
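
For reference, the producer in step 4 turns this entity into a JSON string with fastjson, so the record value on the topic looks roughly like the line below (fastjson writes java.util.Date as epoch milliseconds by default; exact field order may differ):

	{"date":1700000000000,"id":1,"password":"123456","username":"tom"}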

4. Producer

package com.example.kafka;

import com.alibaba.fastjson.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;


@Component
public class KafkaProducer
{
	@Autowired
	private KafkaTemplate<String,String> kafkaTemplate;

	public KafkaTemplate<String, String> getKafkaTemplate()
	{
		return kafkaTemplate;
	}

	public void setKafkaTemplate(KafkaTemplate<String, String> kafkaTemplate)
	{
		this.kafkaTemplate = kafkaTemplate;
	}

	public void sendKafkaMessage(KafkaMessage message)
	{
		kafkaTemplate.send("myTopic", JSONObject.toJSONString(message));
	}

}
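
The call above sends only a value, so records are distributed across partitions without a key. A variant that keys each record by id (so all messages for the same id land on the same partition) and logs the send outcome could be added to KafkaProducer as sketched below; it assumes spring-kafka 3.x, where send() returns a CompletableFuture (on 2.x it returns a ListenableFuture and addCallback is used instead):

	public void sendKafkaMessageWithKey(KafkaMessage message)
	{
		// key the record by id; records with the same key go to the same partition
		kafkaTemplate.send("myTopic", String.valueOf(message.getId()), JSONObject.toJSONString(message))
				.whenComplete((result, ex) -> {
					if (ex != null)
					{
						// the send failed; log the error
						System.err.println("send failed: " + ex.getMessage());
					}
					else
					{
						// RecordMetadata reports where the broker stored the record
						System.out.println("sent to partition " + result.getRecordMetadata().partition()
								+ " at offset " + result.getRecordMetadata().offset());
					}
				});
	}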

5. Consumer

package com.example.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class KafkaConsumer
{

	@KafkaListener(topics = "myTopic", groupId = "myGroup")
	public void obtainMessage(ConsumerRecord<String, String> consumerRecord)
	{
		System.out.println("obtainMessage invoked");
		String topic = consumerRecord.topic();
		String key = consumerRecord.key();
		String value = consumerRecord.value();
		long timestamp = consumerRecord.timestamp();
		int partition = consumerRecord.partition();
		System.out.println("topic:" + topic);
		System.out.println("key:" + key);
		System.out.println("value:" + value);
		System.out.println("timestamp:" + timestamp);
		System.out.println("partition:" + partition);
		System.out.println("=======================");
	}
}
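
Because the value is the fastjson string produced in step 4, a listener can also deserialize it straight back into the entity. A minimal sketch (it needs import com.alibaba.fastjson.JSON; the separate group id is only an example, chosen so this listener receives its own copy of every record):

	@KafkaListener(topics = "myTopic", groupId = "myGroup2")
	public void obtainEntity(ConsumerRecord<String, String> consumerRecord)
	{
		// parse the JSON value back into the original message object (throws on malformed input)
		KafkaMessage message = JSON.parseObject(consumerRecord.value(), KafkaMessage.class);
		System.out.println("id:" + message.getId() + ", username:" + message.getUsername() + ", date:" + message.getDate());
	}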

6. Controller test

package com.example.controller;

import com.example.kafka.KafkaMessage;
import com.example.kafka.KafkaProducer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.text.SimpleDateFormat;
import java.util.Date;

@RestController
@RequestMapping(value = "/kafka")
public class KafkaController
{

	@Autowired
	private KafkaProducer kafkaProducer;

	@RequestMapping(value = "message", method = RequestMethod.GET)
	public KafkaMessage sendKafkaMessage(@RequestParam(name = "id") long id, @RequestParam(name = "username") String username,
			@RequestParam(name = "password") String password)
	{
		KafkaMessage kafkaMessage = new KafkaMessage();
		kafkaMessage.setId(id);
		kafkaMessage.setUsername(username);
		kafkaMessage.setPassword(password);
		kafkaMessage.setDate(new Date());
		kafkaProducer.sendKafkaMessage(kafkaMessage);
		return kafkaMessage;
	}
}
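
With the application running, a GET request such as http://localhost:8080/kafka/message?id=1&username=tom&password=123456 (assuming the default port 8080) returns the message as JSON, and the listener output from step 5 appears in the console.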