引入maven依赖
1 2 3 4 5 6
| <!-- 引入kafka依赖 --> <dependency> <groupId>org.springframework.kafka</groupId> <artifactId>spring-kafka</artifactId> <version>2.8.6</version> </dependency>
|
application.yml配置文件
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36
| spring: kafka: bootstrap-servers: xx.xx.xx.xx:9092 # kafka集群信息,多个用逗号间隔 # 生产者 producer: # 重试次数,设置大于0的值,则客户端会将发送失败的记录重新发送 retries: 3 batch-size: 16384 #批量处理大小,16K buffer-memory: 33554432 #缓冲存储大,32M acks: 1 # 指定消息key和消息体的编解码方式 key-serializer: org.apache.kafka.common.serialization.StringSerializer value-serializer: org.apache.kafka.common.serialization.StringSerializer # 消费者 consumer: # 消费者组 group-id: TestGroup # 是否自动提交 enable-auto-commit: false # 消费偏移配置 # none:如果没有为消费者找到先前的offset的值,即没有自动维护偏移量,也没有手动维护偏移量,则抛出异常 # earliest:在各分区下有提交的offset时:从offset处开始消费;在各分区下无提交的offset时:从头开始消费 # latest:在各分区下有提交的offset时:从offset处开始消费;在各分区下无提交的offset时:从最新的数据开始消费 auto-offset-reset: earliest key-deserializer: org.apache.kafka.common.serialization.StringDeserializer value-deserializer: org.apache.kafka.common.serialization.StringDeserializer # 监听 listener: # record:当每一条记录被消费者监听器(ListenerConsumer)处理之后提交 # batch:当每一批poll()的数据被ListenerConsumer处理之后提交 # time:当每一批poll()的数据被ListenerConsumer处理之后,距离上次提交时间大于TIME时提交 # count:当每一批poll()的数据被ListenerConsumer处理之后,被处理record数量大于等于COUNT时提交 # count_time:TIME或COUNT中有一个条件满足时提交 # manual:当每一批poll()的数据被ListenerConsumer处理之后, 手动调用Acknowledgment.acknowledge()后提交 # manual_immediate:手动调用Acknowledgment.acknowledge()后立即提交,一般推荐使用这种 ack-mode: manual_immediate
|
消费者
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41
| import org.apache.kafka.clients.consumer.ConsumerRecord; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.kafka.support.Acknowledgment; import org.springframework.stereotype.Component;
@Component public class KafkaConsumer {
@KafkaListener(topics = "topic_test", groupId = "group_topic_test") public void topicListener1(ConsumerRecord<String, String> record, Acknowledgment item) { String value = record.value(); System.out.println(value); System.out.println(record); item.acknowledge(); }
@KafkaListener(topics = "topic_test2",groupId = "group_topic_test2") public void topicListener2(ConsumerRecord<String, String> record, Acknowledgment item) { String value = record.value(); System.out.println(value); System.out.println(record); item.acknowledge(); } }
|
生产者
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
| import org.springframework.beans.factory.annotation.Autowired; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController;
@RestController @RequestMapping("/kafka") public class KafkaController { @Autowired private KafkaTemplate<String, String> kafkaTemplate; @RequestMapping("/send") public void send() { kafkaTemplate.send("topic_test", "key", "测试kafka消息"); } }
|
调用测试
启动Boot项目,使用Postman工具发送GET请求:
http://localhost:8080/kafka/send