Introduction


A project required integrating Spring Boot with Kafka, so this post is a short tutorial on how to use the two together.

Usage


Setup

POM dependencies

Add the Kafka dependency to the Spring Boot project.

```xml
<!-- Kafka: no version is specified here, so the Spring Boot managed version is used;
     otherwise a version mismatch can easily cause errors -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<!-- fastjson -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.71</version>
</dependency>
```

Configuration files

  • application.properties

```properties
spring.application.name=kafka-ts
server.port=8080
#============== kafka ===================
# Kafka broker address(es); more than one can be listed
spring.kafka.bootstrap-servers=localhost:9092
#=============== producer =======================
spring.kafka.producer.retries=3
# Number of messages sent per batch
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
# Serializers for the message key and value
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
#=============== consumer =======================
# Default consumer group id
spring.kafka.consumer.group-id=ts-log-group
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval=100
# Deserializers for the message key and value
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
```
  • application.yml

```yaml
server:
  port: 8080
spring:
  application:
    name: kafka-ts
  kafka:
    # Kafka broker address(es); more than one can be listed
    bootstrap-servers: localhost:9092
    producer:
      retries: 3 # number of retries
      batch-size: 16384 # number of messages sent per batch
      buffer-memory: 33554432
      # serializers for the message key and value
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      group-id: ts-log-group
      auto-offset-reset: earliest
      enable-auto-commit: true
      auto-commit-interval: 100
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
```
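The producer and consumer below use the kafka-ts topic, so that topic has to exist on the broker (or automatic topic creation has to be enabled). If it does not, a topic can be declared as a bean; the following is only a minimal sketch under that assumption, with a hypothetical TopicConfig class name, relying on Spring Boot's auto-configured KafkaAdmin to create NewTopic beans when the application starts.

```java
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Hypothetical configuration class (not part of the original tutorial):
 * declares the topic used below. Spring Boot's auto-configured KafkaAdmin
 * creates NewTopic beans on startup if they do not already exist.
 */
@Configuration
public class TopicConfig {

    @Bean
    public NewTopic kafkaTsTopic() {
        // topic name, partition count, replication factor
        return new NewTopic("kafka-ts", 1, (short) 1);
    }
}
```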
Entity class

```java
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;

import java.util.Date;

@Data
public class Ts {
    private String id;
    private String name;
    private Integer age;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date cTime;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date insertime;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date updateTime;
}
```

Producer

```java
import com.alibaba.fastjson.JSON;
import com.ifs.Ts;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

import java.util.Date;

/**
 * Kafka producer
 *
 * @author heioky
 * @date 2020/10/27
 */
@Component
public class TsProducer {

    Logger logger = LoggerFactory.getLogger(TsProducer.class);

    @Autowired
    private KafkaTemplate kafkaTemplate;

    /**
     * @Description: send a message from the producer
     * @return: void
     * @Author: heioky
     * @Date: 2020/10/27 13:48
     */
    public void send(int i) {
        long beginTime = System.currentTimeMillis();
        Ts ts = new Ts();
        ts.setId(String.valueOf(i));
        ts.setUpdateTime(new Date());
        ts.setAge(18);
        ts.setName("不知火");
        kafkaTemplate.send("kafka-ts", JSON.toJSONString(ts));
        long endTime = System.currentTimeMillis();
        logger.info("Producer sent a message, took {}ms", endTime - beginTime);
    }
}
```
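Note that KafkaTemplate.send() is asynchronous, so the snippet above only measures how long the call took, not whether the broker actually accepted the record. Below is a minimal sketch of logging the outcome, assuming spring-kafka 2.x where send() returns a ListenableFuture; the TsLoggingProducer class is hypothetical and not part of the tutorial code.

```java
import com.alibaba.fastjson.JSON;
import com.ifs.Ts;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

import java.util.Date;

/**
 * Hypothetical variant of TsProducer that logs whether each send succeeded.
 * Assumes spring-kafka 2.x, where send() returns a ListenableFuture.
 */
@Component
public class TsLoggingProducer {

    private final Logger logger = LoggerFactory.getLogger(TsLoggingProducer.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    public void send(int i) {
        Ts ts = new Ts();
        ts.setId(String.valueOf(i));
        ts.setUpdateTime(new Date());
        kafkaTemplate.send("kafka-ts", JSON.toJSONString(ts))
                .addCallback(
                        // invoked once the broker acknowledges the record
                        result -> logger.info("Send succeeded, offset: {}",
                                result.getRecordMetadata().offset()),
                        // invoked if the send fails after the configured retries
                        ex -> logger.error("Send failed", ex));
    }
}
```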

Consumer

```java
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.Optional;

/**
 * Kafka consumer
 *
 * @author heioky
 * @date 2020/10/27
 */
@Component
public class TsConsumer {

    Logger logger = LoggerFactory.getLogger(TsConsumer.class);

    /**
     * @Description: consume messages; topics can be one or more
     * @Param: [consumerRecord]
     * @return: void
     * @Author: heioky
     * @Date: 2020/10/27 14:09
     */
    @KafkaListener(topics = {"kafka-ts"})
    public void getMsgInfo(ConsumerRecord<?, ?> consumerRecord) {
        // guard against a null value
        Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
        if (kafkaMessage.isPresent()) {
            // unwrap the Optional
            Object message = kafkaMessage.get();
            logger.info("Consumed message: {}", message);
        }
    }
}
```
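Because the producer publishes JSON.toJSONString(ts), the consumed value is just a JSON string. A minimal sketch of turning it back into the Ts entity with fastjson (the TsParser helper is my own, not part of the tutorial code):

```java
import com.alibaba.fastjson.JSON;
import com.ifs.Ts;
import org.apache.kafka.clients.consumer.ConsumerRecord;

/**
 * Hypothetical helper: converts the String payload of a record back into a Ts.
 */
public class TsParser {

    public static Ts parseTs(ConsumerRecord<?, ?> consumerRecord) {
        // the producer sent JSON.toJSONString(ts), so parse the value back into a Ts
        return JSON.parseObject(String.valueOf(consumerRecord.value()), Ts.class);
    }
}
```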

Startup class

```java
import com.ifs.kafka.TsProducer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Application entry point
 *
 * @author heioky
 * @date 2020/10/27
 */
@SpringBootApplication
public class Sinfo implements CommandLineRunner {

    @Autowired
    private TsProducer tsProducer;

    public static void main(String[] args) {
        SpringApplication.run(Sinfo.class, args);
    }

    @Override
    public void run(String... args) throws Exception {
        // send ten test messages right after startup
        for (int i = 1; i <= 10; i++) {
            tsProducer.send(i);
        }
    }
}
```

Console output

  • Producer


  • Consumer


Problem

```
Caused by: java.lang.ClassNotFoundException: org.springframework.core.log.LogAccessor
	at java.net.URLClassLoader.findClass(URLClassLoader.java:382) ~[na:1.8.0_202]
	at java.lang.ClassLoader.loadClass(ClassLoader.java:424) ~[na:1.8.0_202]
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349) ~[na:1.8.0_202]
	at java.lang.ClassLoader.loadClass(ClassLoader.java:357) ~[na:1.8.0_202]
	... 25 common frames omitted
```

Cause: the spring-kafka version does not match the Spring Boot version. Remove the explicit version from the dependency and let Spring Boot's default dependency management pick it.

Consuming multiple topics

  • Consumer: use the @KafkaListeners annotation to receive messages from multiple topics (a single-listener alternative is sketched after this list).

```java
/**
 * @Description: consume messages; topics can be one or more
 * @Param: [record]
 * @return: void
 * @Author: heioky
 * @Date: 2020/10/27 14:09
 */
@KafkaListeners({
        @KafkaListener(topics = {"kafka-ts"}, groupId = "ts-log-group"),
        @KafkaListener(topics = {"kafka-two"}, groupId = "ts-log-group")
})
public void getMsgInfo(String record) {
    // guard against a null value
    Optional<?> kafkaMessage = Optional.ofNullable(record);
    if (kafkaMessage.isPresent()) {
        // unwrap the Optional
        Object message = kafkaMessage.get();
        logger.info("Consumed message: {}", message);
    }
}
```
  • Producer: send to the corresponding topic

```java
/**
 * @Description: send a message from the producer
 * @return: void
 * @Author: heioky
 * @Date: 2020/10/27 13:48
 */
public void sendTow(String unid) {
    long beginTime = System.currentTimeMillis();
    Ts ts = new Ts();
    ts.setId(unid);
    ts.setUpdateTime(new Date());
    ts.setAge(23);
    ts.setName("妖刀姬");
    kafkaTemplate.send("kafka-two", JSON.toJSONString(ts));
    long endTime = System.currentTimeMillis();
    logger.info("Producer sendTow took {}ms", endTime - beginTime);
}
```
  • Console output

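As the consumer comment notes, the topics attribute itself accepts several topics, so a single @KafkaListener can also cover both topics without the @KafkaListeners wrapper. A minimal sketch, assuming it lives in the same TsConsumer component (the getMsgFromBothTopics method name is hypothetical):

```java
// Alternative sketch: one listener subscribed to both topics in the same group
@KafkaListener(topics = {"kafka-ts", "kafka-two"}, groupId = "ts-log-group")
public void getMsgFromBothTopics(String record) {
    logger.info("Consumed message: {}", record);
}
```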

END


Done~