1. Overview

A message producer sends messages to a topic, and a consumer reads and processes them.


Prerequisite: a Kafka environment set up and running.

Create a topic with partitions:

bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic kafkaTopic1

Option descriptions:

--topic : topic name
--replication-factor : number of replicas
--partitions : number of partitions
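The topic can also be created from code instead of the CLI. Below is a minimal sketch using Kafka's AdminClient, assuming the kafka-clients dependency is on the classpath and the broker is reachable at 127.0.0.1:9092; the TopicCreator class is a hypothetical helper, not part of the original project.

package com.kafka.admin;

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

/**
 * Hypothetical helper that creates kafkaTopic1 programmatically.
 **/
public class TopicCreator {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");

        try (AdminClient adminClient = AdminClient.create(props)) {
            // Same settings as the CLI call above: 1 partition, replication factor 1
            NewTopic topic = new NewTopic("kafkaTopic1", 1, (short) 1);
            adminClient.createTopics(Collections.singletonList(topic)).all().get();
        }
    }
}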

2. Creating the spring-boot-kafka-producer (producer)


Producer application.yml:

server:
  servlet:
    context-path: /spring-boot-kafka-producer
  port: 8888
spring:
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    # Producer configuration. Most of the defaults are fine; a few of the more important properties are listed here.
    producer:
      # Batch size; Spring maps this to Kafka's batch.size, which is measured in bytes
      batch-size: 16
      # A value greater than 0 makes the client resend any record whose send failed. These retries are no different from the client resending after receiving a send error. Enabling retries can change record ordering: if two records go to the same partition and the first fails while the second succeeds, the second record can end up ahead of the first.
      retries: 0
      # Memory the producer can use to buffer records waiting to be sent. If records are produced faster than they can be delivered to the broker, the producer blocks or throws an exception (controlled by "block.on.buffer.full"). This roughly corresponds to the total memory the producer uses, but it is not a hard limit: some memory also goes to compression (if enabled) and to in-flight requests.
      buffer-memory: 33554432
      # Key serializer
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      # Value serializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
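Spring Boot builds the KafkaTemplate from these properties automatically; no extra configuration class is required. The following sketch shows the equivalent programmatic configuration, only to make explicit what the auto-configuration does (the KafkaProducerConfig class and its package are hypothetical and not part of the original project). The producer module's Message entity and controller follow after it.

package com.kafka.producer.config; // hypothetical package, not in the original project

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

@Configuration
public class KafkaProducerConfig {

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        // Mirrors the application.yml settings above
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16);
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}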
package com.kafka.producer;

import java.util.Date;

/**
 * Entity class describing the custom message format sent to Kafka
 *
 * @author yl
 * @version 1.0
 * @className Message
 * @date 2020/7/23 21:53
 **/
public class Message {

    private String id;

    private String msg;

    private Date sendTime;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Date getSendTime() {
        return sendTime;
    }

    public void setSendTime(Date sendTime) {
        this.sendTime = sendTime;
    }

    @Override
    public String toString() {
        return "Message{" +
                "id='" + id + '\'' +
                ", msg='" + msg + '\'' +
                ", sendTime=" + sendTime +
                '}';
    }
}
package com.kafka.producer;

import com.alibaba.fastjson.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Date;
import java.util.UUID;

/**
 * Controller that sends messages from the producer side
 *
 * @author yl
 * @version 1.0
 * @className ProducerController
 * @date 2020/7/23 21:55
 **/
@RestController
@RequestMapping("/kafka")
public class ProducerController {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @GetMapping(value = "/send")
    public String send() {
        Message message = new Message();
        message.setId(UUID.randomUUID().toString());
        message.setMsg("kafka message body");
        message.setSendTime(new Date());
        kafkaTemplate.send("kafkaTopic1", JSONObject.toJSONString(message));
        System.out.println("Message sent: " + message.toString());
        return "Message sent!";
    }
}
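send() is asynchronous, so the controller above only hands the record to the producer and does not confirm delivery. If delivery confirmation is wanted, the returned future can be inspected. A minimal sketch of such a variant, assuming spring-kafka 2.x where send() returns a ListenableFuture; the /sendWithCallback endpoint is hypothetical and would live in the same controller class:

    // Hypothetical variant of send() with a delivery callback (spring-kafka 2.x ListenableFuture API);
    // not part of the original controller.
    @GetMapping(value = "/sendWithCallback")
    public String sendWithCallback() {
        Message message = new Message();
        message.setId(UUID.randomUUID().toString());
        message.setMsg("kafka message body");
        message.setSendTime(new Date());
        kafkaTemplate.send("kafkaTopic1", JSONObject.toJSONString(message))
                .addCallback(
                        result -> System.out.println("Delivered to partition "
                                + result.getRecordMetadata().partition()
                                + " at offset " + result.getRecordMetadata().offset()),
                        ex -> System.err.println("Delivery failed: " + ex.getMessage()));
        return "Message handed to the producer!";
    }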
package com.kafka;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class SpringBootKafkaProducerApplication {

    public static void main(String[] args) {
        SpringApplication.run(SpringBootKafkaProducerApplication.class, args);
    }

}
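With the broker running on 127.0.0.1:9092, starting this application and issuing a GET request to http://localhost:8888/spring-boot-kafka-producer/kafka/send publishes one JSON-serialized Message to kafkaTopic1.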

3. Creating the spring-boot-kafka-consumer (consumer)


Consumer application.yml:

server:
  servlet:
    context-path: /spring-boot-kafka-consumer
  port: 9999
spring:
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    # Consumer configuration
    consumer:
      # What to do when there is no initial offset in Kafka, or the current offset no longer exists on the server. Defaults to latest; the options are latest, earliest, and none.
      auto-offset-reset: latest
      # Whether to commit offsets automatically
      enable-auto-commit: true
      # Interval between automatic offset commits, in milliseconds
      auto-commit-interval: 100
      # Key deserializer
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      # Value deserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      # Also present in /usr/local/etc/kafka/consumer.properties
      group-id: test-consumer-group
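As on the producer side, Spring Boot wires the listener container from these properties. If you want to control the number of consumer instances (listener threads) programmatically, a ConcurrentKafkaListenerContainerFactory can be declared instead. A minimal sketch, assuming spring-kafka; the KafkaConsumerConfig class and its package are hypothetical and not part of the original project. The consumer's listener and Message entity follow after it.

package com.kafka.consumer.config; // hypothetical package, not in the original project

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@EnableKafka
@Configuration
public class KafkaConsumerConfig {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        // Mirrors the application.yml settings above
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer-group");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // Number of concurrent consumer instances; should not exceed the topic's partition count
        factory.setConcurrency(1);
        return factory;
    }
}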
package com.kafka.consumer;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import java.util.Optional;

/**
 * Consumes messages on the consumer side
 * @className ConsumerMessageHandler
 * @author yl
 * @date 2020/7/23 21:57
 * @version 1.0
 **/
@Component
public class ConsumerMessageHandler {

    @KafkaListener(topics = {"kafkaTopic1"})
    public void listen(ConsumerRecord<?, ?> record) {
        Optional<?> kafkaMessage = Optional.ofNullable(record.value());
        if (kafkaMessage.isPresent()) {
            Object object = kafkaMessage.get();
            System.out.println("----> " + record);
            System.out.println("Consumer received message: ----> " + object.toString());
        }
    }
}
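The listener above only prints the raw JSON string. Since the consumer module also contains the Message class and imports fastjson, the payload could instead be mapped back to the entity. A minimal sketch of such a variant listener method (hypothetical, not part of the original handler; it would replace the listen method above rather than run alongside it):

    // Hypothetical variant that deserializes the JSON payload back into a Message
    @KafkaListener(topics = {"kafkaTopic1"})
    public void listenAsMessage(ConsumerRecord<?, String> record) {
        String json = record.value();
        if (json != null) {
            Message message = JSONObject.parseObject(json, Message.class);
            System.out.println("Consumer received message: id=" + message.getId()
                    + ", msg=" + message.getMsg()
                    + ", sendTime=" + message.getSendTime());
        }
    }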
package com.kafka.consumer;

import java.util.Date;

/**
 * Entity class describing the custom message format sent to Kafka
 *
 * @author yl
 * @version 1.0
 * @className Message
 * @date 2020/7/23 21:53
 **/
public class Message {

    private String id;

    private String msg;

    private Date sendTime;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Date getSendTime() {
        return sendTime;
    }

    public void setSendTime(Date sendTime) {
        this.sendTime = sendTime;
    }

    @Override
    public String toString() {
        return "Message{" +
                "id='" + id + '\'' +
                ", msg='" + msg + '\'' +
                ", sendTime=" + sendTime +
                '}';
    }
}
package com.kafka;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class SpringBootKafkaConsumerApplication {

    public static void main(String[] args) {
        SpringApplication.run(SpringBootKafkaConsumerApplication.class, args);
    }

}
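With the broker and both applications running, each call to the producer's /kafka/send endpoint should produce a "Consumer received message" line in the consumer application's console.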

Project source code (git): https://github.com/yilei111/spring-boot-kafka.git