After setting up a local Kafka environment with Docker, the Java consumer code:

package org.example;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class SimpleConsumer {
    public static void main(String[] args) {
        String topicName = "test";

        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "test-group");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);

        consumer.subscribe(Collections.singletonList(topicName));

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));

                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("Received message: key = %s, value = %s, partition = %d%n", record.key(), record.value(), record.partition());
                }
            }
        } finally {
            consumer.close();
        }
    }
}
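
The consumer subscribes to the topic "test". If that topic does not exist yet (and the broker does not auto-create topics), it can be created programmatically with the Kafka AdminClient. A minimal sketch, assuming a single-broker Docker setup (hence replication factor 1):

package org.example;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

public class CreateTopic {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");

        // try-with-resources closes the AdminClient when done
        try (AdminClient admin = AdminClient.create(props)) {
            // topic "test" with 1 partition and replication factor 1 (single broker)
            NewTopic topic = new NewTopic("test", 1, (short) 1);
            // createTopics is asynchronous; all().get() blocks until the broker responds
            admin.createTopics(Collections.singletonList(topic)).all().get();
            System.out.println("Created topic: " + topic.name());
        }
    }
}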


The producer code for sending messages:

package org.example;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;

public class SimpleProducer {
    public static void main(String[] args) {
        String topicName = "test";

        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = new KafkaProducer<>(props);

        try {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record = new ProducerRecord<>(topicName, "key" + i, "message" + i);
                producer.send(record, (RecordMetadata metadata, Exception exception) -> {
                    if (exception != null) {
                        exception.printStackTrace();
                    } else {
                        System.out.println("Sent message to " + metadata.topic() + " partition: " + metadata.partition() + " with offset: " + metadata.offset());
                    }
                });
            }
        } finally {
            producer.close();
        }
    }
}
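
Note that send() is asynchronous: the callback fires later, and close() flushes whatever is still buffered. If you would rather block until each record is acknowledged, you can wait on the Future that send() returns. A minimal variation (the enclosing method must handle InterruptedException and ExecutionException):

ProducerRecord<String, String> record = new ProducerRecord<>(topicName, "key0", "message0");
// synchronous variant: get() blocks until the broker acknowledges (or the send fails)
RecordMetadata metadata = producer.send(record).get();
System.out.println("Sent to partition " + metadata.partition() + " at offset " + metadata.offset());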


To make the Kafka producer authenticate with a username and password, you need to add the SASL-related properties to the Kafka client configuration. Assuming you are using the SASL/PLAIN mechanism, here is how to modify the code:

package org.example;  

import org.apache.kafka.clients.producer.KafkaProducer;  
import org.apache.kafka.clients.producer.Producer;  
import org.apache.kafka.clients.producer.ProducerRecord;  
import org.apache.kafka.clients.producer.RecordMetadata;  

import java.util.Properties;  

public class SimpleProducer {  
    public static void main(String[] args) {  
        String topicName = "test";  

        Properties props = new Properties();  
        props.put("bootstrap.servers", "localhost:9092");  
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");  
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");  

        // SASL authentication settings
        props.put("security.protocol", "SASL_PLAINTEXT"); // or "SASL_SSL"
        props.put("sasl.mechanism", "PLAIN");  
        
        // JAAS configuration carrying the username and password
        String jaasTemplate = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";";  
        String jaasCfg = String.format(jaasTemplate, "your-username", "your-password");  
        props.put("sasl.jaas.config", jaasCfg);  

        Producer<String, String> producer = new KafkaProducer<>(props);  

        try {  
            for (int i = 0; i < 10; i++) {  
                ProducerRecord<String, String> record = new ProducerRecord<>(topicName, "key" + i, "message" + i);  
                producer.send(record, (RecordMetadata metadata, Exception exception) -> {  
                    if (exception != null) {  
                        exception.printStackTrace();  
                    } else {  
                        System.out.println("Sent message to " + metadata.topic() + " partition: " + metadata.partition() + " with offset: " + metadata.offset());  
                    }  
                });  
            }  
        } finally {  
            producer.close();  
        }  
    }  
}

The consumer needs the same SASL configuration:

package org.example;  

import org.apache.kafka.clients.consumer.ConsumerRecord;  
import org.apache.kafka.clients.consumer.ConsumerRecords;  
import org.apache.kafka.clients.consumer.KafkaConsumer;  

import java.time.Duration;  
import java.util.Collections;  
import java.util.Properties;  

public class SimpleConsumer {  
    public static void main(String[] args) {  
        String topicName = "test";  

        Properties props = new Properties();  
        props.put("bootstrap.servers", "localhost:9092");  
        props.put("group.id", "test-group");  
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");  
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");  

        // SASL authentication settings
        props.put("security.protocol", "SASL_PLAINTEXT"); // or "SASL_SSL"
        props.put("sasl.mechanism", "PLAIN");  

        // JAAS configuration carrying the username and password
        String jaasTemplate = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";";  
        String jaasCfg = String.format(jaasTemplate, "your-username", "your-password");  
        props.put("sasl.jaas.config", jaasCfg);  

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);  

        consumer.subscribe(Collections.singletonList(topicName));  

        try {  
            while (true) {  
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));  

                for (ConsumerRecord<String, String> record : records) {  
                    System.out.printf("Received message: key = %s, value = %s, partition = %d%n", record.key(), record.value(), record.partition());  
                }  
            }  
        } finally {  
            consumer.close();  
        }  
    }  
}
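
One detail worth noting: when the consumer group has no committed offsets yet, auto.offset.reset defaults to latest, so this consumer only sees messages produced after it subscribes. To start from the beginning of the topic instead, add:

// with no committed offsets, start from the earliest available offset instead of the latest
props.put("auto.offset.reset", "earliest");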


Notes:

  1. security.protocol can be either SASL_PLAINTEXT or SASL_SSL, depending on how your cluster is configured.
  2. Replace "your-username" and "your-password" with your actual credentials.
  3. If your Kafka cluster uses SSL, you also need to configure the SSL-related properties, such as ssl.truststore.location and ssl.truststore.password; see the sketch below.
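
A minimal sketch of the SASL_SSL variant, assuming the truststore path and password below are placeholders for your own values:

props.put("security.protocol", "SASL_SSL");
props.put("sasl.mechanism", "PLAIN");
props.put("sasl.jaas.config", jaasCfg); // same JAAS string as above

// SSL settings: the truststore holds the CA certificate that signed the broker's certificate
props.put("ssl.truststore.location", "/path/to/client.truststore.jks"); // placeholder path
props.put("ssl.truststore.password", "truststore-password");            // placeholder password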