This builds on the Kafka cluster environment set up in the previous section.

1. The producer sends messages:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.TimeUnit;

public class GpKafkaProducer extends Thread{

    //producer API client
    KafkaProducer<Integer,String> producer;
    String topic;  //topic to send to

    public GpKafkaProducer(String topic) {
        Properties properties=new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"192.168.8.162:9092,192.168.8.163:9092,192.168.8.164:9092");
        properties.put(ProducerConfig.CLIENT_ID_CONFIG,"my-producer");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        //build the producer client from the connection and serializer settings above
        producer=new KafkaProducer<Integer, String>(properties);
        this.topic = topic;
    }
    @Override
    public void run() {
        int num=0;
        while(num<20) {
            try {
                String msg="kafka  msg:"+num;
                //send() is asynchronous: it returns a Future<RecordMetadata>, so calling get() on it
                //would block for the result (synchronous send); here the result is delivered to a callback instead
                producer.send(new ProducerRecord<>(topic, 1, msg), (metadata, exception) -> {
                    if (exception != null) {
                        exception.printStackTrace();
                        return;
                    }
                    System.out.println(metadata.offset()+"->"+metadata.partition()+"->"+metadata.topic());
                });
                TimeUnit.SECONDS.sleep(2);
                ++num;
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        producer.close(); //flush buffered records and release the client before the thread exits
    }

    public static void main(String[] args) {
        new GpKafkaProducer("love").start();
    }
}
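The comments in run() mention the synchronous alternative: send() returns a Future<RecordMetadata>, and blocking on get() waits for the broker's acknowledgment before the next message goes out. A minimal sketch of that variant, as a drop-in replacement for the callback-based send inside the loop (illustrative only; it assumes the same producer, topic, and msg variable as above, plus imports for org.apache.kafka.clients.producer.RecordMetadata and java.util.concurrent.ExecutionException):

                //synchronous send: block on the Future returned by send()
                try {
                    RecordMetadata metadata = producer
                            .send(new ProducerRecord<>(topic, 1, msg))
                            .get(); //waits until the broker has acknowledged the record
                    System.out.println(metadata.offset()+"->"+metadata.partition()+"->"+metadata.topic());
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }

The synchronous form gives per-record delivery guarantees at the cost of throughput; the callback form used in the class above keeps sending while acknowledgments arrive in the background.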

2. The consumer consumes messages:

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class GpKafkaConsumer extends Thread{

    KafkaConsumer<Integer,String> consumer;
    String topic;

    public GpKafkaConsumer(String topic) {
        Properties properties=new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"192.168.8.162:9092,192.168.8.163:9092,192.168.8.164:9092");
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG,"my-consumer"); //client id of this consumer instance
        properties.put(ConsumerConfig.GROUP_ID_CONFIG,"myclient"); // groupId
        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG,"30000");
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG,"1000"); //auto-commit interval: offsets are committed in batches
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        //auto.offset.reset decides where a consumer group with no committed offset starts:
        //"latest" (used here) only reads messages produced after the consumer joins;
        //use "earliest" if a new group should also read the messages already stored in the topic
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,"latest");
        consumer=new KafkaConsumer<Integer, String>(properties);
        this.topic = topic;
    }

    @Override
    public void run() {
        consumer.subscribe(Collections.singleton(this.topic));
        while(true){
            ConsumerRecords<Integer,String> consumerRecords=consumer.poll(Duration.ofSeconds(1));
            consumerRecords.forEach(record->{
                //sample output line: null->gp kafka practice msg:0->63
                System.out.println(record.key()+"->"+record.value()+"->"+record.offset());
            });
        }
    }

    public static void main(String[] args) {
        new GpKafkaConsumer("love").start();
    }
}
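The consumer above relies on automatic offset commits: enable.auto.commit defaults to true, and auto.commit.interval.ms (1000 ms here) controls how often the consumed offsets are committed. If a record should only be acknowledged after it has actually been processed, auto commit can be turned off and offsets committed by hand. A minimal sketch of that variant of run() (illustrative, not the original author's code; it assumes ENABLE_AUTO_COMMIT_CONFIG is set to "false" in the constructor):

    @Override
    public void run() {
        //requires properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false") in the constructor
        consumer.subscribe(Collections.singleton(this.topic));
        while (true) {
            ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofSeconds(1));
            records.forEach(record ->
                    System.out.println(record.key() + "->" + record.value() + "->" + record.offset()));
            if (!records.isEmpty()) {
                consumer.commitSync(); //synchronously commit the offsets of the records just processed
            }
        }
    }

Manual commits trade a little throughput for the guarantee that an offset is only advanced once the records behind it have been handled.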

3. Results:

With both programs running, the producer callback prints one offset->partition->topic line per record, and the consumer prints the corresponding key->value->offset lines on its console.