4.3.1 Kafka Consumer: High-Level API

Start a console producer to provide test data:

kafka-console-producer.sh --broker-list hadoop201:9092 --topic first

package com.atguigu.kafka.consume;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Arrays;
import java.util.Properties;

public class MyConsumer {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Kafka broker addresses; one reachable broker is enough, the client discovers the rest
        props.put("bootstrap.servers", "hadoop201:9092");
        // Consumer group id
        props.put("group.id", "test");
        // Whether to commit offsets automatically
        props.put("enable.auto.commit", "true");
        // Interval between automatic offset commits
        props.put("auto.commit.interval.ms", "1000");
        // Deserializer class for keys
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Deserializer class for values
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // Create the Kafka consumer
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        // Subscribe to topics; several topics can be subscribed at once
        consumer.subscribe(Arrays.asList("first"));
        while (true) {
            // Poll for records with a 100 ms timeout
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
            }
        }
    }
}
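
The consumer above relies on automatic offset commits (enable.auto.commit=true), so offsets are committed on a timer regardless of whether the records have actually been processed. As a comparison, the following is a minimal sketch, not part of the original course code (the class name ManualCommitConsumer is made up for illustration), that turns auto-commit off and calls commitSync() after each batch, so offsets advance only after the polled records have been handled.

package com.atguigu.kafka.consume;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Arrays;
import java.util.Properties;

public class ManualCommitConsumer {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "hadoop201:9092");
        props.put("group.id", "test");
        // Disable auto-commit: offsets are committed explicitly below
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("first"));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, key = %s, value = %s%n",
                        record.offset(), record.key(), record.value());
            }
            // Synchronously commit the offsets returned by this poll;
            // blocks until the broker acknowledges the commit
            consumer.commitSync();
        }
    }
}

With manual commits, a record is never marked as consumed before the loop has finished processing it, at the cost of possible duplicate delivery if the process crashes between processing and committing.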