【kafka实战】02 kafka生产者和消费者示例

一、依赖引入

<dependency>
   <groupId>org.springframework.kafka</groupId>
   <artifactId>spring-kafka</artifactId>
</dependency>

二、生产者和消费者代码示例

/**
 * Minimal producer/consumer examples using the raw kafka-clients API.
 *
 * <p>The spring-kafka dependency shown above only brings kafka-clients in
 * transitively; these examples use the plain client classes directly.
 */
public class KafkaSimpleTest {

    private static final String TOPIC_NAME = "hello.world";
    private static final String SERVERS = "192.168.56.201:9092";
    private static final String GROUP_ID = "group1";
    private static final String USER_NAME = "user";
    private static final String PASSWORD = "psd";

    /**
     * Polls the topic in an endless loop and commits each record's offset
     * synchronously (auto-commit is disabled in {@link #kafkaConsumer()}).
     */
    @Test
    public void testConsume() {
        // try-with-resources guarantees the consumer (and its broker
        // connections) is released even if poll/commit throws; the original
        // never closed the consumer on any path.
        try (KafkaConsumer<String, String> consumer = kafkaConsumer()) {
            consumer.subscribe(Collections.singletonList(TOPIC_NAME));
            // Production code exits this loop via a shutdown flag plus
            // consumer.wakeup(); kept infinite here to match the example.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(5000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("接收到的消息为:" + record.value() +
                            ",partition:" + record.partition() + ",offset:" + record.offset() + ",key:" + record.key());
                    // Commit offset + 1: the committed offset is the position
                    // of the NEXT record to read, not the one just processed.
                    TopicPartition topicPartition = new TopicPartition(TOPIC_NAME, record.partition());
                    OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(record.offset() + 1);
                    consumer.commitSync(Collections.singletonMap(topicPartition, offsetAndMetadata));
                }
            }
        }
    }

    /**
     * Sends 100 keyed string messages to {@link #TOPIC_NAME}.
     */
    @Test
    public void testSend() {
        Properties properties = new Properties();
        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
        // acks=all: wait for the full in-sync replica set to acknowledge.
        properties.put("acks", "all");
        properties.put("retries", 0);
        properties.put("batch.size", 16384);
        properties.put("linger.ms", 1);
        properties.put("buffer.memory", 33554432);
        // String serializers for both key and value.
        properties.put("key.serializer", StringSerializer.class);
        properties.put("value.serializer", StringSerializer.class);
        // try-with-resources flushes and closes the producer even when a send
        // throws; the original only closed it on the happy path.
        try (Producer<String, String> producer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 100; i++) {
                producer.send(new ProducerRecord<String, String>(TOPIC_NAME, Integer.toString(i),
                        System.currentTimeMillis() + "," + "this is message:" + i));
            }
        }
        System.out.println("消息发送完成");
    }

    /**
     * Builds a String/String consumer with manual offset commits.
     * Subscription is left to the caller (the original subscribed here AND in
     * {@link #testConsume()}, which was redundant).
     *
     * @return a new, not-yet-subscribed {@link KafkaConsumer}
     */
    private KafkaConsumer<String, String> kafkaConsumer() {
        Properties props = new Properties();
        // Kafka broker address
        props.put("bootstrap.servers", SERVERS);
        // Consumer group id
        props.put("group.id", GROUP_ID);
        // Deserializers for record keys and values
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        // Offsets are committed manually via commitSync(); with auto-commit
        // disabled, auto.commit.interval.ms has no effect (kept for reference).
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        /*
        // SASL/PLAIN authentication. NOTE: the original snippet referenced a
        // non-existent 'properties' variable here; corrected to 'props'.
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        props.setProperty("sasl.jaas.config",
                String.format("org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";",
                        USER_NAME, PASSWORD));*/
        return new KafkaConsumer<>(props);
    }
}

文章来源地址https://www.uudwc.com/A/6zjBX/

原文地址:https://blog.csdn.net/suyuaidan/article/details/133133662

本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处: 如若内容造成侵权/违法违规/事实不符,请联系站长进行投诉反馈,一经查实,立即删除!

上一篇 2023年10月04日 08:11
5-3 pytorch中的损失函数
下一篇 2023年10月04日 09:11