栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 软件开发 > 后端开发 > Java

springboot-kafka注解自定义ListenerContainerFactory

Java 更新时间: 发布时间: IT归档 最新发布 模块sitemap 名妆网 法律咨询 聚返吧 英语巴士网 伯小乐 网商动力

springboot-kafka注解自定义ListenerContainerFactory

配置文件

  # Kafka client settings consumed by ReplenishKafkaProperties.
  # NOTE(review): the properties class binds prefix "replenish.kafka" and the
  # listener SpEL reads "kafka.replenish.consumer.*" — this sample uses a bare
  # "kafka:" root; confirm the actual prefix used in the application.
  kafka:
    bootstrapServers: 服务器地址
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      enable-auto-commit: false          # offsets committed manually via Acknowledgment
      auto-offset-reset: latest
      max-poll-interval: 1200000         # ms allowed between polls before rebalance
      max-poll-records: 2 # fetch multiple records per poll; requires listener.type: batch
      isolation-level: read_committed    # only read records from committed transactions
    listener:
      ack-mode: manual                   # listener must call ack.acknowledge()
      type: batch                        # listener method receives a List of records

 properties类

package com.geely.algo.conf.kafka;

import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.stereotype.Component;


/**
 * Type-safe holder for the custom {@code replenish.kafka.*} configuration tree,
 * mirroring the subset of Spring Boot's Kafka properties this app needs so a
 * dedicated listener container factory can be built from them.
 *
 * <p>NOTE(review): the sample YAML in this article uses a top-level {@code kafka:}
 * key, not {@code replenish.kafka:} — confirm the prefix matches the real config.
 */
@Component
@ConfigurationProperties(prefix = "replenish.kafka")
@Getter
@Setter
public class ReplenishKafkaProperties {
    // Comma-separated broker host:port list passed to bootstrap.servers.
    private String bootstrapServers;

    // Listener container settings (ack mode, single vs batch dispatch).
    private Listener listener;

    // Raw consumer client settings (offsets, poll limits, deserializers).
    private Consumer consumer;
}

/**
 * Listener-container subsection of {@link ReplenishKafkaProperties}.
 * Reuses Spring Boot's own enum types so YAML values bind identically
 * to the standard {@code spring.kafka.listener.*} properties.
 */
@Data
class Listener {
    // SINGLE or BATCH — BATCH makes the listener receive a List per poll.
    private KafkaProperties.Listener.Type type;

    // Offset commit strategy; MANUAL requires the listener to acknowledge.
    private ContainerProperties.AckMode ackMode;
}

/**
 * Consumer-client subsection of {@link ReplenishKafkaProperties}.
 * Values are forwarded verbatim into the Kafka {@code ConsumerConfig} map,
 * so String-typed numeric fields are acceptable to the Kafka client.
 */
@Data
class Consumer{
    // false = offsets are committed manually by the listener.
    private Boolean enableAutoCommit;

    // Where to start when no committed offset exists ("latest"/"earliest").
    private String autoOffsetReset;

    // Max records returned by one poll() call.
    private String maxPollRecords;

    // Max ms between polls before the consumer is considered dead (rebalance).
    private String maxPollInterval;

    // "read_committed" skips records from aborted transactions.
    private String isolationLevel;

    // NOTE(review): these two deserializer values are bound but never used —
    // KafkaListenerConfig hardcodes StringDeserializer instead.
    private String keyDeserializer;

    private String valueDeserializer;
}

自定义config类

package com.geely.algo.conf.kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * Builds a dedicated Kafka listener container factory from the custom
 * {@code replenish.kafka.*} properties, independent of Spring Boot's
 * auto-configured {@code kafkaListenerContainerFactory}.
 */
@Component
public class KafkaListenerConfig {

    // Custom properties bound from the replenish.kafka prefix.
    @Autowired
    private ReplenishKafkaProperties replenishKafkaProperties;

    /**
     * Container factory referenced by listeners via
     * {@code containerFactory = "replenishListenerContainerFactory"}.
     *
     * @return a concurrent factory configured with the custom ack mode and
     *         batch flag; String key/value deserialization is assumed
     */
    @Bean("replenishListenerContainerFactory")
    public ConcurrentKafkaListenerContainerFactory<String, String> replenishListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // Manual ack: listener code is responsible for Acknowledgment#acknowledge().
        factory.getContainerProperties().setAckMode(replenishKafkaProperties.getListener().getAckMode());
        // Batch mode: listener methods receive a List of records per poll.
        if (Objects.equals(replenishKafkaProperties.getListener().getType(), KafkaProperties.Listener.Type.BATCH)) {
            factory.setBatchListener(true);
        }
        return factory;
    }

    /** Consumer factory wrapping the raw config map below. */
    private ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    /**
     * Assembles the raw Kafka consumer configuration from the bound properties.
     * String values for numeric settings are accepted by the Kafka client.
     */
    private Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>(16);
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, replenishKafkaProperties.getBootstrapServers());
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, replenishKafkaProperties.getConsumer().getEnableAutoCommit());
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, replenishKafkaProperties.getConsumer().getAutoOffsetReset());
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, replenishKafkaProperties.getConsumer().getIsolationLevel());
        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, replenishKafkaProperties.getConsumer().getMaxPollInterval());
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, replenishKafkaProperties.getConsumer().getMaxPollRecords());
        // NOTE(review): the configured key/value deserializer properties are
        // ignored here — StringDeserializer is hardcoded. Confirm this is intended.
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }
}

 引用类

package com.geely.algo.article.kafka.consumer;

import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.List;

@Component
@Slf4j
public class ReplenishBusinessConsumer {


    @KafkaListener(
            topics = {"#{'${kafka.replenish.consumer.topic}'}"},
            groupId = "#{'${kafka.replenish.consumer.group}'}",
            containerFactory = "replenishListenerContainerFactory")
    public void onMessage(List> records, Acknowledgment ack){


        System.out.println(records.size());
        System.out.println(JSON.toJSonString(records));
        ack.acknowledge();
    }
}

 

转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/531698.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号