
Spring-Kafka: Producing and Consuming via XML Configuration

 1. Producer XML configuration

Step 1: producerProperties — the configuration map the producer needs;

Step 2: producerFactory — the producer factory built from that configuration;

Step 3: kafkaTemplate — the producer template, constructed from two arguments: the producerFactory and an auto-flush flag. The XML passes the constructor arguments producerFactory and autoFlush, which correspond to the two-argument constructor in the KafkaTemplate source.

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
         http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
         http://www.springframework.org/schema/context
         http://www.springframework.org/schema/context/spring-context.xsd">
    <context:property-placeholder location="classpath*:application.properties" />

    <!-- Producer configuration properties -->
    <!-- 1. producerProperties: the configuration the producer needs -->
    <bean id="producerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="${bootstrap.servers}" />
                <entry key="group.id" value="${group.id}" />
                <entry key="retries" value="${retries}" />
                <entry key="batch.size" value="${batch.size}" />
                <entry key="linger.ms" value="${linger.ms}" />
                <entry key="buffer.memory" value="${buffer.memory}" />
                <entry key="acks" value="${acks}" />
                <entry key="key.serializer"
                       value="org.apache.kafka.common.serialization.StringSerializer" />
                <entry key="value.serializer"
                       value="org.apache.kafka.common.serialization.StringSerializer" />
            </map>
        </constructor-arg>
    </bean>


    <!-- producerFactory bean used to build the KafkaTemplate -->
    <!-- 2. producerFactory: the producer factory built from producerProperties -->
    <bean id="producerFactory"
          class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
        <constructor-arg>
            <ref bean="producerProperties" />
        </constructor-arg>
    </bean>

    <!-- KafkaTemplate bean: inject this bean wherever needed and call its send methods -->
    <!-- 3. kafkaTemplate: constructed from producerFactory and the autoFlush flag -->
    <bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
        <constructor-arg ref="producerFactory" />
        <constructor-arg name="autoFlush" value="true" />
        <property name="defaultTopic" value="default" />
    </bean>

</beans>

[Producer properties] (application.properties):

#============== Kafka producer config =======================
# Broker cluster
bootstrap.servers=192.168.80.150:9092
# Consumer group ID (publish-subscribe: if several consumers must all receive a producer's messages, each defines its own group; within a single group only one consumer receives a given message)
group.id=test
# acks=all: send() only returns once every replica has the data, so in theory no data is lost
acks=all
# Number of retries after a failed send
retries=0
# Batch size: when several records go to the same partition, the producer merges them into fewer requests, which helps both client and broker performance
batch.size=16384
# Upper bound on batching delay: after 1 ms a request is sent even if the batch is not full
linger.ms=1
# 32 MB batching buffer
buffer.memory=33554432
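
For readers who prefer to see the same wiring in plain Java, the sketch below builds the equivalent of the three XML beans programmatically. It is only a sketch: the class name ProducerWiringSketch is hypothetical, and group.id is omitted because it is a consumer-side setting that the producer does not use.

package com.caox.kafka._03_spring_kafka_xml;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;

/**
 * Hypothetical Java equivalent of the producer XML above.
 */
public class ProducerWiringSketch {

    public static KafkaTemplate<String, String> buildTemplate() {
        // Same entries as the producerProperties map in the XML.
        Map<String, Object> props = new HashMap<>();
        props.put("bootstrap.servers", "192.168.80.150:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", StringSerializer.class);
        props.put("value.serializer", StringSerializer.class);

        // producerFactory bean.
        DefaultKafkaProducerFactory<String, String> producerFactory =
                new DefaultKafkaProducerFactory<>(props);

        // kafkaTemplate bean: the two-argument constructor (producerFactory, autoFlush).
        KafkaTemplate<String, String> template = new KafkaTemplate<>(producerFactory, true);
        template.setDefaultTopic("default");
        return template;
    }
}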

 2. Producer: sending a message

Step 1: send the data via kafkaTemplate.send(topic, partition, key, data).

Step 2: register success and failure callbacks on the returned ListenableFuture.

package com.caox.kafka._03_spring_kafka_xml;

import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.util.concurrent.FailureCallback;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.SuccessCallback;

/**
 * Created by nazi on 2018/9/5.
 * @author nazi
 */
public class ProducerMain {
    public static void main(String[] argv) throws Exception {
        ApplicationContext context = new ClassPathXmlApplicationContext("applicationContext.xml");
        KafkaTemplate kafkaTemplate = context.getBean(KafkaTemplate.class);
        String key  = "test-key";
        String data = "this is a test message";
        ListenableFuture<SendResult<String, String>> listenableFuture = kafkaTemplate.send("topic-test4", 0, key, data);
        // success callback
        SuccessCallback<SendResult<String, String>> successCallback = new SuccessCallback<SendResult<String, String>>() {
            @Override
            public void onSuccess(SendResult<String, String> result) {
                // success handling logic
                System.out.println("success to send message !");
            }
        };
        // failure callback
        FailureCallback failureCallback = new FailureCallback() {
            @Override
            public void onFailure(Throwable ex) {
                // failure handling logic
            }
        };
        listenableFuture.addCallback(successCallback, failureCallback);
    }



}
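
Because the main method above returns right after registering the callbacks, the JVM may exit before the success callback prints anything. A minimal blocking alternative is sketched below; the helper name sendAndWait is hypothetical and it assumes a typed KafkaTemplate<String, String>.

// Hypothetical helper: send, block until the broker acknowledges, then print the record metadata.
static void sendAndWait(org.springframework.kafka.core.KafkaTemplate<String, String> template,
                        String key, String data) throws Exception {
    org.springframework.kafka.support.SendResult<String, String> result =
            template.send("topic-test4", 0, key, data).get(10, java.util.concurrent.TimeUnit.SECONDS);
    System.out.println("sent to partition " + result.getRecordMetadata().partition()
            + ", offset " + result.getRecordMetadata().offset());
}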

 3. Consumer XML configuration

Step 1: consumerProperties -> consumerFactory — load the configuration and build the consumer factory;

Step 2: messageListener -> containerProperties — load the container configuration (topics) and attach the message listener;

Step 3: consumerFactory + containerProperties -> messageListenerContainer — combine the container configuration (topics) with the message listener to build a concurrent message listener container, whose init-method doStart() is then executed.

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
         http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
         http://www.springframework.org/schema/context
         http://www.springframework.org/schema/context/spring-context.xsd">
    <context:property-placeholder location="classpath*:application.properties" />

    <!-- 1. Consumer configuration properties -->
    <!-- consumerProperties -> consumerFactory: load the configuration to build the consumer factory -->
    <bean id="consumerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="${bootstrap.servers}" />
                <entry key="group.id" value="${group.id}" />
                <entry key="enable.auto.commit" value="${enable.auto.commit}" />
                <entry key="session.timeout.ms" value="${session.timeout.ms}" />
                <entry key="key.deserializer"
                       value="org.apache.kafka.common.serialization.StringDeserializer" />
                <entry key="value.deserializer"
                       value="org.apache.kafka.common.serialization.StringDeserializer" />
            </map>
        </constructor-arg>
    </bean>

    <!-- 2. consumerFactory bean -->
    <bean id="consumerFactory"
          class="org.springframework.kafka.core.DefaultKafkaConsumerFactory" >
        <constructor-arg>
            <ref bean="consumerProperties" />
        </constructor-arg>
    </bean>

    <!-- 3. Consumer implementation class -->
    <bean id="kafkaConsumerService" class="com.caox.kafka._03_spring_kafka_xml.KafkaConsumerServiceImpl3" />

    <!-- 4. Listener container configuration -->
    <!-- messageListener -> containerProperties: container configuration (topics) plus the message listener -->
    <bean id="containerProperties" class="org.springframework.kafka.listener.config.ContainerProperties">
        <!-- topic -->
        <constructor-arg name="topics">
            <list>
                <!--<value>${kafka.consumer.topic.credit.for.lease}</value>-->
                <!--<value>${loan.application.feedback.topic}</value>-->
                <!--<value>${templar.agreement.feedback.topic}</value>-->
                <!--<value>${templar.aggrement.active.feedback.topic}</value>-->
                <!--<value>${templar.aggrement.agreementRepaid.topic}</value>-->
                <value>${templar.aggrement.agreementWithhold.topic}</value>
                <!--<value>${templar.aggrement.agreementRepayRemind.topic}</value>-->
            </list>
        </constructor-arg>
        <property name="messageListener" ref="kafkaConsumerService" />
    </bean>

    <!-- 5. Concurrent message listener container; init-method runs doStart() -->
    <!-- consumerFactory + containerProperties -> messageListenerContainer: combine the container configuration (topics) and the message listener into a concurrent listener container and run its init method doStart -->
    <bean id="messageListenerContainer" class="org.springframework.kafka.listener.ConcurrentMessageListenerContainer" init-method="doStart" >
        <constructor-arg ref="consumerFactory" />
        <constructor-arg ref="containerProperties" />
        <property name="concurrency" value="${concurrency}" />
    </bean>

</beans>

[Consumer properties] (application.properties):

#=============== Consumer ===========================
# If true, consumer offsets are committed periodically in the background
enable.auto.commit=false
# Timeout used to detect consumer failures when Kafka's group management is in use
session.timeout.ms=15000
# Concurrency of the message listener container
concurrency=3
templar.aggrement.agreementWithhold.topic=topic-test4
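
As with the producer, the consumer wiring can be expressed in plain Java. The sketch below mirrors the five XML beans; the class name ConsumerWiringSketch is hypothetical, and the ContainerProperties package matches the XML above, which reflects an older Spring Kafka version.

package com.caox.kafka._03_spring_kafka_xml;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.config.ContainerProperties;

/**
 * Hypothetical Java equivalent of the consumer XML above.
 */
public class ConsumerWiringSketch {

    public static ConcurrentMessageListenerContainer<String, String> buildContainer() {
        // Same entries as the consumerProperties map in the XML.
        Map<String, Object> props = new HashMap<>();
        props.put("bootstrap.servers", "192.168.80.150:9092");
        props.put("group.id", "test");
        props.put("enable.auto.commit", false);
        props.put("session.timeout.ms", 15000);
        props.put("key.deserializer", StringDeserializer.class);
        props.put("value.deserializer", StringDeserializer.class);

        DefaultKafkaConsumerFactory<String, String> consumerFactory =
                new DefaultKafkaConsumerFactory<>(props);

        // containerProperties: topics plus the message listener bean.
        ContainerProperties containerProperties = new ContainerProperties("topic-test4");
        containerProperties.setMessageListener(new KafkaConsumerServiceImpl3());

        // messageListenerContainer: factory + container properties, then concurrency and start.
        ConcurrentMessageListenerContainer<String, String> container =
                new ConcurrentMessageListenerContainer<>(consumerFactory, containerProperties);
        container.setConcurrency(3);
        container.start(); // start() delegates to doStart(), the init-method used in the XML
        return container;
    }
}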

 4. Consumer: receiving messages. Note: Option 2 and Option 3 must still implement MessageListener (the XML wires the bean directly into containerProperties as the messageListener), otherwise a parameter-initialization exception is thrown.

 4.1 [Option 1]: implement the MessageListener interface directly and override onMessage with your own consumption logic.

package com.caox.kafka._03_spring_kafka_xml;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.MessageListener;

/**
 * Created by nazi on 2018/9/11.
 * @author nazi
 */
public class KafkaConsumerSerivceImpl implements MessageListener<String, String> {
    @Override
    public void onMessage(ConsumerRecord<String, String> data) {
        // dispatch by topic
        if("topic-test4".equals(data.topic())){
            // logic for topic-test4
            System.out.println("listen : " + " key:"+ data.key() + " value: " + data.value());
        }else if("topic-test5".equals(data.topic())){
            // logic for topic-test5
        }
    }
}

 4.2 [Option 2]: put @KafkaListener on the handler method and set the topic (SpEL expressions are supported). This makes it easy to split different topics into different business logic (especially convenient when each has its own transaction).

package com.caox.kafka._03_spring_kafka_xml;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.listener.MessageListener;

/**
 * Created by nazi on 2018/9/11.
 * @author nazi
 */

public class KafkaConsumerServiceImpl3 implements MessageListener<String,String> {
    @KafkaListener(topics = "${templar.aggrement.agreementWithhold.topic}")
    public void onMessage(ConsumerRecord<String, String> stringStringConsumerRecord) {
        // consumption logic
        System.out.println("listen 3 : " + " key:"+ stringStringConsumerRecord.key() + " value: " + stringStringConsumerRecord.value());
    }
}

 4.3 [Option 3]: put @KafkaListener on the class and set the topic (SpEL expressions are supported); the same advantage of splitting different topics into separate business logic applies.

package com.caox.kafka._03_spring_kafka_xml;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.listener.MessageListener;

/**
 * Created by nazi on 2018/9/11.
 * @author nazi
 */
@KafkaListener(topics = "${templar.aggrement.agreementWithhold.topic}")
public class KafkaConsumerSerivceImpl2 implements MessageListener<String,String>{

    @Override
    public void onMessage(ConsumerRecord<String, String> data) {
        // consumption logic
        System.out.println("listen2 : " + " key:"+ data.key() + " value: " + data.value());
    }
}
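
To run any of the three consumer options it is enough to load the consumer XML, because the messageListenerContainer bean declares init-method="doStart" and begins polling as soon as the context starts. A minimal launcher is sketched below; the class name ConsumerMain and the file name applicationContext-consumer.xml are assumptions, so use whatever file actually holds the consumer configuration.

package com.caox.kafka._03_spring_kafka_xml;

import org.springframework.context.support.ClassPathXmlApplicationContext;

/**
 * Hypothetical launcher for the consumer configuration.
 */
public class ConsumerMain {
    public static void main(String[] args) throws Exception {
        // Loading the context is enough: the listener container's init-method (doStart) starts polling.
        ClassPathXmlApplicationContext context =
                new ClassPathXmlApplicationContext("applicationContext-consumer.xml");
        context.registerShutdownHook();
        // Keep the JVM alive so the listener threads can keep consuming.
        Thread.currentThread().join();
    }
}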