
Configuring Kafka Producers and Consumers in Spring Boot (Detailed Guide)

Add the following Kafka dependencies (jar packages) to the existing dependencies in pom.xml:

<!-- kafka -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>1.1.1.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.10</artifactId>
    <version>0.10.0.1</version>
</dependency>

application.properties:

# Kafka source the raw data is read from (consumer side)
kafka.consumer.servers=IP:9092,IP:9092            (Kafka consumer cluster, ip:port)
kafka.consumer.enable.auto.commit=true            (whether offsets are committed automatically)
kafka.consumer.session.timeout=20000              (session timeout, in ms)
kafka.consumer.auto.commit.interval=100
kafka.consumer.auto.offset.reset=latest           (consume from the latest offset, i.e. do not re-read the topic from the beginning)
kafka.consumer.topic=result                       (topic to consume)
kafka.consumer.group.id=test                      (consumer group)
kafka.consumer.concurrency=10                     (number of consumer threads)

# Kafka target written to after protocol conversion (producer side)
kafka.producer.servers=IP:9092,IP:9092            (Kafka producer cluster, ip:port)
kafka.producer.topic=result                       (topic to produce to)
kafka.producer.retries=0
kafka.producer.batch.size=4096
kafka.producer.linger=1
kafka.producer.buffer.memory=40960

Spring Boot producer configuration:

package com.mapbar.track_storage.config;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka producer configuration
 * @author Lvjiapeng
 */
@Configuration
@EnableKafka
public class KafkaProducerConfig {

    @Value("${kafka.producer.servers}")
    private String servers;
    @Value("${kafka.producer.retries}")
    private int retries;
    @Value("${kafka.producer.batch.size}")
    private int batchSize;
    @Value("${kafka.producer.linger}")
    private int linger;
    @Value("${kafka.producer.buffer.memory}")
    private int bufferMemory;

    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.RETRIES_CONFIG, retries);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
        props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<String, String>(producerFactory());
    }
}
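With this configuration in place, the kafkaTemplate bean can be injected wherever messages need to be produced. As a minimal sketch (the helper class below is illustrative and not part of the original project), spring-kafka 1.x returns a ListenableFuture from send(), so a success/failure callback can be attached:

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.concurrent.ListenableFutureCallback;

/**
 * Hypothetical helper (not from the original article): sends a message and
 * logs the outcome via the ListenableFuture returned by spring-kafka 1.x.
 */
@Service
public class KafkaSendDemo {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    public void sendWithCallback(String topic, String message) {
        kafkaTemplate.send(topic, message).addCallback(
                new ListenableFutureCallback<SendResult<String, String>>() {
                    @Override
                    public void onSuccess(SendResult<String, String> result) {
                        // Topic, partition and offset are available from the record metadata
                        System.out.println("sent to " + result.getRecordMetadata().topic());
                    }

                    @Override
                    public void onFailure(Throwable ex) {
                        System.err.println("send failed: " + ex.getMessage());
                    }
                });
    }
}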

Spring Boot consumer configuration:

package com.mapbar.track_storage.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka consumer configuration
 * @author Lvjiapeng
 */
@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    @Value("${kafka.consumer.servers}")
    private String servers;
    @Value("${kafka.consumer.enable.auto.commit}")
    private boolean enableAutoCommit;
    @Value("${kafka.consumer.session.timeout}")
    private String sessionTimeout;
    @Value("${kafka.consumer.auto.commit.interval}")
    private String autoCommitInterval;
    @Value("${kafka.consumer.group.id}")
    private String groupId;
    @Value("${kafka.consumer.auto.offset.reset}")
    private String autoOffsetReset;
    @Value("${kafka.consumer.concurrency}")
    private int concurrency;

    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(concurrency);
        factory.getContainerProperties().setPollTimeout(1500);
        return factory;
    }

    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    public Map<String, Object> consumerConfigs() {
        Map<String, Object> propsMap = new HashMap<>();
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
        propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        return propsMap;
    }

    /**
     * Kafka listener bean
     * @return the listener that receives consumed records
     */
    @Bean
    public RawDataListener listener() {
        return new RawDataListener();
    }
}
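Note that factory.setConcurrency(concurrency) together with kafka.consumer.concurrency=10 starts up to 10 consumer threads in the same consumer group; they can only all receive data if the consumed topic has at least 10 partitions, otherwise the surplus threads sit idle.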

Producer test:

package com.mapbar.track_storage.controller;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

@RequestMapping(value = "/kafka")
@Controller
public class ProducerController {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @RequestMapping(value = "/producer", method = RequestMethod.GET)
    public void consume(HttpServletRequest request, HttpServletResponse response) throws IOException {
        // Sample map-matching result payload; sent 500 times to the "result" topic
        String value = "{\"code\":200,\"dataVersion\":\"17q1\",\"message\":\"\",\"id\":\"364f79f28eea48eefeca8c85477a10d3\",\"source\":\"didi\",\"tripList\":[{\"subTripList\":[{\"startTimeStamp\":1519879598,\"schemeList\":[{\"distance\":0.0,\"ids\":\"94666702,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519879598,\"subTripId\":0},{\"startTimeStamp\":1519879727,\"schemeList\":[{\"distance\":1395.0,\"ids\":\"94666729,7298838,7291709,7291706,88613298,88613297,7297542,7297541,94698785,94698786,94698778,94698780,94698779,94698782,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519879812,\"subTripId\":1},{\"startTimeStamp\":1519879836,\"schemeList\":[{\"distance\":0.0,\"ids\":\"54123007,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519879904,\"subTripId\":2},{\"startTimeStamp\":1519879959,\"schemeList\":[{\"distance\":0.0,\"ids\":\"54190443,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519879959,\"subTripId\":3},{\"startTimeStamp\":1519880088,\"schemeList\":[{\"distance\":2885.0,\"ids\":\"94698824,94698822,94698789,94698786,54123011,54123012,54123002,94698763,94698727,94698722,94698765,54123006,54123004,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519880300,\"subTripId\":4},{\"startTimeStamp\":1519880393,\"schemeList\":[{\"distance\":2398.0,\"ids\":\"7309441,7303680,54123061,54123038,7309478,7309477,94698204,94698203,94698273,94698274,94698288,94698296,94698295,94698289,94698310,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519880636,\"subTripId\":5},{\"startTimeStamp\":1519881064,\"schemeList\":[{\"distance\":35.0,\"ids\":\"7309474,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519881204,\"subTripId\":6},{\"startTimeStamp\":1519881204,\"schemeList\":[{\"distance\":28.0,\"ids\":\"7309476,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519881266,\"subTripId\":7},{\"startTimeStamp\":1519881291,\"schemeList\":[{\"distance\":463.0,\"ids\":\"7303683,\",\"schemeId\":0,\"linkList\":[{\"score\":72,\"distance\":1,\"gpsList\":[{\"origLonLat\":\"116.321343,40.43242\",\"grabLonLat\":\"112.32312,40.32132\",\"timestamp\":1515149926000}]}]}],\"endTimeStamp\":1519881329,\"subTripId\":8}],\"startTimeStamp\":1519879350,\"unUseTime\":1201,\"totalTime\":2049,\"endTimeStamp\":1519881399,\"tripId\":0}]}";
        for (int i = 1; i <= 500; i++) {
            kafkaTemplate.send("result", value);
        }
    }
}
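To exercise the producer, send a GET request to /kafka/producer (for example http://localhost:8080/kafka/producer, assuming the application runs on the default port, which the article does not state). This pushes the sample JSON 500 times to the result topic, and the listener below prints each record as it is consumed.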

Consumer test:

package com.mapbar.track_storage.config;

import net.sf.json.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.List;

/**
 * Kafka listener
 * @author shangzz
 */
@Component
public class RawDataListener {

    Logger logger = Logger.getLogger(RawDataListener.class);

    @Autowired
    private MatchRoadService matchRoadService;

    /**
     * Consume Kafka data in real time: every record produced to the topic is
     * picked up automatically by this listener.
     * @param record the consumed record
     * @throws IOException
     */
    @KafkaListener(topics = {"${kafka.consumer.topic}"})
    public void listen(ConsumerRecord<?, ?> record) throws IOException {
        String value = (String) record.value();
        System.out.println(value);
    }
}
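In a real pipeline the listener would parse the payload before handing it to MatchRoadService, whose interface the article does not show. A minimal, illustrative sketch of reading a couple of fields from the sample payload with the net.sf.json API that the listener already imports (class name and shortened payload are assumptions for the example):

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

// Illustrative only: assumes the payload has the shape produced by ProducerController above.
public class PayloadParseDemo {

    public static void main(String[] args) {
        String value = "{\"code\":200,\"dataVersion\":\"17q1\",\"tripList\":[]}";

        JSONObject json = JSONObject.fromObject(value);   // parse the consumed string
        int code = json.getInt("code");                   // 200 in the sample payload
        JSONArray trips = json.getJSONArray("tripList");  // list of matched trips
        System.out.println("code=" + code + ", trips=" + trips.size());
    }
}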

Summary:

         ①  With the producer configuration class in place, @Autowired the KafkaTemplate bean and call its send method to produce messages.

         ②  With the consumer configuration class in place, annotate a method with @KafkaListener(topics = {"${kafka.consumer.topic}"}) and give it a ConsumerRecord<?, ?> record parameter; the method is then invoked automatically for every record consumed from the topic.

         ③  Any further Kafka settings follow the same pattern: add the property to application.properties and add, change, or remove the matching field in the configuration class (see the sketch below).
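For example (the property name and value below are illustrative, not part of the original configuration), capping how many records a single poll() returns follows exactly this pattern: add kafka.consumer.max.poll.records=100 to application.properties and wire it in with @Value and the matching ConsumerConfig key:

package com.mapbar.track_storage.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

import java.util.HashMap;
import java.util.Map;

/**
 * Hypothetical fragment: shows how an extra setting such as max.poll.records
 * would be wired in, following the same @Value + ConsumerConfig pattern as
 * KafkaConsumerConfig above. In practice the field and the put() call would
 * simply be added to that class rather than a separate one.
 */
@Configuration
public class ExtraConsumerProps {

    @Value("${kafka.consumer.max.poll.records}")
    private int maxPollRecords;

    public Map<String, Object> extraConfigs() {
        Map<String, Object> props = new HashMap<>();
        // Caps how many records a single poll() returns (supported since Kafka 0.10.0)
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
        return props;
    }
}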