Integrating Spring Cloud with the Kafka Message Bus

First, add the required dependencies to the pom.xml file:

<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter</artifactId>
        <exclusions>
            <exclusion>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-logging</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-log4j2</artifactId>
    </dependency>
    <dependency>
        <groupId>com.fasterxml.jackson.dataformat</groupId>
        <artifactId>jackson-dataformat-yaml</artifactId>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>log4j-over-slf4j</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.cloud</groupId>
        <artifactId>spring-cloud-starter-eureka</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.cloud</groupId>
        <artifactId>spring-cloud-starter-config</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-actuator</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.cloud</groupId>
        <artifactId>spring-cloud-starter-bus-kafka</artifactId>
    </dependency>
    <dependency>
        <groupId>io.springfox</groupId>
        <artifactId>springfox-swagger2</artifactId>
        <version>2.6.1</version>
    </dependency>
    <dependency>
        <groupId>io.springfox</groupId>
        <artifactId>springfox-swagger-ui</artifactId>
        <version>2.6.1</version>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-devtools</artifactId>
        <optional>true</optional>
    </dependency>
    <dependency>
        <groupId>org.apache.poi</groupId>
        <artifactId>poi-ooxml</artifactId>
        <version>3.9</version>
    </dependency>
    <dependency>
        <groupId>javax.persistence</groupId>
        <artifactId>persistence-api</artifactId>
        <version>1.0</version>
    </dependency>
    <dependency>
        <groupId>org.mybatis.spring.boot</groupId>
        <artifactId>mybatis-spring-boot-starter</artifactId>
        <version>1.1.1</version>
    </dependency>
    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
    </dependency>
    <dependency>
        <groupId>com.github.pagehelper</groupId>
        <artifactId>pagehelper-spring-boot-starter</artifactId>
        <version>1.1.2</version>
    </dependency>
    <dependency>
        <groupId>tk.mybatis</groupId>
        <artifactId>mapper-spring-boot-starter</artifactId>
        <version>1.1.1</version>
    </dependency>
    <dependency>
        <groupId>com.google.code.gson</groupId>
        <artifactId>gson</artifactId>
        <version>2.3.1</version>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>fastjson</artifactId>
        <version>1.2.36</version>
    </dependency>
    <dependency>
        <groupId>dom4j</groupId>
        <artifactId>dom4j</artifactId>
        <version>1.6.1</version>
    </dependency>
    <dependency>
        <groupId>jaxen</groupId>
        <artifactId>jaxen</artifactId>
        <version>1.1.6</version>
    </dependency>
    <dependency>
        <groupId>com.jcraft</groupId>
        <artifactId>jsch</artifactId>
        <version>0.1.54</version>
    </dependency>
    <dependency>
        <groupId>org.apache.camel</groupId>
        <artifactId>camel-ftp</artifactId>
        <version>2.13.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-lang3</artifactId>
        <version>3.0</version>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>druid-spring-boot-starter</artifactId>
        <version>1.1.6</version>
    </dependency>
    <dependency>
        <groupId>net.sf.json-lib</groupId>
        <artifactId>json-lib</artifactId>
        <version>2.4</version>
        <classifier>jdk15</classifier>
    </dependency>
    <dependency>
        <groupId>org.json</groupId>
        <artifactId>json</artifactId>
        <version>20090211</version>
    </dependency>
    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.14.4</version>
    </dependency>
    <dependency>
        <groupId>org.springframework.cloud</groupId>
        <artifactId>spring-cloud-starter-feign</artifactId>
    </dependency>
    <dependency>
        <groupId>org.elasticsearch</groupId>
        <artifactId>elasticsearch</artifactId>
        <version>5.6.5</version>
    </dependency>
    <dependency>
        <groupId>org.elasticsearch.client</groupId>
        <artifactId>transport</artifactId>
        <version>5.6.5</version>
    </dependency>
    <dependency>
        <groupId>io.searchbox</groupId>
        <artifactId>jest</artifactId>
        <version>5.3.3</version>
    </dependency>
</dependencies>

<build>
    <plugins>
        <plugin>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-maven-plugin</artifactId>
            <configuration>
                <fork>true</fork>
                <jvmArguments>-Dfile.encoding=UTF-8</jvmArguments>
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.1</version>
            <configuration>
                <source>${java.version}</source>
                <target>${java.version}</target>
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.jacoco</groupId>
            <artifactId>jacoco-maven-plugin</artifactId>
            <version>0.8.2</version>
            <configuration>
                <destFile>target/jacoco.exec</destFile>
            </configuration>
            <executions>
                <execution>
                    <id>prepare-agent</id>
                    <goals>
                        <goal>prepare-agent</goal>
                    </goals>
                </execution>
                <execution>
                    <id>report</id>
                    <phase>prepare-package</phase>
                    <goals>
                        <goal>report</goal>
                    </goals>
                </execution>
                <execution>
                    <id>post-unit-test</id>
                    <phase>test</phase>
                    <goals>
                        <goal>report</goal>
                    </goals>
                    <configuration>
                        <dataFile>target/jacoco.exec</dataFile>
                        <outputDirectory>target/jacoco-ut</outputDirectory>
                    </configuration>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>

Below is the configuration file for the message receiver.

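Based only on the @Value keys used in the consumer configuration further down, a minimal sketch of what this application.properties file might contain is shown here; the broker address, group id, and numeric values are placeholder assumptions, not values from the original post:

spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.group-id=demo-group
spring.kafka.listener.concurrency=3
spring.kafka.consumer.max-poll-records=100
spring.kafka.consumer.max-poll-interval-ms=300000
spring.kafka.consumer.poll-timeout=5000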

A Controller for testing Kafka message sending:

@RestController
@RequestMapping("/kafka")
@Api(value = "/kafka", description = "Kafka messages")
@Slf4j
public class KafkaSendController {
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Send a Kafka message.
     * @param topic   the topic to send to
     * @param jsonStr the message payload
     */
    @PostMapping("/sendMessageToTopic")
    public void sendMessageToTopic(@RequestParam(value = "topic") String topic,
            @RequestParam(value = "jsonStr", required = false) String jsonStr) {
        if (StringUtils.isEmpty(jsonStr) || "empty".equalsIgnoreCase(jsonStr)) {
            log.error("error param : {}", new ResponseDto(new Result("1400530012",
                    "jsonStr is null"), "sendMessageToTopic error"));
            return;
        }
        kafkaTemplate.send(topic, jsonStr);
    }
}
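The controller autowires a KafkaTemplate, but the post does not show a producer configuration. Spring Boot can auto-configure the template from the spring.kafka.* properties; if you prefer to declare it explicitly, a minimal sketch might look like the following (the class name KafkaProducerConfig and the reuse of the bootstrap-servers property are assumptions):

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

// Hypothetical producer configuration; not part of the original post
@Configuration
public class KafkaProducerConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        // Broker address and String serializers for key and value
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}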

Configuration: Kafka consumer

1) Use @Configuration to declare the configuration class.

2) Use @Value to inject the Kafka settings from application.properties.

3) Expose the consumer properties and the listener container factory as beans with @Bean.

@Configuration
public class KafkaConsumerConfig {
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;
    @Value("${spring.kafka.group-id}")
    private String groupId;
    @Value("${spring.kafka.listener.concurrency}")
    private int concurrency;
    @Value("${spring.kafka.consumer.max-poll-records}")
    private int maxPollRecords;
    @Value("${spring.kafka.consumer.max-poll-interval-ms}")
    private int maxPollInterval;
    @Value("${spring.kafka.consumer.poll-timeout}")
    private int pollTimeout;

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, maxPollInterval);
        props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, pollTimeout);
        return props;
    }

    @Bean("batchContainerFactory")
    public ConcurrentKafkaListenerContainerFactory<String, String> listenerContainer() {
        ConcurrentKafkaListenerContainerFactory<String, String> container =
                new ConcurrentKafkaListenerContainerFactory<>();
        container.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
        // Set the concurrency; it should be less than or equal to the number of partitions of the topic
        container.setConcurrency(concurrency);
        // Enable batch listening
        container.setBatchListener(true);
        return container;
    }
}

The listener class that receives the messages:

@Slf4j
@Component
public class KafkaConsumer {
    @Autowired
    KafkaService kafkaService;

    /**
     * Listen for MQ messages.
     */
    @KafkaListener(topics = { "" }, containerFactory = "batchContainerFactory") // topic name omitted
    public void listen(List<ConsumerRecord<String, String>> recordList) {
        log.info("------------------------batch get {} topic msg------------------------",
                recordList.size());
        for (ConsumerRecord<String, String> record : recordList) {
            final Optional<String> message = Optional.ofNullable(record.value());
            log.info("get message record:" + record);
            final String topic = record.topic();
            if (message.isPresent()) {
                try {
                    final String data = message.get();
                    // insert or update the database record
                    final ObjectMapper objectMapper = new ObjectMapper();
                    final Map<String, String> map = objectMapper.readValue(data, Map.class);
                    final String channelCode = map.get("name");
                    final String sourceChannelCode = map.get("age");
                    final String channelName = map.get("sex");
                    // populate the entity and perform the database operation

                    // related code omitted

                } catch (final DataAccessException e) {
                    // database exceptions are handled by an aspect; only log them here
                    log.error(e.getMessage(), e);
                } catch (final Exception e) {
                    log.error("kafka message {} handle exception", topic, e);
                }
            }
        }
    }
}
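With both sides wired up, POSTing to /kafka/sendMessageToTopic with a topic name and a jsonStr payload containing the keys the listener reads (name, age, and sex), for example {"name":"c01","age":"20","sex":"male"} (illustrative values only), should be picked up by the batch listener, parsed by Jackson, and handed to the database layer.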

 
