- kafka生產者
//AsynUpdateKafkaMsgService.java
package com.baidu.com.kafka;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
@Service
public class AsynUpdateKafkaMsgService{
    // Logger was referenced throughout the original class but never declared.
    private static final Logger logger=LoggerFactory.getLogger(AsynUpdateKafkaMsgService.class);

    // Kafka producer instance, created in init() and closed in close().
    private Producer<String,String> producer;
    // Kafka topic name.
    private String topic="asyn-update-topic";
    // Classpath location of the producer properties file.
    private String location="asyn-update-producer.properties";

    /**
     * Loads the producer configuration from the classpath and creates the
     * Kafka producer. Invoked by the container after dependency injection.
     */
    @PostConstruct
    public void init(){
        InputStream in=Thread.currentThread().getContextClassLoader().getResourceAsStream(location);
        try{
            if(in==null){
                // Fail fast with a clear message instead of the obscure NPE
                // Properties.load(null) would otherwise throw.
                logger.error("producer config not found on classpath: {}",location);
                return;
            }
            Properties props=new Properties();
            props.load(in);
            ProducerConfig config=new ProducerConfig(props);
            producer=new Producer<String,String>(config);
        }catch(Exception e){
            // Pass the exception as the final argument so SLF4J logs the stack trace.
            logger.error("生產者創建失敗 EXPREE: {} REASON: {}",e.getClass().getSimpleName(),e.getMessage(),e);
        }finally{
            try{
                if(in!=null){
                    in.close();
                }
            }catch(IOException e){
                logger.error("生產者初始化失敗 EXPREE: {} REASON: {}",e.getClass().getSimpleName(),e.getMessage(),e);
            }
        }
    }

    /**
     * Sends a message to the configured topic. Empty/null payloads are
     * skipped; send failures are logged, never propagated to the caller.
     *
     * @param partitionKey key Kafka uses to select the partition
     * @param msg          message payload
     */
    public void send(String partitionKey,String msg){
        try{
            if(StringUtils.isEmpty(msg)){
                logger.debug("消息爲空 [topic={},partitionKey={},msg={}]",topic,partitionKey,msg);
                return;
            }
            // NOTE(review): dropped the unused local 'timeMillis' the original declared here.
            producer.send(new KeyedMessage<String,String>(topic,partitionKey,msg));
            logger.debug("生產者發送消息成功 [topic={},partitionKey={},msg={}]",topic,partitionKey,msg);
        }catch(Exception e){
            logger.error("生產者發送消息失敗 EXPREE: {} REASON: {}",e.getClass().getSimpleName(),e.getMessage(),e);
        }
    }

    /** Closes the producer when the container shuts down. */
    @PreDestroy
    public void close(){
        if(producer!=null){
            producer.close();
        }
    }

    public void setTopic(String topic){
        this.topic=topic;
    }

    public void setLocation(String location){
        this.location=location;
    }
}
其中asyn-update-producer.properties內容如下:
metadata.broker.list=188.5.155.16:12032,188.5.155.17:12032
producer.type=sync
request.required.acks=1
request.timeout.ms=10000
serializer.class=kafka.serializer.StringEncoder
key.serializer.class=kafka.serializer.StringEncoder
#partitioner.class=
compression.codec=none
compressed.topics=null
message.send.max.retries=3
retry.backoff.ms=1000
topic.metadata.refresh.interval.ms=600000
queue.buffering.max.ms=1000
queue.buffering.max.messages=100
batch.num.messages=10
pom.xml配置如下:
<!-- kafka config -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.9.2</artifactId>
<version>0.8.2.2</version>
</dependency>
- kafka消費者
//KafKaMsgHandler.java
package com.baidu.com.kafka;
/**
 * Callback contract for processing a single Kafka message.
 * Implementations receive the decoded message payload for each record
 * consumed from the subscribed topic.
 */
public interface KafKaMsgHandler {
    void handle(String message);
}
//KafkaConsumerServer.java
package com.baidu.com.kafka;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class KafkaConsumerServer{
    // Logger was referenced throughout the original class but never declared.
    private static final Logger logger=LoggerFactory.getLogger(KafkaConsumerServer.class);
    // Decode message bytes explicitly; new String(byte[]) would use the
    // platform default charset, which varies per deployment host.
    private static final Charset UTF8=Charset.forName("UTF-8");

    private String topic;               // topic to consume
    private String config;              // classpath location of the consumer properties file
    private ConsumerConnector consumerConnector;
    private ExecutorService executor;
    private int threadCount;            // number of streams / worker threads
    private KafKaMsgHandler msgHandler; // callback invoked for every message

    public KafkaConsumerServer(String topic,String config,int threadCount){
        this.topic=topic;
        this.config=config;
        this.threadCount=threadCount;
    }

    /**
     * Loads the consumer configuration, connects to Kafka and starts one
     * listener thread per stream. Failures are logged, not propagated.
     */
    public void start(){
        InputStream in=null;
        try{
            in=Thread.currentThread().getContextClassLoader().getResourceAsStream(config);
            if(in==null){
                logger.error("consumer config not found on classpath: {}",config);
                return;
            }
            Properties props=new Properties();
            props.load(in);
            consumerConnector=Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
            Map<String,Integer> topicCountMap=new HashMap<String,Integer>();
            // Integer.valueOf instead of the deprecated new Integer(...) constructor.
            topicCountMap.put(topic,Integer.valueOf(threadCount));
            Map<String,List<KafkaStream<byte[],byte[]>>> consumerMap=consumerConnector.createMessageStreams(topicCountMap);
            List<KafkaStream<byte[],byte[]>> streams=consumerMap.get(topic);
            executor=Executors.newFixedThreadPool(threadCount);
            int threadNum=0;
            for(KafkaStream<byte[],byte[]> stream : streams){
                executor.submit(new ListenerThread(stream,threadNum++));
            }
            logger.info("ChargeKafkaConsumer start successfully");
        }catch(Exception e){
            logger.error("ChargeKafkaConsumer start error",e);
        }finally{
            // The original leaked this stream; always close it.
            if(in!=null){
                try{
                    in.close();
                }catch(IOException e){
                    logger.error("error closing consumer config stream",e);
                }
            }
        }
    }

    /** Stops the Kafka connector and the worker thread pool. */
    public void shutdown(){
        if(consumerConnector!=null){
            consumerConnector.shutdown();
        }
        if(executor!=null){
            executor.shutdown();
        }
    }

    /** Worker that drains one Kafka stream and hands each message to the handler. */
    private class ListenerThread implements Runnable{
        private KafkaStream<byte[],byte[]> stream;
        private int threadNum;

        public ListenerThread(KafkaStream<byte[],byte[]> stream,int threadNum){
            this.stream=stream;
            this.threadNum=threadNum;
        }

        @Override
        public void run(){
            try{
                logger.info("thread run, num:{}",Integer.valueOf(threadNum));
                ConsumerIterator<byte[],byte[]> it=stream.iterator();
                // hasNext() blocks until a message arrives or the connector shuts down.
                while(it.hasNext()){
                    try{
                        MessageAndMetadata<byte[],byte[]> messageAndMetadata=it.next();
                        String message=new String(messageAndMetadata.message(),UTF8);
                        logger.debug("consumer message, partition:{},offset:{},message:{}",
                                new Object[]{Integer.valueOf(messageAndMetadata.partition()),
                                        Long.valueOf(messageAndMetadata.offset()),message});
                        try{
                            // Isolate handler failures so one bad message does not kill the thread.
                            msgHandler.handle(message);
                        }catch(Exception e){
                            logger.error("msgHandler.handle catch an exception, "+e.getMessage(),e);
                        }
                    }catch(Exception e){
                        logger.error("fetching message error",e);
                    }
                }
            }catch(Throwable e){
                logger.error("thread dead!",e);
            }
        }
    }

    public void setTopic(String topic){
        this.topic=topic;
    }

    public void setConfig(String config){
        this.config=config;
    }

    public void setThreadCount(int threadCount){
        this.threadCount=threadCount;
    }

    public void setMsgHandler(KafKaMsgHandler msgHandler){
        this.msgHandler=msgHandler;
    }
}
//AsynUpdateKafkaMsgHandler.java
package com.baidu.com.kafka;
import javax.annotation.Resource;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;
public class AsynUpdateKafkaMsgHandler implements KafKaMsgHandler{
@SuppressWarnings("unchecked")
@Override
public void handle(String message){
try{
//業務處理
}catch(JsonSyntaxException e){
logger.error("解析異步處理數據庫消息格式異常:【"+message+"】");
return;
}
}
}
spring-kafka.xml
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd">
<bean id="asynUpdateMsgHandler" class="com.baidu.com.kafka.AsynUpdateKafkaMsgHandler"/>
<bean id="asynUpdateConsumer" class="com.baidu.com.kafka.KafkaConsumerServer" init-method="start" destroy-method="shutdown">
<constructor-arg index="0" value="${asynUpdateMsgTopic}"/>
<constructor-arg index="1" value="${asynUpdateMsgKafkaConfig}"/>
<constructor-arg index="2" value="${asynUpdateMsgThreadCount}"/>
<property name="msgHandler" ref="asynUpdateMsgHandler"/>
</bean>
</beans>
system.properties
#kafka
asynUpdateMsgTopic=asyn-update-topic
asynUpdateMsgKafkaConfig=kafka-asyn-update-db.properties
asynUpdateMsgThreadCount=16
kafka.asyn.update.group.id=asyn_update_pro
kafka.zookeeper.asyn.update=188.45.56.10:15621,188.45.56.11:15621,188.45.56.12:15621
kafka-asyn-update-db.properties
group.id=${kafka.asyn.update.group.id}
zookeeper.connect=${kafka.zookeeper.asyn.update}
zookeeper.session.timeout.ms=60000
zookeeper.connection.timeout.ms=60000
zookeeper.sync.time.ms=2000
auto.commit.enable=true