小编给大家分享一下kafka生产者和消费者的javaAPI怎么用,相信大部分人都还不怎么了解,因此分享这篇文章给大家参考一下,希望大家阅读完这篇文章后大有收获,下面让我们一起去了解一下吧!
创新互联自2013年创立至今,先后为秀洲等地企业提供建站与企业商务咨询服务,为秀洲企业提供网站制作(PC+手机+微官网三网同步)一站式服务,解决您的所有建站问题。
写了个kafka的java demo 顺便记录下,仅供参考
1.创建maven项目
目录如下:
2.pom文件:
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>Kafka-Maven</groupId>
    <artifactId>Kafka-Maven</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <dependencies>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.11</artifactId>
            <version>0.10.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.0.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>1.0.3</version>
        </dependency>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.7</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.3.6</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
3.kafka生产者KafkaProduce:
package com.lijie.producer;

import java.io.InputStream;
import java.util.Properties;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * Minimal Kafka producer demo: loads connection settings from
 * {@code kafka.properties} on the classpath and sends byte[] key/value
 * records to a topic.
 */
public class KafkaProduce {

    private static final Properties properties = new Properties();

    static {
        // getResourceAsStream works both from a directory classpath and from
        // inside a jar; the original File-path approach only works unpacked.
        try (InputStream in = KafkaProduce.class.getResourceAsStream("/kafka.properties")) {
            if (in == null) {
                throw new IllegalStateException("kafka.properties not found on classpath");
            }
            properties.load(in);
        } catch (Exception e) {
            // Fail fast: without the config every subsequent send would fail.
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * 发送消息 (send one record and flush/close the producer).
     *
     * <p>NOTE(review): a {@link KafkaProducer} is thread-safe and expensive to
     * create; production code should keep a single long-lived instance rather
     * than one per call. Kept per-call here to preserve the demo's behavior.
     *
     * @param topic topic to publish to
     * @param key   record key (may be null)
     * @param value record payload
     */
    public void sendMsg(String topic, byte[] key, byte[] value) {
        // try-with-resources guarantees close() (and thus a flush) even if
        // send() throws.
        try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<byte[], byte[]>(properties)) {
            ProducerRecord<byte[], byte[]> record =
                    new ProducerRecord<byte[], byte[]>(topic, key, value);
            producer.send(record, new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    // Bug fix: the original printed metadata.offset() inside the
                    // error branch, where metadata may be null (NPE) and success
                    // offsets were never reported.
                    if (exception != null) {
                        System.out.println("发送失败:" + exception.getMessage());
                    } else {
                        System.out.println("记录的offset在:" + metadata.offset());
                    }
                }
            });
        }
    }
}
4.kafka消费者KafkaConsume:
package com.lijie.consumer;

import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import com.lijie.pojo.User;
import com.lijie.utils.JsonUtils;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

/**
 * Minimal Kafka (old high-level consumer API) demo: reads JSON messages from
 * the {@code lijietest} topic and deserializes each one into a {@link User}.
 */
public class KafkaConsume {

    private static final String TOPIC = "lijietest";

    private static final Properties properties = new Properties();

    static {
        // getResourceAsStream works both from a directory classpath and from
        // inside a jar; the original File-path approach only works unpacked.
        try (InputStream in = KafkaConsume.class.getResourceAsStream("/kafka.properties")) {
            if (in == null) {
                throw new IllegalStateException("kafka.properties not found on classpath");
            }
            properties.load(in);
        } catch (Exception e) {
            // Fail fast: without the config the consumer cannot connect anyway.
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * 获取消息 (consume messages forever).
     *
     * <p>Blocks on {@code it.hasNext()} until messages arrive and never
     * returns under normal operation — by design for this demo.
     *
     * @throws Exception if the consumer cannot be created or a message
     *                   cannot be deserialized
     */
    public void getMsg() throws Exception {
        ConsumerConfig config = new ConsumerConfig(properties);
        ConsumerConnector consumer =
                kafka.consumer.Consumer.createJavaConsumerConnector(config);

        // One stream (i.e. one consuming thread) for the topic.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(TOPIC, Integer.valueOf(1));

        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get(TOPIC).get(0);
        ConsumerIterator<String, String> it = stream.iterator();

        while (it.hasNext()) {
            String json = it.next().message();
            // NOTE(review): cast kept — JsonUtils.JsonToObj presumably returns
            // Object; verify against its declaration.
            User user = (User) JsonUtils.JsonToObj(json, User.class);
            System.out.println(user);
        }
    }
}
5.kafka.properties文件
##produce
bootstrap.servers=192.168.80.123:9092
producer.type=sync
request.required.acks=1
serializer.class=kafka.serializer.DefaultEncoder
key.serializer=org.apache.kafka.common.serialization.ByteArraySerializer
value.serializer=org.apache.kafka.common.serialization.ByteArraySerializer
bak.partitioner.class=kafka.producer.DefaultPartitioner
bak.key.serializer=org.apache.kafka.common.serialization.StringSerializer
bak.value.serializer=org.apache.kafka.common.serialization.StringSerializer
##consume
zookeeper.connect=192.168.80.123:2181
group.id=lijiegroup
zookeeper.session.timeout.ms=4000
zookeeper.sync.time.ms=200
auto.commit.interval.ms=1000
auto.offset.reset=smallest
serializer.class=kafka.serializer.StringEncoder
以上是“kafka生产者和消费者的javaAPI怎么用”这篇文章的所有内容,感谢各位的阅读!相信大家都有了一定的了解,希望分享的内容对大家有所帮助,如果还想学习更多知识,欢迎关注创新互联行业资讯频道!