Approach 1: add the spring-kafka dependency to Maven's pom.xml

<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
Configuration file (application.yml):
spring:
  kafka:
    bootstrap-servers: 192.168.1.7:9092
    producer:
      retries: 3
      acks: 1
      batch-size: 16384
      properties:
        linger:
          ms: 0
      buffer-memory: 33554432
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      properties:
        group:
          id: defaultConsumerGroup
        session:
          timeout:
            ms: 120000
        request:
          timeout:
            ms: 180000
      enable-auto-commit: true
      auto-commit-interval: 1000
      auto-offset-reset: latest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      max-poll-records: 50
    listener:
      missing-topics-fatal: false
      type: batch
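Note that missing-topics-fatal: false only stops the app from failing at startup when a topic is absent; it does not create the topic. If you want the topics used below created automatically, a minimal sketch is shown here, assuming spring-kafka 2.3+ (which provides TopicBuilder) and a hypothetical KafkaTopicConfig class:

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

@Configuration
public class KafkaTopicConfig {

    @Bean
    public NewTopic testTopic() {
        // Created on startup by the auto-configured KafkaAdmin;
        // 1 partition / 1 replica is only suitable for a single-broker dev setup
        return TopicBuilder.name("test-topic").partitions(1).replicas(1).build();
    }
}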
In your code you can use KafkaTemplate directly, because Spring Boot auto-configures the bean and registers it in the container at startup.
Sending data to Kafka:

@Autowired
private KafkaTemplate<String, String> kafkaTemplate;

// Send data to Kafka
private void sendKafka(String abcd, String efg) {
    Map<String, Object> body = new HashMap<>(8);
    body.put("time", System.currentTimeMillis());
    body.put("abcd", abcd);
    body.put("efg", efg);
    // Serialize the map to a JSON string (fastjson) and send it to the topic
    kafkaTemplate.send("test-topic", JSON.toJSONString(body));
}
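send() is asynchronous, so the method above returns before the broker has acknowledged the record. If you need to confirm delivery or log failures, a minimal sketch is shown here, assuming spring-kafka 2.x (where send() returns a ListenableFuture; newer versions return a CompletableFuture) and an SLF4J logger named log in the same class:

kafkaTemplate.send("test-topic", JSON.toJSONString(body))
        .addCallback(
                // Success: the broker accepted the record, metadata tells us where it landed
                result -> log.info("sent to partition {}, offset {}",
                        result.getRecordMetadata().partition(),
                        result.getRecordMetadata().offset()),
                // Failure: e.g. broker unreachable, serialization error, timeout
                ex -> log.error("send to kafka failed", ex));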
Subscribing to data:
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.List;

@Component
public class KafkaConsumer {

    @Autowired
    private ProcessDataComponent processDataComponent;

    /**
     * Subscribe to a single topic
     */
    @KafkaListener(topics = "test-topic1")
    public void onDeviceSubStatusMessageDevice(List<ConsumerRecord<String, ?>> listRecord) {
        process(listRecord);
    }

    /**
     * Subscribe to multiple topics
     */
    @KafkaListener(topics = {
            "topic1",
            "topic2",
            "topic3"
    })
    public void onDeviceMessage(List<ConsumerRecord<String, ?>> listRecord) {
        process(listRecord);
    }

    // Each listener receives a List because listener.type is set to batch in the config
    private void process(List<ConsumerRecord<String, ?>> listRecord) {
        listRecord.forEach(record -> {
            processDataComponent.process(record.key(), record.value() + "");
        });
    }
}
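ProcessDataComponent itself is not shown here; a minimal hypothetical placeholder matching the process(key, value) call above could look like this:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

// Hypothetical placeholder for the component injected into the listener above
@Component
public class ProcessDataComponent {

    private static final Logger logger = LoggerFactory.getLogger(ProcessDataComponent.class);

    public void process(String key, String value) {
        // Replace with real business logic
        logger.info("received key = {}, value = {}", key, value);
    }
}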
Approach 2: add the kafka-clients dependency to Maven's pom.xml

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>1.0.2</version>
</dependency>
Sending data to Kafka:
package com.test.kafka.demo;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class KafkaProducerDemo {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.1.7:9092");
        // Wait for acknowledgement from all in-sync replicas
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = new KafkaProducer<>(props);
        // Send 100 records whose key and value are both the loop index
        for (int i = 0; i < 100; i++) {
            producer.send(new ProducerRecord<String, String>("my-topic", Integer.toString(i), Integer.toString(i)));
        }
        producer.close();
    }
}
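The loop above is fire-and-forget; send() actually returns a Future<RecordMetadata>. If you want to confirm each write or surface broker errors, a minimal sketch reusing the producer from the demo above (additional imports: org.apache.kafka.clients.producer.RecordMetadata and java.util.concurrent.ExecutionException) is:

try {
    // Blocking send: get() waits for the broker acknowledgement and throws on failure
    RecordMetadata metadata = producer
            .send(new ProducerRecord<>("my-topic", "key-1", "value-1"))
            .get();
    System.out.printf("written to partition %d at offset %d%n",
            metadata.partition(), metadata.offset());
} catch (InterruptedException | ExecutionException e) {
    e.printStackTrace();
}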
Subscribing to data:
package com.test.kafka.demo;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Properties;

public class KafkaConsumerDemo {

    private static Logger logger = LoggerFactory.getLogger(KafkaConsumerDemo.class);

    public static void main(String[] args) {
        try {
            Properties props = new Properties();
            props.put("bootstrap.servers", "192.168.1.7:9092");
            props.put("group.id", "group-foo1");
            props.put("auto.offset.reset", "earliest");
            // Strategy 1: auto-commit, offsets are committed periodically
            props.put("enable.auto.commit", "true");
            props.put("auto.commit.interval.ms", "1000");
            // Strategy 2: consumer.commitSync() -- manual synchronous commit, e.g. once after each processed message
            // Strategy 3: consumer.commitAsync() -- manual asynchronous commit
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

            KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(props);
            kafkaConsumer.subscribe(Arrays.asList("my-topic"));
            boolean flag = true;
            while (flag) {
                ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    logger.info("offset = {}, key = {}, value = {}", record.offset(), record.key(), record.value());
                }
            }
            kafkaConsumer.close();
            logger.info("consumer client has been closed");
        } catch (Exception e) {
            logger.error("consume failed: {}", e.getMessage(), e);
        }
    }
}
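The comments above name strategies 2 and 3, but the demo only uses auto-commit. A minimal sketch of strategy 2, assuming enable.auto.commit is set to "false" in the Properties above, changes the poll loop to commit after each processed batch:

// Strategy 2: with enable.auto.commit=false, commit offsets manually after processing
while (flag) {
    ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
    for (ConsumerRecord<String, String> record : records) {
        logger.info("offset = {}, key = {}, value = {}", record.offset(), record.key(), record.value());
    }
    if (!records.isEmpty()) {
        // Blocks until the broker acknowledges the committed offsets;
        // commitAsync() (strategy 3) is the non-blocking alternative
        kafkaConsumer.commitSync();
    }
}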