在Debian上使用Kafka消息队列,可以按照以下步骤进行安装和配置:
sudo apt-get update
sudo apt-get install kafka
(注意:Debian 官方仓库通常不包含名为 kafka 的软件包;若上述命令失败,请先安装 Java 运行环境,再从 Apache Kafka 官网下载二进制压缩包手动安装,并自行配置 systemd 服务。)
sudo systemctl start kafka
sudo systemctl enable kafka
sudo systemctl status kafka
Kafka的主要配置文件位于 /etc/kafka/server.properties。你可以根据需要修改其中的配置项(例如 broker.id、listeners、log.dirs、zookeeper.connect 等):
以下是一个简单的Java生产者示例,使用Spring Kafka库:
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import java.util.Properties;
@Service
public class KafkaProducerService {

    /**
     * Sends string messages to Kafka via Spring's {@link KafkaTemplate}.
     *
     * <p>Producer configuration (bootstrap servers, serializers) belongs in the
     * Spring Kafka configuration (application.properties or a KafkaConfig
     * class), not in this service: the original implementation constructed an
     * unused {@code Properties} object on every send call — dead code that has
     * been removed.
     */
    private final KafkaTemplate<String, String> kafkaTemplate;

    public KafkaProducerService(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * Publishes a message to the given topic asynchronously.
     *
     * @param topic   destination topic name
     * @param message message payload (sent with a null key)
     */
    public void sendMessage(String topic, String message) {
        // KafkaTemplate builds the ProducerRecord internally; no need to
        // construct one (or any Properties) by hand.
        kafkaTemplate.send(topic, message);
    }
}
以下是一个简单的Java消费者示例,使用Spring Kafka库:
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
@Service
public class KafkaConsumerService {

    @Value("${kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Value("${kafka.consumer.group-id}")
    private String groupId;

    // Topic to consume from. The original manual loop mistakenly subscribed
    // to the *group id* instead of a topic — that bug is fixed below.
    @Value("${kafka.consumer.topic}")
    private String topic;

    /**
     * Spring-managed listener: invoked automatically for each record arriving
     * on the configured topic. This is the preferred consumption style when
     * Spring Kafka is on the classpath.
     *
     * @param record the consumed record (string key/value)
     */
    @KafkaListener(topics = "${kafka.consumer.topic}")
    public void listen(ConsumerRecord<String, String> record) {
        System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
    }

    /**
     * Manual poll loop using the plain Kafka client. Blocks the calling thread
     * until it is interrupted.
     *
     * <p>NOTE(review): with {@code @KafkaListener} active above, this manual
     * loop is redundant — a real application should use one approach or the
     * other, not both.
     */
    public void startConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("group.id", groupId);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // try-with-resources: the original never closed the consumer, leaking
        // its network connections and delaying group rebalance on shutdown.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // BUG FIX: subscribe to the topic, not the consumer group id.
            consumer.subscribe(Collections.singletonList(topic));
            // Interruptible loop instead of while(true) so the thread can be
            // stopped cleanly and the consumer closed.
            while (!Thread.currentThread().isInterrupted()) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    listen(record);
                }
            }
        }
    }
}
kafka.bootstrap-servers=localhost:9092
kafka.consumer.group-id=my-group
kafka.consumer.topic=my-topic
辰迅云「云服务器」,即开即用、新一代英特尔至强铂金CPU、三副本存储NVMe SSD云盘,价格低至29元/月。点击查看>>
推荐阅读: Node.js日志在Debian如何优化查询