@@ -0,0 +1,78 @@
+package com.seamew.lottery.test.application;
+
+import com.seamew.lottery.application.mq.KafkaProducer;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import javax.annotation.Resource;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Author: seamew
+ * @Title: KafkaProducerTest
+ * @CreateTime: 2023-02-27 11:14:00
+ * @Description: Sends a test message through KafkaProducer and consumes it with a manually configured KafkaConsumer
+ * @Version: 1.0
+ */
+@RunWith(SpringRunner.class)
+@SpringBootTest
+@Slf4j
+public class KafkaProducerTest {
+
+    @Resource
+    private KafkaProducer kafkaProducer;
+
+    @Test
+    public void test_send() throws InterruptedException {
+        // Send messages in a loop
+        // for (int i = 0; i < 5; i++) {
+        //     kafkaProducer.send("你好,seamew 00" + i);
+        //     Thread.sleep(3500);
+        // }
+        kafkaProducer.send("你好,seamew 007");
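+        // Pause so the (likely asynchronous) send can complete before the test method returns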
+        Thread.sleep(4000);
+    }
+
+    @Test
+    public void context() {
+        Map<String, Object> configs = new HashMap<>();
+        // Bootstrap server address used for the initial connection to Kafka;
+        // for a cluster, the remaining brokers are discovered through this initial connection
+        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "180.76.231.231:9092");
+        // Key deserializer class
+        configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.IntegerDeserializer");
+        // Value deserializer class
+        configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
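+        // Consumer group id for this test consumer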
+        configs.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer-group");
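+        // Start from the earliest offset when the group has no committed offset yet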
+        configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+        // Disable auto-commit; offsets are committed manually below
+        configs.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
+        // Create the consumer
+        KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(configs);
+
+        List<String> topics = new ArrayList<>();
+        topics.add("Hello-Kafka");
+        // Subscribe the consumer to the topic
+        consumer.subscribe(topics);
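+        // Consume in an endless loop; the test only stops when it is interrupted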
+        while (true) {
+            // Poll a batch of records, waiting up to 3 seconds per poll
+            ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(3000));
+            // Iterate over the fetched records
+            for (ConsumerRecord<Integer, String> record : records) {
+                System.out.println(record.value());
+            }
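+            // Manually commit the consumed offsets asynchronously (auto-commit is disabled)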
+            consumer.commitAsync();
+        }
+    }
+}