接着《Kafka 入门(一)》,本篇介绍如何在应用中实际使用 Kafka。
生产者,消费者分别使用不同的应用
Maven 依赖:org.springframework.kafka:spring-kafka:3.0.4、org.projectlombok:lombok:1.18.26、ch.qos.logback:logback-classic:1.4.5
spring.kafka.bootstrap-servers=192.168.79.177:9092
logback 根日志级别为 INFO,输出格式:%date %level %logger %msg%n
扫描包下相关配置类,便于AnnotationConfigApplicationContext 加载。
/**
 * Producer-side root configuration.
 *
 * Component-scans the producer package so that
 * AnnotationConfigApplicationContext, bootstrapped with this single class,
 * picks up the Kafka topic/producer config classes and the sender service.
 */
@Configuration
@ComponentScan("pr.iceworld.fernando.spring6.kafka.producer")
public class KafkaCommonConfig {
}
Topic配置类
/**
 * Declares the demo topics (topic1..topic3) and the KafkaAdmin bean that
 * creates them on the broker at context startup.
 *
 * Bootstrap address comes from application.properties
 * ({@code spring.kafka.bootstrap-servers}).
 */
@Configuration
@PropertySource("classpath:application.properties")
public class KafkaTopicConfig {

    @Value(value = "${spring.kafka.bootstrap-servers}")
    private String bootstrapAddress;

    /**
     * KafkaAdmin scans the context for NewTopic beans and creates any that
     * do not yet exist on the broker.
     */
    @Bean
    public KafkaAdmin admin() {
        Map<String, Object> configs = new HashMap<>();
        configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        return new KafkaAdmin(configs);
    }

    @Bean
    public NewTopic topic1() {
        return compactTopic("topic1");
    }

    @Bean
    public NewTopic topic2() {
        return compactTopic("topic2");
    }

    @Bean
    public NewTopic topic3() {
        return compactTopic("topic3");
    }

    // Shared builder for all demo topics: 3 partitions, replication factor 1
    // (single-broker dev setup), log-compacted cleanup policy.
    private NewTopic compactTopic(String name) {
        return TopicBuilder.name(name)
                .partitions(3)
                .replicas(1)
                .compact()
                .build();
    }
}
生产者配置类
/**
 * Producer wiring: a String/String ProducerFactory plus the KafkaTemplate
 * used by {@code KafkaProducerService} to publish messages.
 */
@Configuration
public class KafkaProducerConfig {

    @Value(value = "${spring.kafka.bootstrap-servers}")
    private String bootstrapAddress;

    /** Factory producing String-keyed, String-valued Kafka producers. */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    /** Thread-safe template shared by all senders in this context. */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
发送消息类
@Service
@Slf4j
public class KafkaProducerService {@Autowiredprivate KafkaTemplate kafkaTemplate;Random random = new Random(10);public void sendMessageNormal(String message, String key, String topic) {int ranPartition = random.nextInt(3);kafkaTemplate.send(createRecord(message, ranPartition, key, topic));log.info("Normal sent topic={}, message={}, partition={}", topic, message, ranPartition);}public void sendMessageWithHeaders(String message, String key, String topic) {int ranPartition = random.nextInt(3);Message providedMessage = MessageBuilder.withPayload(message).setHeader(KafkaHeaders.TOPIC, topic).setHeader(KafkaHeaders.KEY, key).setHeader(KafkaHeaders.PARTITION, ranPartition).build();kafkaTemplate.send(providedMessage);log.info("Header normal sent topic={}, message={}, partition={}", topic, message, ranPartition);}public void sendMessageWithCallback(String message, String key, String topic) {final int ranPartition = random.nextInt(3);final ProducerRecord record = createRecord(message, ranPartition, key, topic);CompletableFuture> future = kafkaTemplate.send(record);future.whenComplete((result, ex) -> {if (ex == null) {handleSuccess(result, message);} else {handleFailure(record, ex);}});}private void handleFailure(ProducerRecord record, Throwable ex) {log.info("Fail to send topic={}, message={} due to : {}", record.topic(), record.value(), ex.getMessage());}private void handleSuccess(SendResult sendResult, String message) {log.info("Sent topic={}, message={}, offset={}, partition={}",sendResult.getRecordMetadata().topic(),message,sendResult.getRecordMetadata().offset(),sendResult.getRecordMetadata().partition());}private ProducerRecord createRecord(String message, int ranPartition, String key, String topic) {return new ProducerRecord(topic, ranPartition, key, message);}}
启动类
/**
 * Producer demo entry point: sends five messages to each of topic1/2/3
 * using the three send styles, waits for the async callbacks, then shuts
 * the context down cleanly.
 */
public class MainApp {
    public static void main(String[] args) throws InterruptedException {
        AnnotationConfigApplicationContext applicationContext =
                new AnnotationConfigApplicationContext(KafkaCommonConfig.class);
        KafkaProducerService kafkaProducerService =
                applicationContext.getBean(KafkaProducerService.class);
        for (int i = 0; i < 5; i++) {
            kafkaProducerService.sendMessageNormal(Uuid.randomUuid().toString(), "key1", "topic1");
            kafkaProducerService.sendMessageWithHeaders(Uuid.randomUuid().toString(), "key2", "topic2");
            kafkaProducerService.sendMessageWithCallback(Uuid.randomUuid().toString(), "key3", "topic3");
        }
        // Give the async send callbacks time to complete. The original spawned
        // a throwaway non-daemon thread whose only job was this sleep.
        Thread.sleep(10_000);
        // Closing the context flushes and releases the underlying Kafka producer.
        applicationContext.close();
    }
}
spring.kafka.bootstrap-servers=192.168.79.177:9092
扫描包下相关配置类,便于AnnotationConfigApplicationContext 加载。
/**
 * Consumer-side root configuration.
 *
 * Component-scans the consumer package so that
 * AnnotationConfigApplicationContext, bootstrapped with this single class,
 * picks up the listener-container config and the @KafkaListener service.
 */
@Configuration
@ComponentScan("pr.iceworld.fernando.spring6.kafka.consumer")
public class KafkaCommonConfig {
}
消费者配置类,配置监听。
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
配置类上需标注 @EnableKafka:Enable Kafka listener annotated endpoints that are created under the covers by an AbstractListenerContainerFactory.
@Configuration
@PropertySource("classpath:application.properties")
public class KafkaConsumerConfig {@Value(value = "${spring.kafka.bootstrap-servers}")private String bootstrapAddress;@Beanpublic ConsumerFactory consumerFactory() {Map props = new HashMap<>();props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);props.put(ConsumerConfig.GROUP_ID_CONFIG, "local");props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);//props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");return new DefaultKafkaConsumerFactory<>(props);}@Beanpublic ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory() {ConcurrentKafkaListenerContainerFactory factory =new ConcurrentKafkaListenerContainerFactory<>();factory.setConsumerFactory(consumerFactory());factory.setConcurrency(3);factory.getContainerProperties().setPollTimeout(3000);return factory;}@Beanpublic ConsumerFactory consumerFactoryAutoCommitFalse() {Map props = new HashMap<>();props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);props.put(ConsumerConfig.GROUP_ID_CONFIG, "localCommitFalse");props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);//props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);return new DefaultKafkaConsumerFactory<>(props);}@Beanpublic ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactoryAutoCommitFalse() {ConcurrentKafkaListenerContainerFactory factory =new 
ConcurrentKafkaListenerContainerFactory<>();factory.setConsumerFactory(consumerFactoryAutoCommitFalse());factory.setConcurrency(3);factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);factory.getContainerProperties().setPollTimeout(3000);return factory;}
}
Topic接收类
/**
 * Listener endpoints demonstrating three consumption styles: plain payload +
 * ConsumerRecord, header injection via @Header, and a partition-pinned
 * listener with manual offset acknowledgment.
 */
@Service
@Slf4j
public class KafkaConsumerService {

    /** Plain listener on topic1 (auto-commit factory, group "local"). */
    @KafkaListener(topics = "topic1",
            containerFactory = "kafkaListenerContainerFactory",
            groupId = "local")
    public void receiveMessage(String message, ConsumerRecord<String, String> consumerRecord) {
        log.info("====> Topic={}, Partition={}, offset={}, Received message={}",
                consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset(), message);
    }

    /** Listener on topic2 pulling routing metadata out of the Kafka headers. */
    @KafkaListener(topics = "topic2",
            containerFactory = "kafkaListenerContainerFactory",
            groupId = "local")
    public void receiveMessageWithHeaders(
            @Payload String message,
            @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
            @Header(KafkaHeaders.RECEIVED_PARTITION) int partition,
            @Header(KafkaHeaders.RECEIVED_KEY) String messageKey,
            @Header(KafkaHeaders.TIMESTAMP_TYPE) String timestampType,
            @Header(KafkaHeaders.RECEIVED_TIMESTAMP) Long timestamp,
            @Header(KafkaHeaders.OFFSET) int offset) {
        log.info("====> Headers topic={}, partition={}, offset={}, messageKey={}, "
                        + "timestampType={}, timestamp={}, Received message={}",
                topic, partition, offset, messageKey, timestampType, timestamp, message);
    }

    /**
     * Listener pinned to topic3 partition 0 using the manual-ack factory;
     * the offset is committed only after acknowledge() is called.
     */
    @KafkaListener(containerFactory = "kafkaListenerContainerFactoryAutoCommitFalse",
            groupId = "localCommitFalse",
            topicPartitions = {
                    @TopicPartition(topic = "topic3", partitions = {"0"})
            })
    public void receiveMessageBySpecificPartition(String message,
            ConsumerRecord<String, String> consumerRecord, Acknowledgment ack) {
        log.info("====> Specific topic={}, partition partition={}, offset={}, Received message={}",
                consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset(), message);
        ack.acknowledge(); // commit this record's offset (AckMode.MANUAL)
    }
}
启动类,启动多个实例模拟消费
/**
 * Consumer demo entry point. Bootstrapping the context starts the
 * @KafkaListener containers; their non-daemon threads keep the JVM alive.
 * Run multiple instances to observe consumer-group partition assignment.
 */
public class MainApp {
    public static void main(String[] args) {
        // Keep the context reference (the original discarded it) and register
        // a shutdown hook so listener containers stop cleanly on JVM exit.
        AnnotationConfigApplicationContext context =
                new AnnotationConfigApplicationContext(KafkaCommonConfig.class);
        context.registerShutdownHook();
    }
}
生产日志
2023-03-10 20:39:16,967 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Normal sent topic=topic1, message=6smhtHX7SJ6hEwnr4Io9kA, partition=0
2023-03-10 20:39:17,030 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Header normal sent topic=topic2, message=QMIol2GlSRuODmK9hDDQcw, partition=0
2023-03-10 20:39:17,041 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Normal sent topic=topic1, message=sRi5iHY7TUqSqzM4TbUF1g, partition=0
2023-03-10 20:39:17,042 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Header normal sent topic=topic2, message=uiffzWw9TCab9u4QAQqoOA, partition=1
2023-03-10 20:39:17,043 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Normal sent topic=topic1, message=Vsl9WmPGQnCn9Ox6R3y-3Q, partition=1
2023-03-10 20:39:17,043 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Header normal sent topic=topic2, message=RSmQ34W9T9GiJsLBwW5XiQ, partition=1
2023-03-10 20:39:17,044 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Normal sent topic=topic1, message=vM447x4yTgqXp1ciulyeSw, partition=1
2023-03-10 20:39:17,044 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Header normal sent topic=topic2, message=WXQldMTxRxiUXhn0Ov2gPg, partition=1
2023-03-10 20:39:17,045 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Normal sent topic=topic1, message=ffYc0g3tR-euzDKO3Y2DQw, partition=0
2023-03-10 20:39:17,046 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Header normal sent topic=topic2, message=xpZy-YswSdeoobQaeZIFZA, partition=0
2023-03-10 20:39:17,053 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Sent topic=topic3, message=m0A92H1xQLq_cfxqNkc6ZQ, offset=69, partition=0
2023-03-10 20:39:17,060 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Sent topic=topic3, message=SwXaJVq7SQCZe3NkanhUBA, offset=70, partition=0
2023-03-10 20:39:17,070 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Sent topic=topic3, message=Qt4G_oXNTfy8gFs43SWp4Q, offset=23, partition=1
2023-03-10 20:39:17,070 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Sent topic=topic3, message=5qWN4-AXT9O2gLLvwH3s6A, offset=23, partition=2
2023-03-10 20:39:17,071 INFO pr.iceworld.fernando.spring6.kafka.producer.service.KafkaProducerService Sent topic=topic3, message=vuaoBOK_SamaeQ1e3MJ-kg, offset=71, partition=0
消费者1 topic3 partitions = {"0"}
2023-03-10 20:39:17,113 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Specific topic=topic3, partition partition=0, offset=69, Received message=m0A92H1xQLq_cfxqNkc6ZQ
2023-03-10 20:39:17,131 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Specific topic=topic3, partition partition=0, offset=70, Received message=SwXaJVq7SQCZe3NkanhUBA
2023-03-10 20:39:17,134 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Specific topic=topic3, partition partition=0, offset=71, Received message=vuaoBOK_SamaeQ1e3MJ-kg
消费者2 topic3 partitions = {"1", "2"}
2023-03-10 20:39:17,021 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Topic=topic1, Partition=0, offset=69, Received message=6smhtHX7SJ6hEwnr4Io9kA
2023-03-10 20:39:17,037 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Headers topic=topic2, partition=0, offset=106, messageKey=key2, timestampType=CREATE_TIME, timestamp=1678451957029, Received message=QMIol2GlSRuODmK9hDDQcw
2023-03-10 20:39:17,051 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Topic=topic1, Partition=0, offset=70, Received message=sRi5iHY7TUqSqzM4TbUF1g
2023-03-10 20:39:17,054 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Specific topic=topic3, partition partition=1, offset=23, Received message=Qt4G_oXNTfy8gFs43SWp4Q
2023-03-10 20:39:17,054 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Specific topic=topic3, partition partition=2, offset=23, Received message=5qWN4-AXT9O2gLLvwH3s6A
2023-03-10 20:39:17,056 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Topic=topic1, Partition=1, offset=46, Received message=Vsl9WmPGQnCn9Ox6R3y-3Q
2023-03-10 20:39:17,056 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Topic=topic1, Partition=1, offset=47, Received message=vM447x4yTgqXp1ciulyeSw
2023-03-10 20:39:17,057 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Headers topic=topic2, partition=1, offset=9, messageKey=key2, timestampType=CREATE_TIME, timestamp=1678451957041, Received message=uiffzWw9TCab9u4QAQqoOA
2023-03-10 20:39:17,058 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Headers topic=topic2, partition=1, offset=10, messageKey=key2, timestampType=CREATE_TIME, timestamp=1678451957043, Received message=RSmQ34W9T9GiJsLBwW5XiQ
2023-03-10 20:39:17,058 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Headers topic=topic2, partition=1, offset=11, messageKey=key2, timestampType=CREATE_TIME, timestamp=1678451957044, Received message=WXQldMTxRxiUXhn0Ov2gPg
2023-03-10 20:39:17,062 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Topic=topic1, Partition=0, offset=71, Received message=ffYc0g3tR-euzDKO3Y2DQw
2023-03-10 20:39:17,064 INFO pr.iceworld.fernando.spring6.kafka.consumer.service.KafkaConsumerService ====> Headers topic=topic2, partition=0, offset=107, messageKey=key2, timestampType=CREATE_TIME, timestamp=1678451957046, Received message=xpZy-YswSdeoobQaeZIFZA
https://docs.spring.io/spring-kafka/reference/html/