spring-kafka 2.7.0
Using @RetryableTopic causes the listener to go into an infinite loop when an exception occurs
The retry logic itself works as expected: failed messages are pushed to the correct retry topics. However, the listeners appear to keep re-reading the same message indefinitely.
I tried toggling the commented-out lines in the configuration below (see the sketch after the config), but it made no difference.
KafkaConfiguration.java
@Configuration
//@EnableKafkaRetryTopic
@EnableKafka
@Slf4j
@ConditionalOnProperty(value = "kafka.config.enabled", havingValue = "true", matchIfMissing = false)
public class KafkaConfiguration {

    @Autowired
    ReformerKafkaProperties kafkaProperties;

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Object.class);
        configProps.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> consumerProps = new HashMap<>();
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
//        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
//        consumerProps.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaProperties.getConsumerGroup());
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Object.class);
        consumerProps.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
        return new DefaultKafkaConsumerFactory<>(consumerProps);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<String, String>();
        factory.setConsumerFactory(consumerFactory());
//        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.RECORD);
//        factory.setErrorHandler(new SeekToCurrentErrorHandler(new FixedBackOff(0L, 0)));
        return factory;
    }
}
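For reference, the variant I tried is simply the container factory above with the commented lines switched on; a sketch of that combination (not new configuration) looks like this:

@Bean
public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory());
    // commit the offset of each record as soon as the listener returns
    factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.RECORD);
    // FixedBackOff(0L, 0) = no delay and no in-place retries before the record is recovered
    factory.setErrorHandler(new SeekToCurrentErrorHandler(new FixedBackOff(0L, 0)));
    return factory;
}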
AsyncReformationKafkaListener.java
@Slf4j
@Component
@ConditionalOnProperty(value = "kafka.config.enabled", havingValue = "true", matchIfMissing = false)
public class AsyncReformationKafkaListener {

    @RetryableTopic(
            backoff = @Backoff(value = 30000),
            attempts = "3",
            autoCreateTopics = "true",
            include = RuntimeException.class)
    @KafkaListener(
            topics = "${kafka.config.async-topic}",
            groupId = "${kafka.config.consumer-group}")
    public void executeReformation(String messageString, @Header(KafkaHeaders.RECEIVED_TOPIC) String topic) {
        log.info("Kafka msg at topic {}", topic);
        throw new RuntimeException();
    }
}
Only one message was produced to reformer-async-local, yet it got duplicated exponentially across the retry topics:
a13400412@MAC-a13400412 bin % sh kafka-get-offsets.sh --bootstrap-server=localhost:9092 --topic=reformer-async-local-retry-1
reformer-async-local-retry-1:0:1492
a13400412@MAC-a13400412 bin % sh kafka-get-offsets.sh --bootstrap-server=localhost:9092 --topic=reformer-async-local-retry-0
reformer-async-local-retry-0:0:175
a13400412@MAC-a13400412 bin % sh kafka-get-offsets.sh --bootstrap-server=localhost:9092 --topic=reformer-async-local
reformer-async-local:0:1
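kafka-get-offsets.sh only reports the log-end offset of each topic; to see what the listener's group has actually committed (and therefore whether the same record keeps being re-fetched), the standard kafka-consumer-groups.sh tool could be used. The group name below is a placeholder for whatever ${kafka.config.consumer-group} resolves to:

sh kafka-consumer-groups.sh --bootstrap-server localhost:9092 --describe --group reformer-consumer-group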