diff --git a/data-prepper-plugins/kafka-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumerTest.java b/data-prepper-plugins/kafka-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumerTest.java
index 69ab72810a..ef6b2fbab2 100644
--- a/data-prepper-plugins/kafka-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumerTest.java
+++ b/data-prepper-plugins/kafka-plugins/src/test/java/org/opensearch/dataprepper/plugins/kafka/consumer/KafkaCustomConsumerTest.java
@@ -274,16 +274,13 @@ public void testBufferOverflowPauseResume() throws InterruptedException, Excepti
     }
 
     @Test
-    public void testKafkaMetadata() {
+    public void testKafkaMetadata() throws Exception {
         String topic = topicConfig.getName();
         consumerRecords = createPlainTextRecords(topic, 0L);
         when(kafkaConsumer.poll(any(Duration.class))).thenReturn(consumerRecords);
         consumer = createObjectUnderTest("plaintext", false);
-
-        try {
-            consumer.onPartitionsAssigned(List.of(new TopicPartition(topic, testPartition)));
-            consumer.consumeRecords();
-        } catch (Exception e){}
+        consumer.onPartitionsAssigned(List.of(new TopicPartition(topic, testPartition)));
+        consumer.consumeRecords();
         final Map.Entry<Collection<Record<Event>>, CheckpointState> bufferRecords = buffer.read(1000);
         ArrayList<Record<Event>> bufferedRecords = new ArrayList<>(bufferRecords.getKey());
         Assertions.assertEquals(consumerRecords.count(), bufferedRecords.size());