@@ -17,13 +17,19 @@
 package org.springframework.batch.item.kafka;
 
 import java.time.Duration;
-import java.util.Properties;
+import java.util.*;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.junit.jupiter.api.Test;
+import org.mockito.MockedConstruction;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
+import org.springframework.batch.item.ExecutionContext;
+
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.mockConstruction;
 
 /**
  * @author Mathieu Ouellet
@@ -77,4 +83,144 @@ void testValidation() {
 		assertEquals("pollTimeout must not be negative", exception.getMessage());
 	}
 
+	@Test
+	void testExecutionContextSerializationWithJackson() throws Exception {
+		Properties consumerProperties = new Properties();
+		consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "mockServer");
+		consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "testGroup");
+		consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+		consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+
+		KafkaItemReader<String, String> reader = new KafkaItemReader<>(consumerProperties, "testTopic", 0, 1);
+		reader.setName("kafkaItemReader");
+
+		// Simulate how Jackson would serialize/deserialize the offset data
+		ExecutionContext executionContext = new ExecutionContext();
+		List<Map<String, Object>> offsets = new ArrayList<>();
+
+		Map<String, Object> offset1 = new HashMap<>();
+		offset1.put("topic", "testTopic");
+		offset1.put("partition", 0);
+		offset1.put("offset", 100L);
+		offsets.add(offset1);
+
+		Map<String, Object> offset2 = new HashMap<>();
+		offset2.put("topic", "testTopic");
+		offset2.put("partition", 1);
+		offset2.put("offset", 200L);
+		offsets.add(offset2);
+
+		// Simulate Jackson serialization/deserialization
+		ObjectMapper objectMapper = new ObjectMapper();
+		String serialized = objectMapper.writeValueAsString(offsets);
+		List<Map<String, Object>> deserializedOffsets = objectMapper.readValue(serialized, List.class);
+
+		executionContext.put("kafkaItemReader.topic.partition.offsets", deserializedOffsets);
+
+		try (MockedConstruction<org.apache.kafka.clients.consumer.KafkaConsumer> mockedConstruction = mockConstruction(
+				org.apache.kafka.clients.consumer.KafkaConsumer.class)) {
+
+			reader.open(executionContext);
+
+			ExecutionContext newContext = new ExecutionContext();
+			reader.update(newContext);
+
+			List<Map<String, Object>> savedOffsets = (List<Map<String, Object>>) newContext.get("kafkaItemReader.topic.partition.offsets");
+			assertNotNull(savedOffsets);
+			assertEquals(2, savedOffsets.size());
+
+			boolean foundPartition0 = false;
+			boolean foundPartition1 = false;
+			for (Map<String, Object> offsetEntry : savedOffsets) {
+				String topic = (String) offsetEntry.get("topic");
+				Integer partition = (Integer) offsetEntry.get("partition");
+				Long offset = (Long) offsetEntry.get("offset");
+
+				assertEquals("testTopic", topic);
+				assertNotNull(offset);
+
+				if (partition == 0) {
+					foundPartition0 = true;
+					assertEquals(101L, offset); // restored offset + 1
+				} else if (partition == 1) {
+					foundPartition1 = true;
+					assertEquals(201L, offset); // restored offset + 1
+				}
+			}
+
+			assertTrue(foundPartition0);
+			assertTrue(foundPartition1);
+		}
+	}
+
+	@Test
+	void testExecutionContextWithStringKeys() throws Exception {
+		Properties consumerProperties = new Properties();
+		consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "mockServer");
+		consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "testGroup");
+		consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+		consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+
+		KafkaItemReader<String, String> reader = new KafkaItemReader<>(consumerProperties, "testTopic", 0, 1);
+		reader.setName("kafkaItemReader");
+
+		// Create ExecutionContext with list of maps (as it would be after Jackson
+		// deserialization)
+		ExecutionContext executionContext = new ExecutionContext();
+		List<Map<String, Object>> storedOffsets = new ArrayList<>();
+
+		Map<String, Object> offset1 = new HashMap<>();
+		offset1.put("topic", "testTopic");
+		offset1.put("partition", 0);
+		offset1.put("offset", 100L);
+		storedOffsets.add(offset1);
+
+		Map<String, Object> offset2 = new HashMap<>();
+		offset2.put("topic", "testTopic");
+		offset2.put("partition", 1);
+		offset2.put("offset", 200L);
+		storedOffsets.add(offset2);
+
+		executionContext.put("kafkaItemReader.topic.partition.offsets", storedOffsets);
+
+		try (MockedConstruction<org.apache.kafka.clients.consumer.KafkaConsumer> mockedConstruction = mockConstruction(
+				org.apache.kafka.clients.consumer.KafkaConsumer.class)) {
+
+			reader.open(executionContext);
+
+			// Verify that offsets are saved correctly
+			ExecutionContext newContext = new ExecutionContext();
+			reader.update(newContext);
+
+			List<Map<String, Object>> savedOffsets = (List<Map<String, Object>>) newContext.get("kafkaItemReader.topic.partition.offsets");
+			assertNotNull(savedOffsets);
+			assertEquals(2, savedOffsets.size());
+		}
+	}
+
+	@Test
+	void testJacksonSerializationFormat() throws Exception {
+		// Test to verify the actual format when Jackson serializes our offset structure
+		List<Map<String, Object>> offsets = new ArrayList<>();
+
+		Map<String, Object> offset1 = new HashMap<>();
+		offset1.put("topic", "test-topic");
+		offset1.put("partition", 0);
+		offset1.put("offset", 100L);
+		offsets.add(offset1);
+
+		ObjectMapper objectMapper = new ObjectMapper();
+		String serialized = objectMapper.writeValueAsString(offsets);
+
+		// Verify the structure can be deserialized correctly
+		List<Map<String, Object>> deserialized = objectMapper.readValue(serialized, List.class);
+		assertEquals(1, deserialized.size());
+
+		Map<String, Object> deserializedOffset = deserialized.get(0);
+		assertEquals("test-topic", deserializedOffset.get("topic"));
+		// Jackson may deserialize numbers as Integer or Long depending on the value
+		assertEquals(0, ((Number) deserializedOffset.get("partition")).intValue());
+		assertEquals(100L, ((Number) deserializedOffset.get("offset")).longValue());
+	}
+
 }
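
A note on the `((Number) …).intValue()` and `longValue()` assertions above: Jackson's untyped binding maps a JSON integer to the narrowest Java type that can hold it, so a `long` offset written as `100` round-trips as an `Integer`, while a value larger than `Integer.MAX_VALUE` comes back as a `Long`. A minimal standalone sketch of that behavior (the demo class is hypothetical, not part of the patch):

```java
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical demo class illustrating Jackson's numeric narrowing on
// untyped reads; it is not part of the patch above.
class JacksonNumericRoundTripDemo {

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();

		// A long whose value fits in an int is restored as an Integer.
		Object small = mapper.readValue(mapper.writeValueAsString(100L), Object.class);
		System.out.println(small.getClass().getSimpleName()); // Integer

		// A value beyond Integer.MAX_VALUE is restored as a Long.
		Object large = mapper.readValue(mapper.writeValueAsString(3_000_000_000L), Object.class);
		System.out.println(large.getClass().getSimpleName()); // Long

		// Widening through Number handles both cases uniformly, which is what
		// testJacksonSerializationFormat relies on.
		long offset = ((Number) small).longValue();
		System.out.println(offset); // 100
	}
}
```

This narrowing is also why `testExecutionContextSerializationWithJackson` feeds the reader offsets that have been through a real `ObjectMapper` round trip rather than the original `Long`-valued maps.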
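The tests deliberately read back with the raw `List.class`, mirroring the untyped `List<Map<String, Object>>` that a Jackson-based execution-context deserialization hands the reader. Where a typed result is wanted instead, Jackson's `TypeReference` is the usual alternative; a small illustrative sketch (again hypothetical, not part of the patch):

```java
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical demo class; not part of the patch above.
class TypedOffsetReadDemo {

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		String json = "[{\"topic\":\"test-topic\",\"partition\":0,\"offset\":100}]";

		// TypeReference captures the full generic type, avoiding the unchecked
		// cast a raw List.class read requires. Small numbers are still bound
		// to Integer, so widening through Number remains necessary.
		List<Map<String, Object>> offsets = mapper.readValue(json,
				new TypeReference<List<Map<String, Object>>>() {});
		System.out.println(((Number) offsets.get(0).get("offset")).longValue()); // 100
	}
}
```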