Kafka consumer with multiple different Avro producers and transactions



I have a Kafka consumer. It consumes a String, converts it into different Avro objects depending on the String, and publishes them to different topics. We need exactly-once semantics (EOS), and the problem we are getting is that the producer marked with @Primary works, but the producer without @Primary fails with the error below. Is there a way to accommodate both?

KafkaConsumer

@Configuration
public class KafkaConsumerConfig {

    @Value("${kafka.server}")
    String server;

    @Value("${kafka.consumer.groupid}")
    String groupid;

    @Autowired
    Tracer tracer;

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, groupid);
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        config.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        config.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 120000);
        config.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 10000);
        //config.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 15000);
        return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(config), tracer);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            KafkaAwareTransactionManager<Object, Object> transactionManager) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<String, String>();
        factory.setConsumerFactory(consumerFactory());
        factory.setAutoStartup(false);
        factory.setConcurrency(2);
        factory.setBatchListener(true);
        factory.getContainerProperties().setAckMode(AckMode.BATCH);
        factory.getContainerProperties().setEosMode(EOSMode.ALPHA);
        factory.getContainerProperties().setTransactionManager(transactionManager);
        return factory;
    }
}

KafkaProducer 1

@Configuration
public class KafkaProducerConfig {

    @Value("${kafka.server}")
    String server;

    @Autowired
    public Tracer tracer;

    String tranId = "eventsanavro";

    @Bean(name = "transactionalProducerFactoryAvro")
    public ProducerFactory<String, TransactionAvroEntity> producerFactoryavro() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroSerializer.class.getName());
        config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        config.put(ProducerConfig.ACKS_CONFIG, "all");
        config.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, tranId);
        config.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        config.put(ProducerConfig.LINGER_MS_CONFIG, "200");
        config.put(ProducerConfig.BATCH_SIZE_CONFIG, Integer.toString(256 * 1024));
        config.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 120000);
        config.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 60000);
        config.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 5);
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, Integer.toString(32768 * 1024));
        config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        config.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, tranId);
        return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(config), tracer);
    }

    @Qualifier("transactionalProducerFactoryAvro")
    @Bean(name = "transactionalKafkaTemplateAvro")
    public KafkaTemplate<String, TransactionAvroEntity> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactoryavro());
    }

    @Qualifier("transactionalProducerFactoryAvro")
    @Bean(name = "transactionalKafkaTransactionManagerAvro")
    public KafkaAwareTransactionManager<?, ?> kafkaTransactionManager(
            ProducerFactory<String, TransactionAvroEntity> producerFactory) {
        return new KafkaTransactionManager<>(producerFactory);
    }
}

KafkaProducer 2

@Configuration
public class KafkaProducerNonAvroConfig {

    @Value("${kafka.server}")
    String server;

    @Autowired
    public Tracer tracer;

    String tranId = "eventsannonavro";

    @Primary
    @Bean(name = "transactionalProducerFactoryNonAvro")
    public ProducerFactory<String, String> producerFactoryNonAvro() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        config.put(ProducerConfig.ACKS_CONFIG, "all");
        config.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, tranId);
        config.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        config.put(ProducerConfig.LINGER_MS_CONFIG, "200");
        config.put(ProducerConfig.BATCH_SIZE_CONFIG, Integer.toString(256 * 1024));
        config.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 120000);
        config.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 60000);
        config.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 5);
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, Integer.toString(32768 * 1024));
        config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        config.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, tranId);
        return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(config), tracer);
    }

    @Primary
    @Qualifier("transactionalProducerFactoryNonAvro")
    @Bean(name = "transactionalKafkaTemplateNonAvro")
    public KafkaTemplate<String, String> kafkatemplate() {
        return new KafkaTemplate<>(producerFactoryNonAvro());
    }

    @Primary
    @Qualifier("transactionalProducerFactoryNonAvro")
    @Bean(name = "transactionalKafkaTransactionManagerNonAvro")
    public KafkaAwareTransactionManager<?, ?> kafkaTransactionManager(ProducerFactory<String, String> producerFactory) {
        return new KafkaTransactionManager<>(producerFactory);
    }
}

ProducerWrapper

@Service
public class KafkaTopicProducer {

    @Autowired
    private KafkaTemplate<String, TransactionAvroEntity> kafkaTemplate;

    @Autowired
    private KafkaTemplate<String, String> kafkaProducerNonAvrokafkaTemplate;

    public void topicProducerAvro(TransactionAvroEntity payload, String topic, Headers headers) {
        ProducerRecord<String, TransactionAvroEntity> producerRecord = new ProducerRecord<String, TransactionAvroEntity>(
                topic, null, UUID.randomUUID().toString(), payload, headers);
        kafkaTemplate.send(producerRecord);
    }

    public void kafkaAvroFlush() {
        kafkaTemplate.flush();
    }

    public void topicProducerNonAvro(String payload, String topic, Headers headers) {
        ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>(topic, null,
                UUID.randomUUID().toString(), payload, headers);
        kafkaProducerNonAvrokafkaTemplate.send(producerRecord);
    }

    public void kafkaNonAvroFlush() {
        kafkaProducerNonAvrokafkaTemplate.flush();
    }
}
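
For reference, a minimal sketch of the batch listener referenced in the stack trace (KafkaConsumerMoz.consume) might look like the following; the routing logic, the topic names, the listener property, and the mapToAvroEntity helper are assumptions for illustration, not code from the question:

@Component
public class KafkaConsumerMoz {

    @Autowired
    private KafkaTopicProducer kafkaTopicProducer;

    // The container starts a Kafka transaction around this method because a
    // KafkaAwareTransactionManager is set on the listener container factory;
    // both wrapper calls below are expected to join that transaction.
    @KafkaListener(topics = "${kafka.consumer.topic}", containerFactory = "kafkaListenerContainerFactory")
    public boolean consume(List<ConsumerRecord<String, String>> records, Consumer<?, ?> consumer) {
        for (ConsumerRecord<String, String> record : records) {
            // hypothetical String -> Avro conversion and routing
            TransactionAvroEntity entity = mapToAvroEntity(record.value());
            kafkaTopicProducer.topicProducerAvro(entity, "avro.out.topic", record.headers());
            kafkaTopicProducer.topicProducerNonAvro(record.value(), "nonavro.out.topic", record.headers());
        }
        return true;
    }

    private TransactionAvroEntity mapToAvroEntity(String value) {
        // placeholder for the real mapping logic
        return new TransactionAvroEntity();
    }
}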

Error: java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record.

Full stack trace:

2022-05-03 09:35:11,358  INFO  [nerMoz-0-C-1] o.a.kafka.clients.consumer.KafkaConsumer : [Consumer clientId=consumer-ifhEventSanitizer-1, groupId=ifhEventSanitizer] Seeking to offset 0 for partition za.local.file.singleLineGLTransactionEvent.1-0 
2022-05-03 09:35:11,883  INFO  [nerMoz-0-C-1] o.a.kafka.clients.producer.KafkaProducer : [Producer clientId=producer-eventsanavroifhEventSanitizer.za.local.file.singleLineGLTransactionEvent.1.0, transactionalId=eventsanavroifhEventSanitizer.za.local.file.singleLineGLTransactionEvent.1.0] Aborting incomplete transaction 
2022-05-03 09:35:11,884  ERROR [nerMoz-0-C-1] essageListenerContainer$ListenerConsumer : Transaction rolled back 
org.springframework.kafka.listener.ListenerExecutionFailedException: Listener method 'public boolean com.fnb.fin.ifhEventSanitizer.kafka.KafkaConsumerMoz.consume(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<java.lang.String, java.lang.String>>,org.apache.kafka.clients.consumer.Consumer<?, ?>)' threw exception; nested exception is java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record; nested exception is java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2372)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:2008)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessageWithRecordsOrList(KafkaMessageListenerContainer.java:1978)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessage(KafkaMessageListenerContainer.java:1930)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchListener(KafkaMessageListenerContainer.java:1842)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.access$2100(KafkaMessageListenerContainer.java:518)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer$1.doInTransactionWithoutResult(KafkaMessageListenerContainer.java:1749)
at org.springframework.transaction.support.TransactionCallbackWithoutResult.doInTransaction(TransactionCallbackWithoutResult.java:36)
at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:140)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchListenerInTx(KafkaMessageListenerContainer.java:1740)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchListener(KafkaMessageListenerContainer.java:1722)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1704)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1274)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1266)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1161)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:832)
Suppressed: org.springframework.kafka.listener.ListenerExecutionFailedException: Restored Stack Trace
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:363)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.invoke(BatchMessagingMessageListenerAdapter.java:180)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:172)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:61)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:1988)
Caused by: java.lang.IllegalStateException: No transaction is in process; possible solutions: run the template operation within the scope of a template.executeInTransaction() operation, start a transaction with @Transactional before invoking the template method, run in a transaction started by a listener container when consuming a record
at org.springframework.util.Assert.state(Assert.java:76)
at org.springframework.kafka.core.KafkaTemplate.getTheProducer(KafkaTemplate.java:657)
at org.springframework.kafka.core.KafkaTemplate.doSend(KafkaTemplate.java:569)
at org.springframework.kafka.core.KafkaTemplate.send(KafkaTemplate.java:406)
at com.fnb.fin.ifhEventSanitizer.kafka.KafkaTopicProducer.topicProducerNonAvro(KafkaTopicProducer.java:44)
at com.fnb.fin.ifhEventSanitizer.kafka.KafkaConsumerMoz.consume(KafkaConsumerMoz.java:108)
at jdk.internal.reflect.GeneratedMethodAccessor111.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:564)
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:171)
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:120)
at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:56)
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:347)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.invoke(BatchMessagingMessageListenerAdapter.java:180)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:172)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:61)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:1988)
... 16 common frames omitted

The KafkaTransactionManager can only start a transaction in one producer, from one factory; even if it could start two, you would lose the EOS guarantees because they would be different transactions, so if you performed sends on both, they would not be in the same transaction.

To solve this, you should use a single producer factory with a DelegatingByTypeSerializer (or DelegatingByTopicSerializer).

public ProducerFactory<String, Object> producerFactory() {
    ...
    Map<Class<?>, Serializer> delegates = new LinkedHashMap<>(); // retains the order when iterating
    delegates.put(String.class, new StringSerializer());
    delegates.put(Object.class, new JsonSerializer<>());
    DelegatingByTypeSerializer dbts = new DelegatingByTypeSerializer(delegates, true);
    return new TracingProducerFactory<>(
            new DefaultKafkaProducerFactory<>(config, new StringSerializer(), dbts), tracer);
}
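
With that single factory in place, one KafkaTemplate and one KafkaTransactionManager can be built from it and used for both payload types, so every send joins the transaction started by the listener container. A rough sketch under those assumptions (bean and method names are illustrative; JsonSerializer stands in for the Avro serializer as in the snippet above, and your AvroSerializer could be registered for TransactionAvroEntity instead):

@Bean
public KafkaTemplate<String, Object> transactionalKafkaTemplate(ProducerFactory<String, Object> producerFactory) {
    // one template handles both Avro and plain-String values; the
    // DelegatingByTypeSerializer picks the delegate per record value type
    return new KafkaTemplate<>(producerFactory);
}

@Bean
public KafkaAwareTransactionManager<?, ?> kafkaTransactionManager(ProducerFactory<String, Object> producerFactory) {
    // the single transaction manager to inject into the listener container factory
    return new KafkaTransactionManager<>(producerFactory);
}

The producer wrapper would then inject just the one KafkaTemplate<String, Object> and use it for both topicProducerAvro and topicProducerNonAvro.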
