<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-stream-binder-kafka</artifactId>
</dependency>
或者:
<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-starter-stream-kafka</artifactId>
</dependency>
// Producer side: REST controller that publishes Person objects through Spring Kafka's KafkaTemplate.
private final KafkaTemplate<String, Object> kafkaTemplate;

@Autowired
public KafkaController(KafkaTemplate<String, Object> kafkaTemplate) {
    this.kafkaTemplate = kafkaTemplate;
}

/**
 * Builds a Person from the path variable and sends it to "test-topic".
 *
 * FIX(review): the original mapping was "/send" with no {name} template,
 * so the @PathVariable could never bind — changed to "/send/{name}".
 * Also typed the raw KafkaTemplate constructor parameter and made the field final.
 */
@GetMapping("/send/{name}")
public Person send(@PathVariable String name) {
    Person person = new Person();
    person.setId(System.currentTimeMillis());
    person.setName(name);
    kafkaTemplate.send("test-topic", person);
    return person;
}
// Consumer side: plain Spring Kafka listener on "test-topic"; relies on the
// configured value deserializer to rebuild the Person payload.
@KafkaListener(topics = "test-topic") public void consume(Person person){ System.out.println(person.toString()); }
// 生产者端错误信息:
There was an unexpected error (type=Internal Server Error, status=500). Can't convert value of class com.service.Person to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer
// 消费者端错误信息:
nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [[B] to [com.service.Person]
// 解决办法: 配置自定义的序列化/反序列化器
// 消费端: KafkaProperties -> Consumer -> valueDeserializer
// 生产端: KafkaProperties -> Producer -> valueSerializer
spring:
  kafka:
    producer:
      valueSerializer: com.service.kafka.ObjectSerializer # 加入自定义序列化方式
    consumer:
      groupId: test
      valueDeserializer: com.service.kafka.ObjectDeSerializer
/**
 * Kafka value serializer backed by JDK object serialization: any Serializable
 * payload is written through an ObjectOutputStream into a byte array.
 */
public class ObjectSerializer implements Serializer<Serializable> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // No configuration required.
    }

    @Override
    public byte[] serialize(String topic, Serializable data) {
        // FIX(review): Kafka passes null for tombstone records — propagate it
        // unchanged instead of NPE-ing / serializing a null marker.
        if (data == null) {
            return null;
        }
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(data);
            return bos.toByteArray();
        } catch (IOException e) {
            // FIX(review): the original swallowed the exception and returned
            // null, silently dropping the record — fail loudly instead.
            throw new IllegalStateException("Failed to serialize value for topic " + topic, e);
        }
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}

/**
 * Inverse of ObjectSerializer: rebuilds the Serializable payload from bytes.
 *
 * SECURITY NOTE(review): Java native deserialization of untrusted bytes is a
 * well-known remote-code-execution vector — only use this against topics whose
 * producers are fully trusted (consider JSON instead, or an ObjectInputFilter).
 */
public class ObjectDeSerializer implements Deserializer<Serializable> {

    @Override
    public void configure(Map configs, boolean isKey) {
        // No configuration required.
    }

    @Override
    public Serializable deserialize(String topic, byte[] bytes) {
        // FIX(review): tombstone records arrive as null — the original passed
        // null straight into ByteArrayInputStream and would have thrown NPE.
        if (bytes == null) {
            return null;
        }
        try (ObjectInputStream is = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            return (Serializable) is.readObject();
        } catch (IOException | ClassNotFoundException e) {
            // FIX(review): the original printed the stack trace and returned
            // null, hiding corrupt/incompatible payloads — fail loudly instead.
            throw new IllegalStateException("Failed to deserialize value from topic " + topic, e);
        }
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}
/**
 * Publishes a Person through the Spring Cloud Stream output channel
 * (bound to "test-topic") instead of a raw KafkaTemplate.
 */
@GetMapping("/stream/{name}")
public Person streamSend(@PathVariable String name) {
    Person payload = new Person();
    payload.setId(System.currentTimeMillis());
    payload.setName(name);
    source.output().send(MessageBuilder.withPayload(payload).build());
    return payload;
}
// Custom Source-style binding interface: declares a single output
// MessageChannel whose binding name is "test-topic".
public interface PersonSource { /** * Name of the output channel. */ String TOPIC = "test-topic"; /** * @return output channel */ @Output(PersonSource.TOPIC) MessageChannel source(); }
// 加入注解 @EnableBinding(value = {Source.class,PersonSource.class}) // 将source替换为新定义的personSource MessageChannel mc = personSource.source();
// 使用以下方式会报错 @KafkaListener(topics = "test-topic") public void consume(Person person){ System.out.println(person.toString()); }
// 以下方式正常 @StreamListener("test-topic") public void streamConsumer(Person person){ System.out.println(person.toString()); }
// Attempt to help the Kafka consumer by attaching an explicit Content-Type header:
mc.send(MessageBuilder.withPayload(person).setHeader("Content-Type","application/bean").build());
通过加入 header 的方式依然不能反序列化成功。
虽然 Spring Cloud Stream Binder 中存在 Spring Kafka 的整合,但是 Spring Kafka 和 Spring Cloud Stream Kafka 在处理数据的生产与消费上是存在差别的,因此在使用上一定要配套使用。
当 Spring Cloud Stream Kafka 发送的消息包含头信息时,Kafka 的 Deserializer 在实现方法回调的时候并不会处理。
通常情况下可以通过 @StreamListener 来监听数据(消息主体);如果需要处理消息的 header 信息,可以通过 SubscribableChannel 来处理。
/**
 * Subscribes directly to the Sink's input channel so the full Message —
 * headers and payload — is available (a @StreamListener only receives
 * the converted payload).
 */
@Bean
public ApplicationRunner createRunner() {
    return args -> personSink.input().subscribe(message ->
            System.out.printf("receive message, header:%s, body:%s",
                    message.getHeaders(), message.getPayload()));
}
但是如果上述代码与如下代码同时存在,那么它们会轮流执行:
// Payload-only listener on the same binding — when combined with a direct
// channel subscription, the two consumers alternate (round-robin dispatch).
@StreamListener("test-topic") public void streamConsumer(Person person){ System.out.println(person.toString()); }
两者均屏蔽了 Stream 的具体实现。无论是 @Input 还是 @Output,它们的 value 都不允许重复(因为 value 会作为 bean 名称,bean 名称不允许重复),可以通过 destination 来声明实际的 topic:
spring:
  cloud:
    stream:
      bindings:
        test-topic-provider:
          destination: test-topic
        test-topic-consume:
          group: test02
          destination: test-topic
/** Name of the output binding (unique binding name; actual topic set via destination). */
String TOPIC = "test-topic-provider";
/** @return output channel */
@Output(PersonSource.TOPIC)
MessageChannel source();
/** Input binding name (unique binding name; actual topic set via destination). */
String INPUT = "test-topic-consume";
/** @return input channel. */
@Input(INPUT)
SubscribableChannel input();
// NOTE(review): this listens on PersonSource.TOPIC ("test-topic-provider"),
// i.e. the OUTPUT binding name — a consumer would normally listen on the
// input binding ("test-topic-consume"); verify against the sink interface.
@StreamListener(PersonSource.TOPIC) public void streamConsumer(Person person){ System.out.println(person.toString()); }
二者实现存在着差别,SubscribableChannel会触发kafka的自定义反序列化,因此Spring Cloud Stream Kafka 是将对象序列化成JSON, 经过JSON反序列化成对象(不通过自定义kafka的Serializer/DeSerializer)。