example.avro.User Java Examples

The following examples show how to use example.avro.User, the SpecificRecord class generated by the Avro compiler from a User schema. The examples are extracted from open source projects; the source file, originating project, and license are noted above each one.
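
None of the projects' schema files are reproduced in this listing, but the shape of the schema can be inferred from the examples: the field names come from the GenericRecord puts in Example #1, and favoriteNumber and favoriteColor must be nullable unions because the builders set them to null (Example #2). The union branch order below is inferred from the serialized bytes in Example #5 (null encodes as branch index 1, i.e. 0x02). A hedged sketch of that schema, built programmatically with org.apache.avro.SchemaBuilder:

Schema schema = SchemaBuilder.record("User").namespace("example.avro")
	.fields()
	// required string, matching record.put("name", ...) in Example #1
	.requiredString("name")
	// int-then-null order matches the 0x02 null branch marker in the Example #5 bytes
	.name("favoriteNumber").type().unionOf().intType().and().nullType().endUnion().noDefault()
	.name("favoriteColor").type().unionOf().stringType().and().nullType().endUnion().noDefault()
	.endRecord();
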
Example #1
Source File: AvroCodecTests.java    From schema-evolution-samples with Apache License 2.0
@Test
public void genericEncoderV1SpecificDecoderV1() throws Exception {
	Schema schema = load("users_v1.schema");
	// Stub the registry client so the codec resolves schema id 1 to users_v1
	SchemaRegistryClient client = mock(SchemaRegistryClient.class);
	AvroCodec codec = new AvroCodec();
	codec.setSchemaRegistryClient(client);
	when(client.register(any())).thenReturn(1);
	when(client.fetch(eq(1))).thenReturn(schema);
	// Encode a GenericRecord, then decode into the generated SpecificRecord type
	GenericRecord record = new GenericData.Record(schema);
	record.put("name", "joe");
	record.put("favoriteNumber", 42);
	record.put("favoriteColor", "blue");
	byte[] results = codec.encode(record);
	User decoded = codec.decode(results, User.class);
	Assert.assertEquals(record.get("name").toString(), decoded.getName().toString());
}
 
Example #2
Source File: SpringKafkaApplicationTest.java    From spring-kafka with MIT License
@Test
public void testReceiver() throws Exception {
  User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
      .setFavoriteNumber(null).build();
  sender.send(user);

  receiver.getLatch().await(10000, TimeUnit.MILLISECONDS);
  assertThat(receiver.getLatch().getCount()).isEqualTo(0);
}
 
Example #3
Source File: AvroCodecTests.java    From schema-evolution-samples with Apache License 2.0
@Test
public void specificEncoderV1GenericDecoderV1() throws Exception {
	Schema schema = load("users_v1.schema");
	SchemaRegistryClient client = mock(SchemaRegistryClient.class);
	AvroCodec codec = new AvroCodec();
	codec.setSchemaRegistryClient(client);
	when(client.register(any())).thenReturn(1);
	when(client.fetch(eq(1))).thenReturn(schema);
	// Mirror of Example #1: encode from the SpecificRecord, decode to a GenericRecord
	User user = User.newBuilder().setName("joe").setFavoriteColor("blue").setFavoriteNumber(42).build();
	byte[] results = codec.encode(user);
	GenericRecord decoded = codec.decode(results, GenericRecord.class);
	Assert.assertEquals(user.getName().toString(), decoded.get("name").toString());
}
 
Example #4
Source File: AvroCodecTests.java    From schema-evolution-samples with Apache License 2.0
@Test
public void schemaResolveTest() throws Exception {
	User user = new User();
	// Resolve the schema reflectively from the generated class, not from the instance
	Class<? extends User> aClass = user.getClass();
	Schema schema = ((GenericRecord) aClass.getDeclaredConstructor().newInstance()).getSchema();
	System.out.println(schema);
}
 
Example #5
Source File: AvroSerializerTest.java    From spring-kafka with MIT License
@Test
public void testSerialize() {
  User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
      .setFavoriteNumber(null).build();

  AvroSerializer<User> avroSerializer = new AvroSerializer<>();
  assertThat(avroSerializer.serialize("avro.t", user))
      .isEqualTo(DatatypeConverter.parseHexBinary("104A6F686E20446F6502000A677265656E"));

  avroSerializer.close();
}
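
The expected hex literal is the raw Avro binary body of this User, with no magic byte or schema-id framing: 0x10 is the zigzag-encoded string length 8 followed by the bytes of "John Doe", 0x02 selects the null branch of the favoriteNumber union, 0x00 selects the string branch of the favoriteColor union, and 0x0A is length 5 followed by "green". A minimal sketch that reads the same bytes back with stock Avro classes (SpecificDatumReader from org.apache.avro.specific, Decoder/DecoderFactory from org.apache.avro.io):

byte[] data = DatatypeConverter.parseHexBinary("104A6F686E20446F6502000A677265656E");
DatumReader<User> reader = new SpecificDatumReader<>(User.class);
// The array is a bare Avro body, so a plain BinaryDecoder can read it directly
Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
User decoded = reader.read(null, decoder);
// decoded.getName() -> "John Doe", decoded.getFavoriteColor() -> "green"
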
 
Example #6
Source File: AvroDeserializerTest.java    From spring-kafka with MIT License
@Test
public void testDeserialize() {
  User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
      .setFavoriteNumber(null).build();

  byte[] data = DatatypeConverter.parseHexBinary("104A6F686E20446F6502000A677265656E");

  AvroDeserializer<User> avroDeserializer = new AvroDeserializer<>(User.class);

  assertThat(avroDeserializer.deserialize("avro.t", data)).isEqualTo(user);
  avroDeserializer.close();
}
 
Example #7
Source File: ReceiverConfig.java    From spring-kafka with MIT License
@Bean
public ConcurrentKafkaListenerContainerFactory<String, User> kafkaListenerContainerFactory() {
  ConcurrentKafkaListenerContainerFactory<String, User> factory =
      new ConcurrentKafkaListenerContainerFactory<>();
  factory.setConsumerFactory(consumerFactory());

  return factory;
}
 
Example #8
Source File: AvroDeserializerTest.java    From spring-kafka with MIT License
@Test
public void testDeserialize() {
  User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
      .setFavoriteNumber(null).build();

  byte[] data = DatatypeConverter.parseHexBinary("104A6F686E20446F6502000A677265656E");
  AvroDeserializer<User> avroDeserializer = new AvroDeserializer<>(User.class);

  assertThat(avroDeserializer.deserialize("avro-bijection.t", data)).isEqualTo(user);
  avroDeserializer.close();
}
 
Example #9
Source File: AvroSerializerTest.java    From spring-kafka with MIT License
@Test
public void testSerialize() {
  User user = User.newBuilder().setName("John Doe").setFavoriteColor("green")
      .setFavoriteNumber(null).build();

  AvroSerializer<User> avroSerializer = new AvroSerializer<>();

  assertThat(avroSerializer.serialize("avro-bijection.t", user))
      .isEqualTo(DatatypeConverter.parseHexBinary("104A6F686E20446F6502000A677265656E"));
  avroSerializer.close();
}
 
Example #10
Source File: SpringKafkaApplicationTest.java    From spring-kafka with MIT License
@Test
public void testReceiver() throws Exception {
  User user = User.newBuilder().setName("John Doe").setFavoriteColor("blue")
      .setFavoriteNumber(null).build();
  sender.send(user);

  receiver.getLatch().await(10000, TimeUnit.MILLISECONDS);
  assertThat(receiver.getLatch().getCount()).isEqualTo(0);
}
 
Example #11
Source File: TestAvroConsumerConfluent.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
	// parse input arguments
	final ParameterTool parameterTool = ParameterTool.fromArgs(args);

	if (parameterTool.getNumberOfParameters() < 6) {
		System.out.println("Missing parameters!\n" +
			"Usage: Kafka --input-topic <topic> --output-topic <topic> " +
			"--bootstrap.servers <kafka brokers> " +
			"--zookeeper.connect <zk quorum> " +
			"--schema-registry-url <confluent schema registry> --group.id <some id>");
		return;
	}
	Properties config = new Properties();
	config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
	config.setProperty("group.id", parameterTool.getRequired("group.id"));
	config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
	String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().disableSysoutLogging();

	DataStreamSource<User> input = env
		.addSource(
			new FlinkKafkaConsumer010<>(
				parameterTool.getRequired("input-topic"),
				ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
				config).setStartFromEarliest());

	SingleOutputStreamOperator<String> mapToString = input
		.map((MapFunction<User, String>) SpecificRecordBase::toString);

	FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
		parameterTool.getRequired("output-topic"),
		new SimpleStringSchema(),
		config);

	mapToString.addSink(stringFlinkKafkaProducer010);
	env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
 
Example #12
Source File: Receiver.java    From spring-kafka with MIT License
@KafkaListener(topics = "${kafka.topic.avro}")
public void receive(User user) {
  LOGGER.info("received user='{}'", user.toString());
  latch.countDown();
}
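
The latch counted down here and awaited in the SpringKafkaApplicationTest examples (#2 and #10) is not part of this listing; a minimal sketch of the assumed supporting fields:

private static final Logger LOGGER = LoggerFactory.getLogger(Receiver.class);

// lets a test block until at least one message has been received
private final CountDownLatch latch = new CountDownLatch(1);

public CountDownLatch getLatch() {
  return latch;
}
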
 
Example #13
Source File: ReceiverConfig.java    From spring-kafka with MIT License
@Bean
public ConsumerFactory<String, User> consumerFactory() {
  return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
      new AvroDeserializer<>(User.class));
}
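
The consumerConfigs() helper is not shown in this listing; a minimal sketch, assuming a hypothetical bootstrapServers value injected from application properties:

@Bean
public Map<String, Object> consumerConfigs() {
  Map<String, Object> props = new HashMap<>();
  // hypothetical field, e.g. @Value("${kafka.bootstrap-servers}") private String bootstrapServers;
  props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  props.put(ConsumerConfig.GROUP_ID_CONFIG, "avro");
  // the key/value deserializers are passed to DefaultKafkaConsumerFactory above,
  // so they are not configured here
  return props;
}
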
 
Example #14
Source File: SenderConfig.java    From spring-kafka with MIT License
@Bean
public KafkaTemplate<String, User> kafkaTemplate() {
  return new KafkaTemplate<>(producerFactory());
}
 
Example #15
Source File: SenderConfig.java    From spring-kafka with MIT License
@Bean
public ProducerFactory<String, User> producerFactory() {
  return new DefaultKafkaProducerFactory<>(producerConfigs());
}
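
Likewise, producerConfigs() is not shown; a minimal sketch under the same assumptions, wiring the custom AvroSerializer from Example #5 as the value serializer:

@Bean
public Map<String, Object> producerConfigs() {
  Map<String, Object> props = new HashMap<>();
  // bootstrapServers is the same hypothetical injected property as above
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroSerializer.class);
  return props;
}
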
 
Example #16
Source File: Sender.java    From spring-kafka with MIT License
public void send(User user) {
  LOGGER.info("sending user='{}'", user.toString());
  kafkaTemplate.send(avroTopic, user);
}
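
The kafkaTemplate and avroTopic members are assumed rather than shown; a sketch of the likely wiring, with the topic property matching the @KafkaListener in Example #12:

private static final Logger LOGGER = LoggerFactory.getLogger(Sender.class);

@Autowired
private KafkaTemplate<String, User> kafkaTemplate;

// matches topics = "${kafka.topic.avro}" on the receiving side
@Value("${kafka.topic.avro}")
private String avroTopic;
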
 
Example #17
Source File: Receiver.java    From spring-kafka with MIT License
@KafkaListener(topics = "${kafka.topic.avro-bijection}")
public void receive(User user) {
  LOGGER.info("received user='{}'", user.toString());
  latch.countDown();
}
 
Example #18
Source File: Sender.java    From spring-kafka with MIT License
public void send(User user) {
  LOGGER.info("sending user='{}'", user.toString());
  kafkaTemplate.send(avroBijectionTopic, user);
}
 
Example #19
Source File: TestAvroConsumerConfluent.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
	// parse input arguments
	final ParameterTool parameterTool = ParameterTool.fromArgs(args);

	if (parameterTool.getNumberOfParameters() < 6) {
		System.out.println("Missing parameters!\n" +
			"Usage: Kafka --input-topic <topic> --output-topic <topic> " +
			"--bootstrap.servers <kafka brokers> " +
			"--zookeeper.connect <zk quorum> " +
			"--schema-registry-url <confluent schema registry> --group.id <some id>");
		return;
	}
	Properties config = new Properties();
	config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
	config.setProperty("group.id", parameterTool.getRequired("group.id"));
	config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
	String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().disableSysoutLogging();

	DataStreamSource<User> input = env
		.addSource(
			new FlinkKafkaConsumer010<>(
				parameterTool.getRequired("input-topic"),
				ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
				config).setStartFromEarliest());

	SingleOutputStreamOperator<String> mapToString = input
		.map((MapFunction<User, String>) SpecificRecordBase::toString);

	FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
		parameterTool.getRequired("output-topic"),
		new SimpleStringSchema(),
		config);

	mapToString.addSink(stringFlinkKafkaProducer010);
	env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
 
Example #20
Source File: TestAvroConsumerConfluent.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
	// parse input arguments
	final ParameterTool parameterTool = ParameterTool.fromArgs(args);

	if (parameterTool.getNumberOfParameters() < 7) {
		System.out.println("Missing parameters!\n" +
			"Usage: Kafka --input-topic <topic> --output-string-topic <topic> --output-avro-topic <topic> " +
			"--output-subject <subject> --bootstrap.servers <kafka brokers> " +
			"--schema-registry-url <confluent schema registry> --group.id <some id>");
		return;
	}
	Properties config = new Properties();
	config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
	config.setProperty("group.id", parameterTool.getRequired("group.id"));
	String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStreamSource<User> input = env
		.addSource(
			new FlinkKafkaConsumer010<>(
				parameterTool.getRequired("input-topic"),
				ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
				config).setStartFromEarliest());

	SingleOutputStreamOperator<String> mapToString = input
		.map((MapFunction<User, String>) SpecificRecordBase::toString);

	FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
		parameterTool.getRequired("output-string-topic"),
		new SimpleStringSchema(),
		config);
	mapToString.addSink(stringFlinkKafkaProducer010);

	FlinkKafkaProducer010<User> avroFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
			parameterTool.getRequired("output-avro-topic"),
			ConfluentRegistryAvroSerializationSchema.forSpecific(User.class, parameterTool.getRequired("output-subject"), schemaRegistryUrl),
			config);
	input.addSink(avroFlinkKafkaProducer010);

	env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}