Java Code Examples for io.confluent.kafka.serializers.KafkaAvroDeserializer#configure()
The following examples show how to use
io.confluent.kafka.serializers.KafkaAvroDeserializer#configure().
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: ConfluentRegistryCompatibleResourceTest.java From registry with Apache License 2.0 | 6 votes |
@Test public void testConfluentSerDes() throws Exception { org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA); GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build(); Map<String, Object> config = new HashMap<>(); config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString()); KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(); kafkaAvroSerializer.configure(config, false); byte[] bytes = kafkaAvroSerializer.serialize("topic", record); KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(); kafkaAvroDeserializer.configure(config, false); GenericRecord result = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes); LOG.info(result.toString()); }
Example 2
Source File: FkJoinTableToTable.java From kafka-tutorials with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) { final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(); final KafkaAvroSerializer serializer = new KafkaAvroSerializer(); final Map<String, String> config = new HashMap<>(); config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url")); deserializer.configure(config, isKey); serializer.configure(config, isKey); return (Serde<T>)Serdes.serdeFrom(serializer, deserializer); }
Example 3
Source File: CogroupingStreams.java From kafka-tutorials with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) { final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(); final KafkaAvroSerializer serializer = new KafkaAvroSerializer(); final Map<String, String> config = new HashMap<>(); config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url")); deserializer.configure(config, isKey); serializer.configure(config, isKey); return (Serde<T>)Serdes.serdeFrom(serializer, deserializer); }
Example 4
Source File: DynamicOutputTopic.java From kafka-tutorials with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) { final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(); final KafkaAvroSerializer serializer = new KafkaAvroSerializer(); final Map<String, String> config = new HashMap<>(); config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url")); deserializer.configure(config, isKey); serializer.configure(config, isKey); return (Serde<T>)Serdes.serdeFrom(serializer, deserializer); }
Example 5
Source File: AvroMessageDeserializer.java From Kafdrop with Apache License 2.0 | 5 votes |
private KafkaAvroDeserializer getDeserializer() { Map<String, Object> config = new HashMap<>(); config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl); KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(); kafkaAvroDeserializer.configure(config, false); return kafkaAvroDeserializer; }
Example 6
Source File: NativeKafkaWithAvroDecoderTest.java From hermes with Apache License 2.0 | 4 votes |
@Test public void testNative() throws IOException, InterruptedException, ExecutionException { final String topic = "kafka.SimpleAvroTopic"; int msgNum = 200; final CountDownLatch countDown = new CountDownLatch(msgNum); Properties producerProps = new Properties(); producerProps.put("bootstrap.servers", ""); // Avro Decoder/Encoder CachedSchemaRegistryClient schemaRegistry = new CachedSchemaRegistryClient("", AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT); Map<String, String> configs = new HashMap<String, String>(); configs.put("schema.registry.url", ""); KafkaAvroSerializer avroKeySerializer = new KafkaAvroSerializer(); avroKeySerializer.configure(configs, true); KafkaAvroSerializer avroValueSerializer = new KafkaAvroSerializer(); avroValueSerializer.configure(configs, false); Map<String, String> deserializerConfigs = new HashMap<String, String>(); deserializerConfigs.put("specific.avro.reader", Boolean.TRUE.toString()); deserializerConfigs.put("schema.registry.url", ""); KafkaAvroDeserializer avroKeyDeserializer = new KafkaAvroDeserializer(schemaRegistry, deserializerConfigs); avroKeyDeserializer.configure(configs, true); KafkaAvroDeserializer avroValueDeserializer = new KafkaAvroDeserializer(schemaRegistry, deserializerConfigs); avroValueDeserializer.configure(configs, false); // Consumer final Properties consumerProps = new Properties(); consumerProps.put("bootstrap.servers", ""); consumerProps.put("group.id", "GROUP_" + topic); final List<Object> actualResult = new ArrayList<Object>(); final List<Object> expectedResult = new ArrayList<Object>(); final KafkaConsumer<Object, Object> consumer = new KafkaConsumer<Object, Object>(consumerProps, avroKeyDeserializer, avroValueDeserializer); consumer.subscribe(Arrays.asList(topic)); class KafkaConsumerThread implements Runnable { private final AtomicBoolean closed = new AtomicBoolean(false); public void run() { try { while (!closed.get()) { ConsumerRecords<Object, Object> records = consumer.poll(100); for 
(ConsumerRecord<Object, Object> consumerRecord : records) { System.out.println("received: " + consumerRecord.value()); actualResult.add(consumerRecord.value()); countDown.countDown(); } } } catch (WakeupException e) { if (!closed.get()) throw e; } finally { consumer.commitSync(); consumer.close(); } } public void shutdown() { closed.set(true); consumer.wakeup(); } } KafkaConsumerThread thread = new KafkaConsumerThread(); new Thread(thread).start(); KafkaProducer<Object, Object> producer = new KafkaProducer<Object, Object>(producerProps, avroKeySerializer, avroValueSerializer); int i = 0; while (i++ < msgNum) { ProducerRecord<Object, Object> data = new ProducerRecord<Object, Object>(topic, null, (Object) KafkaAvroTest.generateEvent()); Future<RecordMetadata> send = producer.send(data); send.get(); if (send.isDone()) { System.out.println("sending: " + data.value()); expectedResult.add(data.value()); } } countDown.await(); thread.shutdown(); producer.close(); Assert.assertEquals(expectedResult.size(), actualResult.size()); }