io.confluent.kafka.serializers.KafkaAvroDeserializer Java Examples

The following examples show how to use io.confluent.kafka.serializers.KafkaAvroDeserializer. They are taken from open-source projects; the original project, source file, and license are noted above each example.
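
Most of the examples share the same core pattern: create a KafkaAvroDeserializer, point it at a Schema Registry via AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, configure it for keys or values, and call deserialize. The minimal sketch below illustrates that pattern; the registry URL and topic name are placeholders, not taken from any of the projects that follow.

import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import org.apache.avro.generic.GenericRecord;

import java.util.HashMap;
import java.util.Map;

public class MinimalAvroDeserialization {

    public static GenericRecord deserializeValue(String topic, byte[] payload) {
        Map<String, Object> config = new HashMap<>();
        // Placeholder: point this at your Schema Registry instance
        config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");

        KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
        // false = configure as a value deserializer (true would configure it for keys)
        deserializer.configure(config, false);

        // Without SPECIFIC_AVRO_READER_CONFIG the payload comes back as a GenericRecord
        return (GenericRecord) deserializer.deserialize(topic, payload);
    }
}
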
Example #1
Source File: ConfluentRegistryCompatibleResourceTest.java    From registry with Apache License 2.0
@Test
public void testConfluentSerDes() throws Exception {

    org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build();

    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());

    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
    kafkaAvroSerializer.configure(config, false);
    byte[] bytes = kafkaAvroSerializer.serialize("topic", record);

    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
    kafkaAvroDeserializer.configure(config, false);

    GenericRecord result = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
    LOG.info(result.toString());
}
 
Example #2
Source File: KsqlGenericRowAvroDeserializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldCreateCorrectRow() {

  KafkaAvroDeserializer kafkaAvroDeserializer = EasyMock.mock(KafkaAvroDeserializer.class);
  EasyMock.expect(kafkaAvroDeserializer.deserialize(EasyMock.anyString(), EasyMock.anyObject())
  ).andReturn(genericRecord);
  expectLastCall();
  replay(kafkaAvroDeserializer);

  KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer = new
      KsqlGenericRowAvroDeserializer(schema, kafkaAvroDeserializer, false);

  GenericRow genericRow = ksqlGenericRowAvroDeserializer.deserialize("", new byte[]{});

  assertThat("Column number does not match.", genericRow.getColumns().size(), equalTo(6));
  assertThat("Invalid column value.", genericRow.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Invalid column value.", genericRow.getColumns().get(1), equalTo(1L));
  assertThat("Invalid column value.", ((Double[]) genericRow.getColumns().get(4))[0],
             equalTo(100.0));
  assertThat("Invalid column value.",
             ((Map<String, Double>) genericRow.getColumns().get(5)).get("key1"),
             equalTo(100.0));
}
}
 
Example #3
Source File: KafkaAvroTest.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, Pet> createConsumer() {
    String registry = System.getProperty("schema.url");

    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-avro");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);

    // Without this, you get GenericData.Record instead of `Pet`
    props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);

    KafkaConsumer<Integer, Pet> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-avro-producer"));
    return consumer;
}
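
The method above only creates and subscribes the consumer. A sketch of how it might be drained (assuming the usual org.apache.kafka.clients.consumer and java.time.Duration imports, and that Pet is the Avro-generated specific record class, which is why SPECIFIC_AVRO_READER_CONFIG is set to true above):

KafkaConsumer<Integer, Pet> consumer = createConsumer();
try {
    ConsumerRecords<Integer, Pet> records = consumer.poll(Duration.ofSeconds(5));
    for (ConsumerRecord<Integer, Pet> record : records) {
        // With SPECIFIC_AVRO_READER_CONFIG=true the value is already a Pet instance,
        // not a GenericData.Record
        Pet pet = record.value();
        System.out.println(record.key() + " -> " + pet);
    }
} finally {
    consumer.close();
}
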
 
Example #4
Source File: AvroKafkaSource.java    From hudi with Apache License 2.0
public AvroKafkaSource(TypedProperties props, JavaSparkContext sparkContext, SparkSession sparkSession,
    SchemaProvider schemaProvider) {
  super(props, sparkContext, sparkSession, schemaProvider);
  props.put("key.deserializer", StringDeserializer.class);
  props.put("value.deserializer", KafkaAvroDeserializer.class);
  offsetGen = new KafkaOffsetGen(props);
}
 
Example #5
Source File: KafkaConfluentSchemaRegistryAvroMessageDecoder.java    From incubator-pinot with Apache License 2.0
@Override
public void init(Map<String, String> props, Set<String> fieldsToRead, String topicName)
    throws Exception {
  checkState(props.containsKey(SCHEMA_REGISTRY_REST_URL), "Missing required property '%s'", SCHEMA_REGISTRY_REST_URL);
  String schemaRegistryUrl = props.get(SCHEMA_REGISTRY_REST_URL);
  SchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryUrl, 1000);
  _deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  Preconditions.checkNotNull(topicName, "Topic must be provided");
  _topicName = topicName;
  _avroRecordExtractor = PluginManager.get().createInstance(AvroRecordExtractor.class.getName());
  _avroRecordExtractor.init(fieldsToRead, null);
}
 
Example #6
Source File: KafkaDeserializerExtractorTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
  mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

  Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .optionalString(TEST_FIELD_NAME2).endRecord();

  GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();

  SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
  when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);

  Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
  Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

  ByteBuffer testGenericRecordByteBuffer =
      ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));

  KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);

  KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState,
      Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);

  when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);

  GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
  Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");

}
 
Example #7
Source File: KafkaDeserializerExtractorTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testConfluentAvroDeserializer() throws IOException, RestClientException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);

  mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

  Schema schema = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  GenericRecord testGenericRecord = new GenericRecordBuilder(schema).set(TEST_FIELD_NAME, "testValue").build();

  SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
  when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schema);

  Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
  Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

  ByteBuffer testGenericRecordByteBuffer =
      ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));

  KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  KafkaDeserializerExtractor kafkaDecoderExtractor =
      new KafkaDeserializerExtractor(mockWorkUnitState,
          Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);

  Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testGenericRecord);
}
 
Example #8
Source File: AvroMessageDeserializer.java    From Kafdrop with Apache License 2.0
private KafkaAvroDeserializer getDeserializer() {
   Map<String, Object> config = new HashMap<>();
   config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
   KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
   kafkaAvroDeserializer.configure(config, false);
   return kafkaAvroDeserializer;
}
 
Example #9
Source File: AvroMessageDeserializer.java    From Kafdrop with Apache License 2.0
@Override
public String deserializeMessage(ByteBuffer buffer) {
   KafkaAvroDeserializer deserializer = getDeserializer();

   // Convert byte buffer to byte array
   byte[] bytes = ByteUtils.convertToByteArray(buffer);

   return formatJsonMessage(deserializer.deserialize(topicName, bytes).toString());
}
 
Example #10
Source File: ConfluentSchemaRegistryDeserializerProvider.java    From beam with Apache License 2.0
@Override
public Deserializer<T> getDeserializer(Map<String, ?> configs, boolean isKey) {
  ImmutableMap<String, Object> csrConfig =
      ImmutableMap.<String, Object>builder()
          .putAll(configs)
          .put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl)
          .build();
  Deserializer<T> deserializer =
      (Deserializer<T>) new KafkaAvroDeserializer(getSchemaRegistryClient());
  deserializer.configure(csrConfig, isKey);
  return deserializer;
}
 
Example #11
Source File: ConfluentClientTest.java    From apicurio-registry with Apache License 2.0
@Test
public void testSerdeAvro() throws Exception {
    SchemaRegistryClient client = buildClient();

    String subject = generateArtifactId();

    Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}");
    int id = client.register(subject + "-value", schema);
    client.reset();

    // global id can be mapped async
    retry(() -> {
        Schema schema2 = client.getById(id);
        Assertions.assertNotNull(schema2);
        return schema2;
    });

    try (KafkaAvroSerializer serializer = new KafkaAvroSerializer(client);
         KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(client);) {

        GenericData.Record record = new GenericData.Record(schema);
        record.put("bar", "somebar");

        byte[] bytes = serializer.serialize(subject, record);
        GenericData.Record ir = (GenericData.Record) deserializer.deserialize(subject, bytes);

        Assertions.assertEquals("somebar", ir.get("bar").toString());
    }
}
 
Example #12
Source File: SchemaRegistryConsumer.java    From blog with MIT License
public static void main(String[] args) {

    // Set consumer properties
    Properties properties = new Properties();
    // Kafka broker addresses
    properties.put(
        ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "node-160:9092,node-161:9092,node-162:9092");
    // Key deserializer class
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    // Value deserializer class
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
    // Consumer group
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, "consumer_group_schema");

    // Schema Registry URL
    properties.put("schema.registry.url", "http://node-160:8081");

    // Create the consumer
    KafkaConsumer<String, GenericRecord> consumer = new KafkaConsumer<>(properties);

    // Subscribe to the topic (a Pattern.compile("") regular expression can also be used)
    consumer.subscribe(Arrays.asList("topic01"));

    // Poll and print messages
    try {
      while (true) {
        // How long each poll waits for messages
        ConsumerRecords<String, GenericRecord> consumerRecords =
            consumer.poll(Duration.ofSeconds(1));
        consumerRecords.forEach(
            r ->
                System.out.printf(
                    "partition = %d, offset = %d, key = %s, value = %s%n",
                    r.partition(), r.offset(), r.key(), r.value()));
      }
    } finally {
      // Close the consumer
      consumer.close();
    }
  }
 
Example #13
Source File: AvroEndpoint.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, Pet> createConsumer(String registry) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-avro-consumer");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);
    props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
    KafkaConsumer<Integer, Pet> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-avro-consumer"));
    return consumer;
}
 
Example #14
Source File: KsqlGenericRowAvroSerializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSerializeRowWithNullCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(
      schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(1511897796092L, 1L, null, 10.0, new Double[]{100.0},
                               Collections.singletonMap("key1", 100.0));

  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()),
             equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()),
             equalTo(1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()),
             equalTo(null));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()),
             equalTo(10.0));

  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());

  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map,
             equalTo(Collections.singletonMap(new Utf8("key1"), 100.0)));

}
 
Example #15
Source File: AvroConsumer.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Application entry point
 *
 * @param args topicName and groupName
 */
@SuppressWarnings("InfiniteLoopStatement")
public static void main(String[] args) {

    if (args.length < 2) {
        System.out.println("Please provide command line arguments: topicName groupName");
        System.exit(-1);
    }
    String topicName = args[0];
    String groupName = args[1];

    Properties properties = new Properties();
    try {
        InputStream kafkaConfigStream = ClassLoader.class.getResourceAsStream(kafkaConfig);
        properties.load(kafkaConfigStream);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupName);
        //Set autocommit to false so you can execute it again for the same set of messages
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
        properties.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
        properties.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");

    } catch (IOException e) {
        logger.error(e.getMessage());
        throw new RuntimeException(e);
    }

    final KafkaConsumer<String, StockData> consumer = new KafkaConsumer<>(properties);
    consumer.subscribe(Collections.singletonList(topicName));
    while (true) {
        ConsumerRecords<String, StockData> records = consumer.poll(Duration.ofMillis(100));
        for (ConsumerRecord<String, StockData> record : records) {
            System.out.println(record.value());
        }
    }
}
 
Example #16
Source File: KsqlGenericRowAvroSerializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSerializeRowCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(
      schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[]{100.0},
                               Collections.singletonMap("key1", 100.0));

  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()),
             equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()),
             equalTo(1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()),
             equalTo(10.0));

  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());

  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map.size(), equalTo(1));
  assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));

}
 
Example #17
Source File: KsqlGenericRowAvroDeserializer.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
KsqlGenericRowAvroDeserializer(
    Schema schema,
    KafkaAvroDeserializer kafkaAvroDeserializer,
    boolean isInternal
) {
  if (isInternal) {
    this.schema = SchemaUtil.getAvroSerdeKsqlSchema(schema);
  } else {
    this.schema = SchemaUtil.getSchemaWithNoAlias(schema);
  }

  this.kafkaAvroDeserializer = kafkaAvroDeserializer;

}
 
Example #18
Source File: KsqlGenericRowAvroDeserializer.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
public KsqlGenericRowAvroDeserializer(
    Schema schema,
    SchemaRegistryClient schemaRegistryClient,
    boolean isInternal
) {
  this(schema, new KafkaAvroDeserializer(schemaRegistryClient), isInternal);
}
 
Example #19
Source File: TopicStreamWriter.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public boolean isFormat(
    String topicName, ConsumerRecord<String, Bytes> record,
    SchemaRegistryClient schemaRegistryClient
) {
  this.topicName = topicName;
  try {
    avroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    avroDeserializer.deserialize(topicName, record.value().get());
    return true;
  } catch (Throwable t) {
    return false;
  }
}
 
Example #20
Source File: FkJoinTableToTable.java    From kafka-tutorials with Apache License 2.0
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    final Map<String, String> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));
    deserializer.configure(config, isKey);
    serializer.configure(config, isKey);
    return (Serde<T>)Serdes.serdeFrom(serializer, deserializer);
}
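
The returned Serde is typically handed to the Kafka Streams DSL. A sketch of such wiring (the Long key/value types and topic names here are illustrative assumptions, not taken from the tutorial):

final Serde<Long> keySerde = getPrimitiveAvroSerde(envProps, true);    // configured as key serde
final Serde<Long> valueSerde = getPrimitiveAvroSerde(envProps, false); // configured as value serde

final StreamsBuilder builder = new StreamsBuilder();
builder.stream("example-input-topic", Consumed.with(keySerde, valueSerde))
       .to("example-output-topic", Produced.with(keySerde, valueSerde));
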
 
Example #21
Source File: CogroupingStreams.java    From kafka-tutorials with Apache License 2.0
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    final Map<String, String> config = new HashMap<>();
    config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));
    deserializer.configure(config, isKey);
    serializer.configure(config, isKey);
    return (Serde<T>)Serdes.serdeFrom(serializer, deserializer);
}
 
Example #22
Source File: DynamicOutputTopic.java    From kafka-tutorials with Apache License 2.0
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    final Map<String, String> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));
    deserializer.configure(config, isKey);
    serializer.configure(config, isKey);
    return (Serde<T>)Serdes.serdeFrom(serializer, deserializer);
}
 
Example #23
Source File: KafkaEventReceiver.java    From stream-registry with Apache License 2.0
static Map<String, Object> consumerConfig(Config config) {
  return Map.of(
      BOOTSTRAP_SERVERS_CONFIG, config.getBootstrapServers(),
      GROUP_ID_CONFIG, config.getGroupId(),
      AUTO_OFFSET_RESET_CONFIG, "earliest",
      ENABLE_AUTO_COMMIT_CONFIG, false,
      KEY_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class,
      VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class,
      SCHEMA_REGISTRY_URL_CONFIG, config.getSchemaRegistryUrl(),
      SPECIFIC_AVRO_READER_CONFIG, true
  );
}
 
Example #24
Source File: KafkaClients.java    From apicurio-registry with Apache License 2.0
public static CompletableFuture<Integer> consumeAvroConfluentMessages(String topicName,  int messageCount) {
    return consumeMessages(topicName, messageCount, StringDeserializer.class.getName(), KafkaAvroDeserializer.class.getName());
}
 
Example #25
Source File: SecorSchemaRegistryClientTest.java    From secor with Apache License 2.0
private void initKafka() {
    schemaRegistryClient = new MockSchemaRegistryClient();
    kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    avroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
}
 
Example #26
Source File: SecorSchemaRegistryClient.java    From secor with Apache License 2.0
protected void init(SecorConfig config) {
    deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    serializer = new KafkaAvroSerializer(schemaRegistryClient);
}
 
Example #27
Source File: KafkaDeserializerExtractorTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testConfluentShouldNotQuerySchemaRegistryWhenTheGapIsZero()
    throws IOException, RestClientException, SchemaRegistryException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 0L);
  mockWorkUnitState.setProp("schema.registry.url", TEST_URL);


  SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);

  Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

  KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);

  KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState,
      Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);

  verify(mockKafkaSchemaRegistry, never()).getLatestSchemaByTopic(any());

  kafkaDecoderExtractor.getSchema();

}
 
Example #28
Source File: AvroConsumerVertx.java    From df_data_service with Apache License 2.0
@Override
    public void start() throws Exception {
        System.out.println("Test");
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "group1");
        props.put("schema.registry.url", "http://localhost:8002");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
        String topic = "test_stock";

        KafkaConsumer<String, String> consumer = KafkaConsumer.create(vertx, props);
        ArrayList<JsonObject> responseList = new ArrayList<JsonObject>();

//        consumer.handler(record -> {// TODO handler does not work
//            System.out.println("Processing value=" + record.record().value() +
//                    ",partition=" + record.record().partition() + ",offset=" + record.record().offset());
//            responseList.add(new JsonObject()
//                    .put("offset", record.record().offset())
//                    .put("value", record.record().value().toString()));
//            if(responseList.size() >= 10 ) {
//                consumer.pause();
//                consumer.commit();
//                consumer.close();
//            }
//        });
//
//        // Subscribe to a single topic
//        consumer.subscribe(topic, ar -> {
//            if (ar.succeeded()) {
//                System.out.println("topic " + topic + " is subscribed");
//            } else {
//                System.out.println("Could not subscribe " + ar.cause().getMessage());
//            }
//        });

        consumer.partitionsFor(topic, ar -> {

            if (ar.succeeded()) {

                for (PartitionInfo partitionInfo : ar.result()) {
                    System.out.println(partitionInfo);
                }
            }
        });

    }
 
Example #29
Source File: DFDataProcessor.java    From df_data_service with Apache License 2.0
/**
 * Poll all available information from a specific topic
 * @param routingContext
 *
 * @api {get} /avroconsumer 7. List all df tasks using a specific topic
 * @apiVersion 0.1.1
 * @apiName pollAllFromTopic
 * @apiGroup All
 * @apiPermission none
 * @apiDescription This is where we consume data from a specific topic in one poll.
 * @apiSuccess	{JsonObject[]}	topic    Records consumed from the topic.
 * @apiSampleRequest http://localhost:8080/api/df/avroconsumer
 */
private void pollAllFromTopic(RoutingContext routingContext) {

    final String topic = routingContext.request().getParam("id");
    if (topic == null) {
        routingContext.response()
                .setStatusCode(ConstantApp.STATUS_CODE_BAD_REQUEST)
                .end(DFAPIMessage.getResponseMessage(9000));
        LOG.error(DFAPIMessage.getResponseMessage(9000, "TOPIC_IS_NULL"));
    } else {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka_server_host_and_port);
            props.put(ConsumerConfig.GROUP_ID_CONFIG, ConstantApp.DF_CONNECT_KAFKA_CONSUMER_GROUP_ID);
            props.put(ConstantApp.SCHEMA_URI_KEY, "http://" + schema_registry_host_and_port);
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
            props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);

            KafkaConsumer<String, String> consumer = KafkaConsumer.create(vertx, props);
            ArrayList<JsonObject> responseList = new ArrayList<JsonObject>();

            consumer.handler(record -> {
                //LOG.debug("Processing value=" + record.record().value() + ",offset=" + record.record().offset());
                responseList.add(new JsonObject()
                        .put("id", record.record().offset())
                        .put("value", new JsonObject(record.record().value().toString()))
                        .put("valueString", Json.encodePrettily(new JsonObject(record.record().value().toString())))
                );
                if(responseList.size() >= ConstantApp.AVRO_CONSUMER_BATCH_SIE ) {
                    HelpFunc.responseCorsHandleAddOn(routingContext.response())
                            .putHeader("X-Total-Count", responseList.size() + "")
                            .end(Json.encodePrettily(responseList));
                    consumer.pause();
                    consumer.commit();
                    consumer.close();
                }
            });
            consumer.exceptionHandler(e -> {
                LOG.error(DFAPIMessage.logResponseMessage(9031, topic + "-" + e.getMessage()));
            });

            // Subscribe to a single topic
            consumer.subscribe(topic, ar -> {
                if (ar.succeeded()) {
                    LOG.info(DFAPIMessage.logResponseMessage(1027, "topic = " + topic));
                } else {
                    LOG.error(DFAPIMessage.logResponseMessage(9030, topic + "-" + ar.cause().getMessage()));
                }
            });
    }
}
 
Example #30
Source File: DFDataProcessor.java    From df_data_service with Apache License 2.0
/**
 * Describe the specified topic
 *
 * @api {get} /s2p/:taskId   6. Get partition information for the specific subject/topic
 * @apiVersion 0.1.1
 * @apiName getAllTopicPartitions
 * @apiGroup All
 * @apiPermission none
 * @apiDescription This is where we get partition information for the subject/topic.
 * @apiParam {String}   topic      topic name.
 * @apiSuccess	{JsonObject[]}	info    Partition info.
 * @apiSampleRequest http://localhost:8080/api/df/s2p/:taskId
 */
private void getAllTopicPartitions(RoutingContext routingContext) {
    final String topic = routingContext.request().getParam("id");
    if (topic == null) {
        routingContext.response()
                .setStatusCode(ConstantApp.STATUS_CODE_BAD_REQUEST)
                .end(DFAPIMessage.getResponseMessage(9000));
        LOG.error(DFAPIMessage.getResponseMessage(9000, topic));
    } else {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka_server_host_and_port);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, ConstantApp.DF_CONNECT_KAFKA_CONSUMER_GROUP_ID);
        props.put(ConstantApp.SCHEMA_URI_KEY, "http://" + schema_registry_host_and_port);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);

        KafkaConsumer<String, String> consumer = KafkaConsumer.create(vertx, props);
        ArrayList<JsonObject> responseList = new ArrayList<JsonObject>();

        // Subscribe to a single topic
        consumer.partitionsFor(topic, ar -> {
            if (ar.succeeded()) {
                for (PartitionInfo partitionInfo : ar.result()) {
                    responseList.add(new JsonObject()
                            .put("id", partitionInfo.getTopic())
                            .put("partitionNumber", partitionInfo.getPartition())
                            .put("leader", partitionInfo.getLeader().getIdString())
                            .put("replicas", StringUtils.join(partitionInfo.getReplicas(), ','))
                            .put("insyncReplicas", StringUtils.join(partitionInfo.getInSyncReplicas(), ','))
                    );
                }

                // Respond and close the consumer once, after all partitions have been collected,
                // rather than ending the HTTP response repeatedly inside the loop
                HelpFunc.responseCorsHandleAddOn(routingContext.response())
                        .putHeader("X-Total-Count", responseList.size() + "")
                        .end(Json.encodePrettily(responseList));
                consumer.close();
            } else {
                LOG.error(DFAPIMessage.logResponseMessage(9030, topic + "-" +
                        ar.cause().getMessage()));
            }
        });

        consumer.exceptionHandler(e -> {
            LOG.error(DFAPIMessage.logResponseMessage(9031, topic + "-" + e.getMessage()));
        });
    }
}