Java Code Examples for org.apache.kafka.common.serialization.Serializer#configure()

The following examples show how to use org.apache.kafka.common.serialization.Serializer#configure(). Each snippet is taken from an open-source project; the source file and license are noted above each example.
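Before the examples, a minimal sketch of the configure() contract itself: Kafka calls configure(configs, isKey) once before the first serialize() call, passing the client's configuration map and a flag indicating whether this instance serializes record keys or values. The property name "example.uppercase" below is hypothetical, used only to show a config value being read.

import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.common.serialization.Serializer;

public class UppercasingStringSerializer implements Serializer<String> {

    private boolean uppercase = false;

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // Read a (hypothetical) property from the config map; values may be
        // Strings or other Objects, so convert defensively.
        Object value = configs.get("example.uppercase");
        if (value != null) {
            uppercase = Boolean.parseBoolean(value.toString());
        }
    }

    @Override
    public byte[] serialize(String topic, String data) {
        if (data == null) {
            return null;
        }
        String out = uppercase ? data.toUpperCase() : data;
        return out.getBytes(StandardCharsets.UTF_8);
    }
}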
Example 1
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
@Test
public void testToggleStoringSchemaInHeader() {
    TestRecord record = new TestRecord();
    record.setField1("Hello");
    record.setField2("World");
    String keySchemaHeaderName = KafkaAvroSerde.DEFAULT_KEY_SCHEMA_VERSION_ID;

    for (Boolean storeSchemaIdInHeader : Arrays.asList(true, false)) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(KafkaAvroSerializer.STORE_SCHEMA_VERSION_ID_IN_HEADER, storeSchemaIdInHeader.toString());
        configs.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

        KafkaAvroSerde serde = new KafkaAvroSerde(schemaRegistryClient);
        final Serializer<Object> serializer = serde.serializer();
        serializer.configure(configs, true);

        Headers headers = new RecordHeaders();
        final byte[] bytes = serializer.serialize(topic, headers, record);
        Assert.assertEquals(storeSchemaIdInHeader, headers.lastHeader(keySchemaHeaderName) != null);

        final Deserializer<Object> deserializer = serde.deserializer();
        deserializer.configure(configs, true);
        final TestRecord actual = (TestRecord) deserializer.deserialize(topic, headers, bytes);
        Assert.assertEquals(record, actual);
    }
}
 
Example 2
Source File: TestDelimitedSerializer.java    From envelope with Apache License 2.0
@Test
public void testDelimitedSerialization() {
  List<StructField> fields = Lists.newArrayList(
      DataTypes.createStructField("field1", DataTypes.StringType, true),
      DataTypes.createStructField("field2", DataTypes.IntegerType, true),
      DataTypes.createStructField("field3", DataTypes.BooleanType, true)
  );
  Row row = new RowWithSchema(DataTypes.createStructType(fields), "hello", 1, false);
  
  Map<String, String> configs = Maps.newHashMap();
  configs.put(DelimitedSerializer.FIELD_DELIMITER_CONFIG_NAME, "||");
  Serializer<Row> serializer = new DelimitedSerializer();
  serializer.configure(configs, false);
  
  byte[] serialized = serializer.serialize("test", row);
  serializer.close();
  
  assertEquals(new String(serialized), "hello||1||false");
}
 
Example 3
Source File: TestDelimitedSerializer.java    From envelope with Apache License 2.0
@Test
public void testDelimitedWithNullSerialization() {
  List<StructField> fields = Lists.newArrayList(
      DataTypes.createStructField("field1", DataTypes.StringType, true),
      DataTypes.createStructField("field2", DataTypes.IntegerType, true),
      DataTypes.createStructField("field3", DataTypes.BooleanType, true)
  );
  Row row = new RowWithSchema(DataTypes.createStructType(fields), null, 1, false);

  Map<String, String> configs = Maps.newHashMap();
  configs.put(DelimitedSerializer.FIELD_DELIMITER_CONFIG_NAME, "||");
  configs.put(DelimitedSerializer.USE_FOR_NULL_CONFIG_NAME, "BANG");
  Serializer<Row> serializer = new DelimitedSerializer();
  serializer.configure(configs, false);

  byte[] serialized = serializer.serialize("test", row);
  serializer.close();

  assertEquals(new String(serialized), "BANG||1||false");
}
 
Example 4
Source File: TestDelimitedSerializer.java    From envelope with Apache License 2.0
@Test
public void testDelimitedWithDefaultNullSerialization() {
  List<StructField> fields = Lists.newArrayList(
      DataTypes.createStructField("field1", DataTypes.StringType, true),
      DataTypes.createStructField("field2", DataTypes.IntegerType, true),
      DataTypes.createStructField("field3", DataTypes.BooleanType, true)
  );
  Row row = new RowWithSchema(DataTypes.createStructType(fields), null, 1, false);

  Map<String, String> configs = Maps.newHashMap();
  configs.put(DelimitedSerializer.FIELD_DELIMITER_CONFIG_NAME, "||");
  Serializer<Row> serializer = new DelimitedSerializer();
  serializer.configure(configs, false);

  byte[] serialized = serializer.serialize("test", row);
  serializer.close();

  assertEquals(DelimitedSerializer.USE_FOR_NULL_DEFAULT_VALUE + "||1||false", new String(serialized));
}
 
Example 5
Source File: KafkaDeserializerExtractorTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testConfluentJsonDeserializer() throws IOException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
  mockWorkUnitState.setProp("json.value.type", KafkaRecord.class.getName());

  KafkaRecord testKafkaRecord = new KafkaRecord("Hello World");

  Serializer<KafkaRecord> kafkaEncoder = new KafkaJsonSerializer<>();
  kafkaEncoder.configure(PropertiesUtils.propsToStringKeyMap(mockWorkUnitState.getProperties()), false);

  Deserializer<KafkaRecord> kafkaDecoder = new KafkaJsonDeserializer<>();
  kafkaDecoder.configure(PropertiesUtils.propsToStringKeyMap(mockWorkUnitState.getProperties()), false);

  ByteBuffer testKafkaRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testKafkaRecord));

  KafkaSchemaRegistry<?, ?> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  KafkaDeserializerExtractor kafkaDecoderExtractor =
      new KafkaDeserializerExtractor(mockWorkUnitState,
          Optional.fromNullable(Deserializers.CONFLUENT_JSON), kafkaDecoder, mockKafkaSchemaRegistry);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testKafkaRecordByteBuffer);
  Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testKafkaRecord);
}
 
Example 6
Source File: KsqlJsonTopicSerDe.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
                                            boolean isInternal,
                                            SchemaRegistryClient schemaRegistryClient) {
  Map<String, Object> serdeProps = new HashMap<>();
  serdeProps.put("JsonPOJOClass", GenericRow.class);

  final Serializer<GenericRow> genericRowSerializer = new KsqlJsonSerializer(schema);
  genericRowSerializer.configure(serdeProps, false);

  final Deserializer<GenericRow> genericRowDeserializer = new KsqlJsonDeserializer(schema);
  genericRowDeserializer.configure(serdeProps, false);

  return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
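
The serdeFrom pattern above pairs an independently configured serializer and deserializer into a single Serde. A minimal, self-contained sketch of the same pattern, using Kafka's built-in String classes in place of the KSQL-specific ones:

import java.util.Collections;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class SerdeFromSketch {
    public static void main(String[] args) {
        Serializer<String> serializer = new StringSerializer();
        serializer.configure(Collections.emptyMap(), false); // false = value side

        Deserializer<String> deserializer = new StringDeserializer();
        deserializer.configure(Collections.emptyMap(), false);

        // Combine both halves into one Serde, as the KSQL examples do.
        Serde<String> serde = Serdes.serdeFrom(serializer, deserializer);

        byte[] bytes = serde.serializer().serialize("some-topic", "hello");
        System.out.println(serde.deserializer().deserialize("some-topic", bytes));
    }
}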
 
Example 7
Source File: KsqlDelimitedTopicSerDe.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
                                            boolean isInternal,
                                            SchemaRegistryClient schemaRegistryClient) {
  Map<String, Object> serdeProps = new HashMap<>();

  final Serializer<GenericRow> genericRowSerializer = new KsqlDelimitedSerializer(schema);
  genericRowSerializer.configure(serdeProps, false);

  final Deserializer<GenericRow> genericRowDeserializer = new KsqlDelimitedDeserializer(schema);
  genericRowDeserializer.configure(serdeProps, false);

  return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
 
Example 8
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
private void testSchemaHeaderNames(String customKeySchemaHeaderName,
                                   String customValueSchemaHeaderName) {
    TestRecord record = new TestRecord();
    record.setField1("Hello");
    record.setField2("World");

    Map<String, Object> configs = new HashMap<>();
    configs.put(KafkaAvroSerde.KEY_SCHEMA_VERSION_ID_HEADER_NAME, customKeySchemaHeaderName);
    configs.put(KafkaAvroSerde.VALUE_SCHEMA_VERSION_ID_HEADER_NAME, customValueSchemaHeaderName);
    configs.put(KafkaAvroSerializer.STORE_SCHEMA_VERSION_ID_IN_HEADER, "true");
    configs.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    AvroSerDesHandler handler = new DefaultAvroSerDesHandler();
    handler.handlePayloadSerialization(outputStream, record);

    for (Boolean isKey : Arrays.asList(true, false)) {
        KafkaAvroSerde serde = new KafkaAvroSerde(schemaRegistryClient);
        final Serializer<Object> serializer = serde.serializer();
        serializer.configure(configs, isKey);

        Headers headers = new RecordHeaders();
        final byte[] bytes = serializer.serialize(topic, headers, record);
        Assert.assertArrayEquals(outputStream.toByteArray(), bytes);
        Assert.assertEquals(isKey, headers.lastHeader(customKeySchemaHeaderName) != null);
        Assert.assertEquals(!isKey, headers.lastHeader(customValueSchemaHeaderName) != null);

        final Deserializer<Object> deserializer = serde.deserializer();
        deserializer.configure(configs, isKey);
        final TestRecord actual = (TestRecord) deserializer.deserialize(topic, headers, bytes);
        Assert.assertEquals(record, actual);
    }
}
 
Example 9
Source File: TestAvroSerializer.java    From envelope with Apache License 2.0
@Test
public void testAvroSerialization() throws IOException {
  Row row = Contexts.getSparkSession().sql("SELECT " +
      "'hello' field1, " +
      "true field2, " +
      "BINARY('world') field3, " +
      "CAST(1.0 AS DOUBLE) field4, " +
      "CAST(1 AS INT) field5, " +
      "CAST(1.0 AS FLOAT) field6, " +
      "CAST(1 AS BIGINT) field7, " +
      "NULL field8, NULL field9, NULL field10, NULL field11, NULL field12, NULL field13, NULL field14"
  ).collectAsList().get(0);
  
  Map<String, String> configs = Maps.newHashMap();
  configs.put(AvroSerializer.SCHEMA_PATH_CONFIG_NAME, getClass().getResource("/kafka/serde/avro-serialization-test.avsc").getFile());
  Serializer<Row> serializer = new AvroSerializer();
  serializer.configure(configs, false);
  
  byte[] serialized = serializer.serialize("test", row);
  serializer.close();
  
  Schema schema = new Schema.Parser().parse(new File(getClass().getResource("/kafka/serde/avro-serialization-test.avsc").getFile()));
  GenericDatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
  Decoder decoder = DecoderFactory.get().binaryDecoder(serialized, null);
  GenericRecord deserialized = reader.read(null, decoder);

  assertEquals("hello", deserialized.get("field1").toString());
  assertEquals(true, deserialized.get("field2"));
  assertEquals("world", new String(((ByteBuffer) deserialized.get("field3")).array()));
  assertEquals(1.0d, deserialized.get("field4"));
  assertEquals(1, deserialized.get("field5"));
  assertEquals(1.0f, deserialized.get("field6"));
  assertEquals(1L, deserialized.get("field7"));
  for (int i = 8; i <= 14; i++) {
    assertNull(deserialized.get("field" + i));
  }
}
 
Example 10
Source File: EphemeralKafkaBroker.java    From kafka-junit with Apache License 2.0
/**
 * Create a producer that can write to this broker
 *
 * @param keySerializer   Key serializer class
 * @param valueSerializer Value serializer class
 * @param overrideConfig  Producer config to override. Pass null if there aren't any.
 * @param <K>             Type of Key
 * @param <V>             Type of Value
 * @return KafkaProducer
 */
public <K, V> KafkaProducer<K, V> createProducer(Serializer<K> keySerializer, Serializer<V> valueSerializer,
                                                 Properties overrideConfig) {
    Properties conf = producerConfig();
    if (overrideConfig != null) {
        conf.putAll(overrideConfig);
    }
    keySerializer.configure(Maps.fromProperties(conf), true);
    valueSerializer.configure(Maps.fromProperties(conf), false);
    return new KafkaProducer<>(conf, keySerializer, valueSerializer);
}
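
A hypothetical usage sketch for the helper above, assuming kafka-junit's EphemeralKafkaBroker.create()/start()/stop() lifecycle; the topic name and values are made up for illustration:

@Test
public void produceAgainstEphemeralBroker() throws Exception {
    EphemeralKafkaBroker broker = EphemeralKafkaBroker.create();
    broker.start().get(); // block until the broker is up

    // createProducer() configures both serializers from the broker's producer config.
    KafkaProducer<String, String> producer =
        broker.createProducer(new StringSerializer(), new StringSerializer(), null);
    producer.send(new ProducerRecord<>("test-topic", "key", "value")).get();

    producer.close();
    broker.stop();
}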
 
Example 11
Source File: RegistrySerdeTest.java    From apicurio-registry with Apache License 2.0
@SuppressWarnings("unchecked")
@RegistryServiceTest
public void testConfiguration(Supplier<RegistryService> supplier) throws Exception {
    Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}");

    String artifactId = generateArtifactId();

    CompletionStage<ArtifactMetaData> csa = supplier.get().createArtifact(
        ArtifactType.AVRO,
        artifactId + "-myrecord3",
        null, 
        new ByteArrayInputStream(schema.toString().getBytes(StandardCharsets.UTF_8))
    );
    ArtifactMetaData amd = ConcurrentUtil.result(csa);
    // reset any cache
    supplier.get().reset();
    // wait for global id store to populate (in case of Kafka / Streams)
    ArtifactMetaData amdById = retry(() -> supplier.get().getArtifactMetaDataByGlobalId(amd.getGlobalId()));
    Assertions.assertNotNull(amdById);

    GenericData.Record record = new GenericData.Record(schema);
    record.put("bar", "somebar");

    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, "http://localhost:8081/api");
    config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, new TopicRecordIdStrategy());
    config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, new FindLatestIdStrategy<>());
    config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, new DefaultAvroDatumProvider<>());
    Serializer<GenericData.Record> serializer = (Serializer<GenericData.Record>) getClass().getClassLoader()
                                                                                           .loadClass(AvroKafkaSerializer.class.getName())
                                                                                           .newInstance();
    serializer.configure(config, true);
    byte[] bytes = serializer.serialize(artifactId, record);

    Deserializer<GenericData.Record> deserializer = (Deserializer<GenericData.Record>) getClass().getClassLoader()
                                                                                                 .loadClass(AvroKafkaDeserializer.class.getName())
                                                                                                 .newInstance();
    deserializer.configure(config, true);

    record = deserializer.deserialize(artifactId, bytes);
    Assertions.assertEquals("somebar", record.get("bar").toString());

    config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, TopicRecordIdStrategy.class);
    config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, FindLatestIdStrategy.class);
    config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, DefaultAvroDatumProvider.class);
    serializer.configure(config, true);
    bytes = serializer.serialize(artifactId, record);
    deserializer.configure(config, true);
    record = deserializer.deserialize(artifactId, bytes);
    Assertions.assertEquals("somebar", record.get("bar").toString());

    config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, TopicRecordIdStrategy.class.getName());
    config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, FindLatestIdStrategy.class.getName());
    config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, DefaultAvroDatumProvider.class.getName());
    serializer.configure(config, true);
    bytes = serializer.serialize(artifactId, record);
    deserializer.configure(config, true);
    record = deserializer.deserialize(artifactId, bytes);
    Assertions.assertEquals("somebar", record.get("bar").toString());

    serializer.close();
    deserializer.close();
}
 
Example 12
Source File: KsqlRestApplication.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private static <T> Serializer<T> getJsonSerializer(boolean isKey) {
  Serializer<T> result = new KafkaJsonSerializer<>();
  result.configure(Collections.emptyMap(), isKey);
  return result;
}
 
Example 13
Source File: KsqlResourceTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private static <T> Serializer<T> getJsonSerializer(boolean isKey) {
  Serializer<T> result = new KafkaJsonSerializer<>();
  result.configure(Collections.emptyMap(), isKey);
  return result;
}