Java Code Examples for org.apache.kafka.common.serialization.Serializer#close()

The following examples show how to use org.apache.kafka.common.serialization.Serializer#close(). Each example links back to its original project and source file.
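Serializer extends java.io.Closeable: close() releases any resources the serializer holds and should be called once the serializer is no longer needed. Before the project examples, here is a minimal sketch of the configure/serialize/close lifecycle using Kafka's built-in StringSerializer; with try-with-resources, close() is invoked automatically. This sketch is not taken from the projects below:

import java.util.Collections;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class SerializerCloseSketch {
  public static void main(String[] args) {
    // Serializer extends java.io.Closeable, so try-with-resources
    // guarantees close() runs even if serialize() throws.
    try (Serializer<String> serializer = new StringSerializer()) {
      serializer.configure(Collections.emptyMap(), false); // isKey = false: value serializer
      byte[] bytes = serializer.serialize("my-topic", "hello");
      System.out.println(bytes.length); // 5 bytes of UTF-8 "hello"
    }
  }
}
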
Example 1
Source File: TestDelimitedSerializer.java    From envelope with Apache License 2.0
@Test
public void testDelimitedSerialization() {
  List<StructField> fields = Lists.newArrayList(
      DataTypes.createStructField("field1", DataTypes.StringType, true),
      DataTypes.createStructField("field2", DataTypes.IntegerType, true),
      DataTypes.createStructField("field3", DataTypes.BooleanType, true)
  );
  Row row = new RowWithSchema(DataTypes.createStructType(fields), "hello", 1, false);
  
  Map<String, String> configs = Maps.newHashMap();
  configs.put(DelimitedSerializer.FIELD_DELIMITER_CONFIG_NAME, "||");
  Serializer<Row> serializer = new DelimitedSerializer();
  serializer.configure(configs, false);
  
  byte[] serialized = serializer.serialize("test", row);
  serializer.close();
  
  assertEquals("hello||1||false", new String(serialized));
}
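
Note that close() is called before the assertion: closing releases the serializer's resources, while the bytes already returned by serialize() remain valid. The same sequence could also be written with try-with-resources, as in the sketch above.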
 
Example 2
Source File: TestDelimitedSerializer.java    From envelope with Apache License 2.0
@Test
public void testDelimitedWithNullSerialization() {
  List<StructField> fields = Lists.newArrayList(
      DataTypes.createStructField("field1", DataTypes.StringType, true),
      DataTypes.createStructField("field2", DataTypes.IntegerType, true),
      DataTypes.createStructField("field3", DataTypes.BooleanType, true)
  );
  Row row = new RowWithSchema(DataTypes.createStructType(fields), null, 1, false);

  Map<String, String> configs = Maps.newHashMap();
  configs.put(DelimitedSerializer.FIELD_DELIMITER_CONFIG_NAME, "||");
  configs.put(DelimitedSerializer.USE_FOR_NULL_CONFIG_NAME, "BANG");
  Serializer<Row> serializer = new DelimitedSerializer();
  serializer.configure(configs, false);

  byte[] serialized = serializer.serialize("test", row);
  serializer.close();

  assertEquals("BANG||1||false", new String(serialized));
}
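
This variant sets DelimitedSerializer.USE_FOR_NULL_CONFIG_NAME, so the null value in field1 is written as the literal token BANG in the delimited output.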
 
Example 3
Source File: TestDelimitedSerializer.java    From envelope with Apache License 2.0
@Test
public void testDelimitedWithDefaultNullSerialization() {
  List<StructField> fields = Lists.newArrayList(
      DataTypes.createStructField("field1", DataTypes.StringType, true),
      DataTypes.createStructField("field2", DataTypes.IntegerType, true),
      DataTypes.createStructField("field3", DataTypes.BooleanType, true)
  );
  Row row = new RowWithSchema(DataTypes.createStructType(fields), null, 1, false);

  Map<String, String> configs = Maps.newHashMap();
  configs.put(DelimitedSerializer.FIELD_DELIMITER_CONFIG_NAME, "||");
  Serializer<Row> serializer = new DelimitedSerializer();
  serializer.configure(configs, false);

  byte[] serialized = serializer.serialize("test", row);
  serializer.close();

  assertEquals(DelimitedSerializer.USE_FOR_NULL_DEFAULT_VALUE + "||1||false", new String(serialized));
}
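
With no USE_FOR_NULL_CONFIG_NAME set, the serializer falls back to DelimitedSerializer.USE_FOR_NULL_DEFAULT_VALUE for the null field, which is what the assertion checks.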
 
Example 4
Source File: TestAvroSerializer.java    From envelope with Apache License 2.0
@Test
public void testAvroSerialization() throws IOException {
  Row row = Contexts.getSparkSession().sql("SELECT " +
      "'hello' field1, " +
      "true field2, " +
      "BINARY('world') field3, " +
      "CAST(1.0 AS DOUBLE) field4, " +
      "CAST(1 AS INT) field5, " +
      "CAST(1.0 AS FLOAT) field6, " +
      "CAST(1 AS BIGINT) field7, " +
      "NULL field8, NULL field9, NULL field10, NULL field11, NULL field12, NULL field13, NULL field14"
  ).collectAsList().get(0);
  
  Map<String, String> configs = Maps.newHashMap();
  configs.put(AvroSerializer.SCHEMA_PATH_CONFIG_NAME, getClass().getResource("/kafka/serde/avro-serialization-test.avsc").getFile());
  Serializer<Row> serializer = new AvroSerializer();
  serializer.configure(configs, false);
  
  byte[] serialized = serializer.serialize("test", row);
  serializer.close();
  
  Schema schema = new Schema.Parser().parse(new File(getClass().getResource("/kafka/serde/avro-serialization-test.avsc").getFile()));
  GenericDatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
  Decoder decoder = DecoderFactory.get().binaryDecoder(serialized, null);
  GenericRecord deserialized = reader.read(null, decoder);

  assertEquals("hello", deserialized.get("field1").toString());
  assertEquals(true, deserialized.get("field2"));
  assertEquals("world", new String(((ByteBuffer) deserialized.get("field3")).array()));
  assertEquals(1.0d, deserialized.get("field4"));
  assertEquals(1, deserialized.get("field5"));
  assertEquals(1.0f, deserialized.get("field6"));
  assertEquals(1L, deserialized.get("field7"));
  // fields 8 through 14 are nullable in the Avro schema and were selected as NULL
  for (int i = 8; i <= 14; i++) {
    assertNull(deserialized.get("field" + i));
  }
}
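
Rather than asserting on raw bytes, this test decodes the serializer's output with Avro's GenericDatumReader against the same schema and checks each field; note that the BINARY column comes back as a ByteBuffer and the NULL columns deserialize to null.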
 
Example 5
Source File: RegistrySerdeTest.java    From apicurio-registry with Apache License 2.0
@SuppressWarnings("unchecked")
@RegistryServiceTest
public void testConfiguration(Supplier<RegistryService> supplier) throws Exception {
    Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}");

    String artifactId = generateArtifactId();

    CompletionStage<ArtifactMetaData> csa = supplier.get().createArtifact(
        ArtifactType.AVRO,
        artifactId + "-myrecord3",
        null, 
        new ByteArrayInputStream(schema.toString().getBytes(StandardCharsets.UTF_8))
    );
    ArtifactMetaData amd = ConcurrentUtil.result(csa);
    // reset any cache
    supplier.get().reset();
    // wait for global id store to populate (in case of Kafka / Streams)
    ArtifactMetaData amdById = retry(() -> supplier.get().getArtifactMetaDataByGlobalId(amd.getGlobalId()));
    Assertions.assertNotNull(amdById);

    GenericData.Record record = new GenericData.Record(schema);
    record.put("bar", "somebar");

    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, "http://localhost:8081/api");
    // first pass: strategy and datum provider settings supplied as pre-built instances
    config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, new TopicRecordIdStrategy());
    config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, new FindLatestIdStrategy<>());
    config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, new DefaultAvroDatumProvider<>());
    Serializer<GenericData.Record> serializer = (Serializer<GenericData.Record>) getClass().getClassLoader()
                                                                                           .loadClass(AvroKafkaSerializer.class.getName())
                                                                                           .newInstance();
    serializer.configure(config, true);
    byte[] bytes = serializer.serialize(artifactId, record);

    Deserializer<GenericData.Record> deserializer = (Deserializer<GenericData.Record>) getClass().getClassLoader()
                                                                                                 .loadClass(AvroKafkaDeserializer.class.getName())
                                                                                                 .newInstance();
    deserializer.configure(config, true);

    record = deserializer.deserialize(artifactId, bytes);
    Assertions.assertEquals("somebar", record.get("bar").toString());

    // second pass: the same settings also accept Class objects
    config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, TopicRecordIdStrategy.class);
    config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, FindLatestIdStrategy.class);
    config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, DefaultAvroDatumProvider.class);
    serializer.configure(config, true);
    bytes = serializer.serialize(artifactId, record);
    deserializer.configure(config, true);
    record = deserializer.deserialize(artifactId, bytes);
    Assertions.assertEquals("somebar", record.get("bar").toString());

    // third pass: fully qualified class names work as well
    config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, TopicRecordIdStrategy.class.getName());
    config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, FindLatestIdStrategy.class.getName());
    config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, DefaultAvroDatumProvider.class.getName());
    serializer.configure(config, true);
    bytes = serializer.serialize(artifactId, record);
    deserializer.configure(config, true);
    record = deserializer.deserialize(artifactId, bytes);
    Assertions.assertEquals("somebar", record.get("bar").toString());

    serializer.close();
    deserializer.close();
}
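
Both org.apache.kafka.common.serialization.Serializer and Deserializer extend java.io.Closeable, so the explicit close() calls at the end of this test could also be written with try-with-resources. A sketch only, assuming the public no-arg constructors the test invokes reflectively and the config map built above:

// Sketch, not apicurio-registry test code: both serdes are closed
// automatically when the block exits, even if an assertion fails.
try (Serializer<GenericData.Record> ser = new AvroKafkaSerializer<>();
     Deserializer<GenericData.Record> deser = new AvroKafkaDeserializer<>()) {
  ser.configure(config, true);   // true = configured as a key serde, as in the test
  deser.configure(config, true);
  byte[] bytes = ser.serialize(artifactId, record);
  GenericData.Record roundTripped = deser.deserialize(artifactId, bytes);
  Assertions.assertEquals("somebar", roundTripped.get("bar").toString());
}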