io.confluent.kafka.serializers.KafkaAvroSerializer Java Examples

The following examples show how to use io.confluent.kafka.serializers.KafkaAvroSerializer. Each example is drawn from an open source project; the source file, originating project, and license are noted above the code.
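Before the project examples, here is a minimal round-trip sketch of the serializer itself; the registry URL, topic name, and schema are placeholders, not taken from any project below:

Map<String, Object> config = new HashMap<>();
// point this at your own Schema Registry deployment (placeholder URL)
config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");

KafkaAvroSerializer serializer = new KafkaAvroSerializer();
serializer.configure(config, false); // false = configure as a value serializer, true = key

Schema schema = new Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"example\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}");
GenericRecord record = new GenericData.Record(schema);
record.put("f1", "some value");

// registers the schema (auto-registration is on by default) and emits the Confluent
// wire format: magic byte, 4-byte schema id, then the Avro-encoded payload
byte[] payload = serializer.serialize("example-topic", record);
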
Example #1
Source File: KsqlGenericRowAvroSerializer.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
public KsqlGenericRowAvroSerializer(
    org.apache.kafka.connect.data.Schema schema,
    SchemaRegistryClient schemaRegistryClient,
    KsqlConfig ksqlConfig
) {
  String avroSchemaStr = SchemaUtil.buildAvroSchema(schema, "avro_schema");
  
  Schema.Parser parser = new Schema.Parser();
  avroSchema = parser.parse(avroSchemaStr);
  fields = avroSchema.getFields();

  Map<String, Object> map = new HashMap<>();

  // Automatically register the schema in the Schema Registry if it has not been registered.
  map.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
  map.put(
      AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
      ksqlConfig.getString(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY)
  );
  kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient, map);
}
 
Example #2
Source File: ConfluentRegistryCompatibleResourceTest.java    From registry with Apache License 2.0
@Test
public void testConfluentSerDes() throws Exception {

    org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build();

    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());

    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
    kafkaAvroSerializer.configure(config, false);
    byte[] bytes = kafkaAvroSerializer.serialize("topic", record);

    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
    kafkaAvroDeserializer.configure(config, false);

    GenericRecord result = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
    LOG.info(result.toString());
}
 
Example #3
Source File: KsqlGenericRowAvroDeserializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private byte[] getSerializedRow(String topicName, SchemaRegistryClient schemaRegistryClient,
                                Schema rowAvroSchema, GenericRow genericRow) {
  Map<String, Object> map = new HashMap<>();
  // Automatically register the schema in the Schema Registry if it has not been registered.
  map.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
  map.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "");
  KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient, map);
  GenericRecord avroRecord = new GenericData.Record(rowAvroSchema);
  List<Schema.Field> fields = rowAvroSchema.getFields();
  for (int i = 0; i < genericRow.getColumns().size(); i++) {
    if (fields.get(i).schema().getType() == Schema.Type.ARRAY) {
      avroRecord.put(fields.get(i).name(), Arrays.asList((Object[]) genericRow.getColumns().get(i)));
    } else {
      avroRecord.put(fields.get(i).name(), genericRow.getColumns().get(i));
    }
  }

  return kafkaAvroSerializer.serialize(topicName, avroRecord);
}
 
Example #4
Source File: FkJoinTableToTable.java    From kafka-tutorials with Apache License 2.0
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    final Map<String, String> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));
    deserializer.configure(config, isKey);
    serializer.configure(config, isKey);
    return (Serde<T>)Serdes.serdeFrom(serializer, deserializer);
}
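The Serde returned above plugs directly into a Kafka Streams topology. A minimal sketch, assuming the same envProps and a placeholder topic keyed by Long with Double values:

final Serde<Long> longKeySerde = getPrimitiveAvroSerde(envProps, true);
final Serde<Double> doubleValueSerde = getPrimitiveAvroSerde(envProps, false);

final StreamsBuilder builder = new StreamsBuilder();
// keys and values on the topic were written by KafkaAvroSerializer as Avro primitives
final KStream<Long, Double> stream =
    builder.stream("input-topic", Consumed.with(longKeySerde, doubleValueSerde));
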
 
Example #5
Source File: ConfluentClientTest.java    From apicurio-registry with Apache License 2.0
@Test
public void testSerdeAvro() throws Exception {
    SchemaRegistryClient client = buildClient();

    String subject = generateArtifactId();

    Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}");
    int id = client.register(subject + "-value", schema);
    client.reset();

    // global id can be mapped async
    retry(() -> {
        Schema schema2 = client.getById(id);
        Assertions.assertNotNull(schema2);
        return schema2;
    });

    try (KafkaAvroSerializer serializer = new KafkaAvroSerializer(client);
         KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(client)) {

        GenericData.Record record = new GenericData.Record(schema);
        record.put("bar", "somebar");

        byte[] bytes = serializer.serialize(subject, record);
        GenericData.Record ir = (GenericData.Record) deserializer.deserialize(subject, bytes);

        Assertions.assertEquals("somebar", ir.get("bar").toString());
    }
}
 
Example #6
Source File: SimpleTextAvroProducer.java    From landoop-avro-generator with Apache License 2.0
private static Producer<String, Object> getStringAvroProducer(String brokers, String schemaregistry) {
  System.out.println("Starting [AvroProducer] with brokers=[" + brokers + "] and schema-registry=[" + schemaregistry + "]");
  Properties producerProps = new Properties();
  producerProps.put("bootstrap.servers", brokers);
  producerProps.put("acks", "all");
  producerProps.put("key.serializer", StringSerializer.class.getName());
  producerProps.put("value.serializer", KafkaAvroSerializer.class.getName());
  producerProps.put("linger.ms", "10"); // ?
  producerProps.put("schema.registry.url", schemaregistry);
  return new KafkaProducer<>(producerProps);
}
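A brief usage sketch for the producer above; the broker list, topic name, and record are placeholders, and the record's schema is assumed to be registrable with the configured registry:

Producer<String, Object> producer = getStringAvroProducer("localhost:9092", "http://localhost:8081");
// avroRecord is a GenericRecord built against some Avro schema (placeholder)
producer.send(new ProducerRecord<>("example-topic", "key-1", avroRecord));
producer.flush();   // block until buffered records are sent
producer.close();
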
 
Example #7
Source File: SimpleAvroProducer.java    From landoop-avro-generator with Apache License 2.0
private static Producer<Object, Object> getAvroProducer(String brokers, String schemaregistry) {
  System.out.println("Starting [AvroProducer] with brokers=[" + brokers + "] and schema-registry=[" + schemaregistry + "]");
  Properties producerProps = new Properties();
  producerProps.put("bootstrap.servers", brokers);
  producerProps.put("acks", "all");
  producerProps.put("key.serializer", KafkaAvroSerializer.class.getName());
  producerProps.put("value.serializer", KafkaAvroSerializer.class.getName());
  producerProps.put("linger.ms", "10"); // ?
  producerProps.put("schema.registry.url", schemaregistry);
  return new KafkaProducer<>(producerProps);
}
 
Example #8
Source File: SchemaRegistryProducer.java    From blog with MIT License
public static void main(String[] args) {

    // Parse the schema with Avro
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(userSchema);

    // Set the producer properties
    Properties properties = new Properties();
    // Kafka broker addresses
    properties.put(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node-160:9092,node-161:9092,node-162:9092");
    // Key serializer class
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    // Value serializer class
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);

    // Schema Registry address
    properties.put("schema.registry.url", "http://node-160:8081");

    // Create the producer
    KafkaProducer<Object, Object> producer = new KafkaProducer<>(properties);

    // Create and send a message
    GenericData.Record record = new GenericData.Record(schema);
    record.put("name", "hvkcoder");
    producer.send(new ProducerRecord<>("topic01", record));

    // Close the producer
    producer.close();
}
 
Example #9
Source File: KafkaAvroTest.java    From quarkus with Apache License 2.0
public static KafkaProducer<Integer, Pet> createProducer() {
    String registry = System.getProperty("schema.url");

    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ProducerConfig.CLIENT_ID_CONFIG, "test-avro");
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);
    return new KafkaProducer<>(props);
}
 
Example #10
Source File: AvroEndpoint.java    From quarkus with Apache License 2.0
public static KafkaProducer<Integer, Pet> createProducer(String registry) {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ProducerConfig.CLIENT_ID_CONFIG, "test-avro");
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);
    return new KafkaProducer<>(props);
}
 
Example #11
Source File: PosSimulator.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    if (args.length < 3) {
        System.out.println("Please provide command line arguments: topicName noOfProducers produceSpeed");
        System.exit(-1);
    }
    String topicName = args[0];
    int noOfProducers = Integer.parseInt(args[1]);
    int produceSpeed = Integer.parseInt(args[2]);
    Properties properties = new Properties();
    properties.put(ProducerConfig.CLIENT_ID_CONFIG, "StockSimulator");
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,localhost:9093");
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    properties.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");

    KafkaProducer<String, PosInvoice> kafkaProducer = new KafkaProducer<>(properties);
    ExecutorService executor = Executors.newFixedThreadPool(noOfProducers); // one thread per producer
    final List<RunnableProducer> runnableProducers = new ArrayList<>();
    for (int i = 0; i < noOfProducers; i++) {
        RunnableProducer runnableProducer = new RunnableProducer(i, kafkaProducer, topicName, produceSpeed);
        runnableProducers.add(runnableProducer);
        executor.submit(runnableProducer);
    }

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        for (RunnableProducer p : runnableProducers)
            p.shutdown();
        executor.shutdown();
        logger.info("Closing Executor Service");
        try {
            executor.awaitTermination(produceSpeed * 2, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }));

}
 
Example #12
Source File: NotificationEventConfig.java    From stream-registry with Apache License 2.0
@Bean
@ConditionalOnProperty(name = KAFKA_NOTIFICATIONS_ENABLED_PROPERTY)
public ProducerFactory<SpecificRecord, SpecificRecord> producerFactory() {
  log.info("Building kafka producer in cluster {} with schema registry {}", bootstrapServers, schemaRegistryUrl);
  Objects.requireNonNull(bootstrapServers, getWarningMessageOnNotDefinedProp("enabled notification events", KAFKA_BOOTSTRAP_SERVERS_PROPERTY));
  Objects.requireNonNull(schemaRegistryUrl, getWarningMessageOnNotDefinedProp("enabled notification events", KAFKA_SCHEMA_REGISTRY_URL_PROPERTY));

  val props = new HashMap<String, Object>();
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  props.put(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);

  return new DefaultKafkaProducerFactory<>(props);
}
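A sketch of how this factory could be wired into a KafkaTemplate for sending events; the template bean and topic name are assumptions, not part of the original configuration:

@Bean
@ConditionalOnProperty(name = KAFKA_NOTIFICATIONS_ENABLED_PROPERTY)
public KafkaTemplate<SpecificRecord, SpecificRecord> kafkaTemplate() {
  return new KafkaTemplate<>(producerFactory());
}

// elsewhere, both key and value must be Avro SpecificRecord instances (hypothetical topic):
// kafkaTemplate.send("notification-events", keyRecord, valueRecord);
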
 
Example #13
Source File: KafkaEventSender.java    From stream-registry with Apache License 2.0
static Map<String, Object> producerConfig(Config config) {
  return Map.of(
      BOOTSTRAP_SERVERS_CONFIG, config.getBootstrapServers(),
      ACKS_CONFIG, "all",
      KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class,
      VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class,
      SCHEMA_REGISTRY_URL_CONFIG, config.getSchemaRegistryUrl()
  );
}
 
Example #14
Source File: KafkaDeserializerExtractorTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testConfluentAvroDeserializer() throws IOException, RestClientException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);

  mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

  Schema schema = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  GenericRecord testGenericRecord = new GenericRecordBuilder(schema).set(TEST_FIELD_NAME, "testValue").build();

  SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
  when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schema);

  Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
  Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

  ByteBuffer testGenericRecordByteBuffer =
      ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));

  KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  KafkaDeserializerExtractor kafkaDecoderExtractor =
      new KafkaDeserializerExtractor(mockWorkUnitState,
          Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);

  Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testGenericRecord);
}
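Because the serializer and deserializer share the same mock SchemaRegistryClient, the schema id that KafkaAvroSerializer embeds in the payload resolves during deserialization without a live registry.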
 
Example #15
Source File: KafkaBasicsApplication.java    From spring_io_2019 with Apache License 2.0
@Bean
Map<String, Object> producerConfigs() {
	Map<String, Object> props = new HashMap<>();
	props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
	props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class);
	props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
	props.put("schema.registry.url", "http://localhost:8081");
	return props;
}
 
Example #16
Source File: DynamicOutputTopic.java    From kafka-tutorials with Apache License 2.0
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    final Map<String, String> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));
    deserializer.configure(config, isKey);
    serializer.configure(config, isKey);
    return (Serde<T>)Serdes.serdeFrom(serializer, deserializer);
}
 
Example #17
Source File: CogroupingStreams.java    From kafka-tutorials with Apache License 2.0
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    final Map<String, String> config = new HashMap<>();
    config.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));
    deserializer.configure(config, isKey);
    serializer.configure(config, isKey);
    return (Serde<T>)Serdes.serdeFrom(serializer, deserializer);
}
 
Example #18
Source File: KafkaDeserializerExtractorTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
  mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

  Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .optionalString(TEST_FIELD_NAME2).endRecord();

  GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();

  SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
  when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);

  Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
  Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

  ByteBuffer testGenericRecordByteBuffer =
      ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));

  KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);

  KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState,
      Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);

  when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);

  GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
  Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");

}
 
Example #19
Source File: KeyAvroSerializer.java    From kafka-connect-couchbase with Apache License 2.0
public KeyAvroSerializer(SchemaRegistryClient client) {
  inner = new KafkaAvroSerializer(client);
}
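The class wraps a KafkaAvroSerializer in its inner field; the delegation presumably looks something like the following sketch (an assumption — the actual methods live elsewhere in the source file):

@Override
public byte[] serialize(String topic, Object data) {
  // defer to the Confluent serializer for registry lookup and wire-format encoding
  return inner.serialize(topic, data);
}
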
 
Example #20
Source File: SecorSchemaRegistryClientTest.java    From secor with Apache License 2.0
private void initKafka() {
    schemaRegistryClient = new MockSchemaRegistryClient();
    kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    avroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
}
 
Example #21
Source File: SecorSchemaRegistryClient.java    From secor with Apache License 2.0
protected void init(SecorConfig config) {
    deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    serializer = new KafkaAvroSerializer(schemaRegistryClient);
}
 
Example #22
Source File: KeyAvroSerializer.java    From kafka-connect-couchbase with Apache License 2.0
/**
 * Constructor used by Kafka Streams.
 */
public KeyAvroSerializer() {
  inner = new KafkaAvroSerializer();
}
 
Example #23
Source File: ValueAvroSerializer.java    From kafka-connect-couchbase with Apache License 2.0
public ValueAvroSerializer(SchemaRegistryClient client) {
  inner = new KafkaAvroSerializer(client);
}
 
Example #24
Source File: ValueAvroSerializer.java    From kafka-connect-couchbase with Apache License 2.0
/**
 * Constructor used by Kafka Streams.
 */
public ValueAvroSerializer() {
  inner = new KafkaAvroSerializer();
}
 
Example #25
Source File: ReplicatorKafkaJSONTest.java    From replicator with Apache License 2.0
private Map<String, Object> getConfiguration() {
    Map<String, Object> configuration = new HashMap<>();

    configuration.put(ZookeeperCoordinator.Configuration.CONNECTION_STRING, ReplicatorKafkaJSONTest.zookeeper.getURL());
    configuration.put(ZookeeperCoordinator.Configuration.LEADERSHIP_PATH, ReplicatorKafkaJSONTest.ZOOKEEPER_LEADERSHIP_PATH);

    configuration.put(WebServer.Configuration.TYPE, WebServer.ServerType.JETTY.name());

    configuration.put(BinaryLogSupplier.Configuration.MYSQL_HOSTNAME, Collections.singletonList(ReplicatorKafkaJSONTest.mysqlBinaryLog.getHost()));
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_PORT, String.valueOf(ReplicatorKafkaJSONTest.mysqlBinaryLog.getPort()));
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_SCHEMA, ReplicatorKafkaJSONTest.MYSQL_SCHEMA);
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_USERNAME, ReplicatorKafkaJSONTest.MYSQL_ROOT_USERNAME);
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_PASSWORD, ReplicatorKafkaJSONTest.MYSQL_PASSWORD);

    configuration.put(ActiveSchemaManager.Configuration.MYSQL_HOSTNAME, ReplicatorKafkaJSONTest.mysqlActiveSchema.getHost());
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_PORT, String.valueOf(ReplicatorKafkaJSONTest.mysqlActiveSchema.getPort()));
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_ACTIVE_SCHEMA, ReplicatorKafkaJSONTest.MYSQL_ACTIVE_SCHEMA);
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_USERNAME, ReplicatorKafkaJSONTest.MYSQL_ROOT_USERNAME);
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_PASSWORD, ReplicatorKafkaJSONTest.MYSQL_PASSWORD);

    configuration.put(AugmenterContext.Configuration.TRANSACTION_BUFFER_LIMIT, String.valueOf(ReplicatorKafkaJSONTest.TRANSACTION_LIMIT));
    configuration.put(AugmenterContext.Configuration.TRANSACTIONS_ENABLED, true);

    configuration.put(String.format("%s%s", KafkaApplier.Configuration.PRODUCER_PREFIX, ProducerConfig.BOOTSTRAP_SERVERS_CONFIG), ReplicatorKafkaJSONTest.kafka.getURL());
    configuration.put(String.format("%s%s", KafkaApplier.Configuration.PRODUCER_PREFIX, ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG), ByteArraySerializer.class);
    configuration.put(String.format("%s%s", KafkaApplier.Configuration.PRODUCER_PREFIX, ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG), KafkaAvroSerializer.class);
    configuration.put(KafkaApplier.Configuration.FORMAT, "json");

    configuration.put(String.format("%s%s", KafkaSeeker.Configuration.CONSUMER_PREFIX, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG), ReplicatorKafkaJSONTest.kafka.getURL());
    configuration.put(String.format("%s%s", KafkaSeeker.Configuration.CONSUMER_PREFIX, ConsumerConfig.GROUP_ID_CONFIG), ReplicatorKafkaJSONTest.KAFKA_REPLICATOR_GROUP_ID);
    configuration.put(String.format("%s%s", KafkaSeeker.Configuration.CONSUMER_PREFIX, ConsumerConfig.AUTO_OFFSET_RESET_CONFIG), "earliest");
    configuration.put(KafkaApplier.Configuration.TOPIC, ReplicatorKafkaJSONTest.KAFKA_REPLICATOR_TOPIC_NAME);

    configuration.put(Coordinator.Configuration.TYPE, Coordinator.Type.ZOOKEEPER.name());

    configuration.put(Supplier.Configuration.TYPE, Supplier.Type.BINLOG.name());
    configuration.put(BinaryLogSupplier.Configuration.POSITION_TYPE, BinaryLogSupplier.PositionType.BINLOG);

    configuration.put(Augmenter.Configuration.SCHEMA_TYPE, Augmenter.SchemaType.ACTIVE.name());
    configuration.put(Seeker.Configuration.TYPE, Seeker.Type.KAFKA.name());

    configuration.put(Partitioner.Configuration.TYPE, Partitioner.Type.TABLE_NAME.name());

    configuration.put(Applier.Configuration.TYPE, Applier.Type.KAFKA.name());
    configuration.put(CheckpointApplier.Configuration.TYPE, CheckpointApplier.Type.COORDINATOR.name());
    configuration.put(Replicator.Configuration.CHECKPOINT_PATH, ReplicatorKafkaJSONTest.ZOOKEEPER_CHECKPOINT_PATH);
    configuration.put(Replicator.Configuration.CHECKPOINT_DEFAULT, ReplicatorKafkaJSONTest.CHECKPOINT_DEFAULT);
    configuration.put(Replicator.Configuration.REPLICATOR_THREADS, String.valueOf(ReplicatorKafkaJSONTest.KAFKA_TOPIC_PARTITIONS));
    configuration.put(Replicator.Configuration.REPLICATOR_TASKS, String.valueOf(ReplicatorKafkaJSONTest.KAFKA_TOPIC_PARTITIONS));

    return configuration;
}
 
Example #26
Source File: ReplicatorKafkaAvroTest.java    From replicator with Apache License 2.0
private Map<String, Object> getConfiguration() {
    Map<String, Object> configuration = new HashMap<>();

    configuration.put(ZookeeperCoordinator.Configuration.CONNECTION_STRING, ReplicatorKafkaAvroTest.zookeeper.getURL());
    configuration.put(ZookeeperCoordinator.Configuration.LEADERSHIP_PATH, ReplicatorKafkaAvroTest.ZOOKEEPER_LEADERSHIP_PATH);

    configuration.put(WebServer.Configuration.TYPE, WebServer.ServerType.JETTY.name());

    configuration.put(BinaryLogSupplier.Configuration.MYSQL_HOSTNAME, Collections.singletonList(ReplicatorKafkaAvroTest.mysqlBinaryLog.getHost()));
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_PORT, String.valueOf(ReplicatorKafkaAvroTest.mysqlBinaryLog.getPort()));
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_SCHEMA, ReplicatorKafkaAvroTest.MYSQL_SCHEMA);
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_USERNAME, ReplicatorKafkaAvroTest.MYSQL_ROOT_USERNAME);
    configuration.put(BinaryLogSupplier.Configuration.MYSQL_PASSWORD, ReplicatorKafkaAvroTest.MYSQL_PASSWORD);

    configuration.put(ActiveSchemaManager.Configuration.MYSQL_HOSTNAME, ReplicatorKafkaAvroTest.mysqlActiveSchema.getHost());
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_PORT, String.valueOf(ReplicatorKafkaAvroTest.mysqlActiveSchema.getPort()));
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_ACTIVE_SCHEMA, ReplicatorKafkaAvroTest.MYSQL_ACTIVE_SCHEMA);
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_USERNAME, ReplicatorKafkaAvroTest.MYSQL_ROOT_USERNAME);
    configuration.put(ActiveSchemaManager.Configuration.MYSQL_PASSWORD, ReplicatorKafkaAvroTest.MYSQL_PASSWORD);

    configuration.put(AugmenterContext.Configuration.TRANSACTION_BUFFER_LIMIT, String.valueOf(ReplicatorKafkaAvroTest.TRANSACTION_LIMIT));
    configuration.put(AugmenterContext.Configuration.TRANSACTIONS_ENABLED, true);

    configuration.put(String.format("%s%s", KafkaApplier.Configuration.PRODUCER_PREFIX, ProducerConfig.BOOTSTRAP_SERVERS_CONFIG), ReplicatorKafkaAvroTest.kafka.getURL());
    configuration.put(String.format("%s%s", KafkaApplier.Configuration.PRODUCER_PREFIX, ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG), ByteArraySerializer.class);
    configuration.put(String.format("%s%s", KafkaApplier.Configuration.PRODUCER_PREFIX, ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG), KafkaAvroSerializer.class);
    configuration.put(KafkaApplier.Configuration.SCHEMA_REGISTRY_URL, String.format("http://%s:%d", ReplicatorKafkaAvroTest.schemaRegistry.getHost(), ReplicatorKafkaAvroTest.schemaRegistry.getPort()));
    configuration.put(KafkaApplier.Configuration.FORMAT, "avro");

    configuration.put(String.format("%s%s", KafkaSeeker.Configuration.CONSUMER_PREFIX, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG), ReplicatorKafkaAvroTest.kafka.getURL());
    configuration.put(String.format("%s%s", KafkaSeeker.Configuration.CONSUMER_PREFIX, ConsumerConfig.GROUP_ID_CONFIG), ReplicatorKafkaAvroTest.KAFKA_REPLICATOR_GROUP_ID);
    configuration.put(String.format("%s%s", KafkaSeeker.Configuration.CONSUMER_PREFIX, ConsumerConfig.AUTO_OFFSET_RESET_CONFIG), "earliest");
    configuration.put(KafkaApplier.Configuration.TOPIC, ReplicatorKafkaAvroTest.KAFKA_REPLICATOR_TOPIC_NAME);

    configuration.put(Coordinator.Configuration.TYPE, Coordinator.Type.ZOOKEEPER.name());

    configuration.put(Supplier.Configuration.TYPE, Supplier.Type.BINLOG.name());
    configuration.put(BinaryLogSupplier.Configuration.POSITION_TYPE, BinaryLogSupplier.PositionType.BINLOG);

    configuration.put(Augmenter.Configuration.SCHEMA_TYPE, Augmenter.SchemaType.ACTIVE.name());
    configuration.put(Seeker.Configuration.TYPE, Seeker.Type.KAFKA.name());

    configuration.put(Partitioner.Configuration.TYPE, Partitioner.Type.TABLE_NAME.name());

    configuration.put(Applier.Configuration.TYPE, Applier.Type.KAFKA.name());
    configuration.put(CheckpointApplier.Configuration.TYPE, CheckpointApplier.Type.COORDINATOR.name());
    configuration.put(Replicator.Configuration.CHECKPOINT_PATH, ReplicatorKafkaAvroTest.ZOOKEEPER_CHECKPOINT_PATH);
    configuration.put(Replicator.Configuration.CHECKPOINT_DEFAULT, ReplicatorKafkaAvroTest.CHECKPOINT_DEFAULT);
    configuration.put(Replicator.Configuration.REPLICATOR_THREADS, String.valueOf(ReplicatorKafkaAvroTest.KAFKA_TOPIC_PARTITIONS));
    configuration.put(Replicator.Configuration.REPLICATOR_TASKS, String.valueOf(ReplicatorKafkaAvroTest.KAFKA_TOPIC_PARTITIONS));

    return configuration;
}
 
Example #27
Source File: KafkaApplier.java    From replicator with Apache License 2.0
public KafkaApplier(Map<String, Object> configuration) {

        Object topic = configuration.get(Configuration.TOPIC);

        Objects.requireNonNull(topic, String.format("Configuration required: %s", Configuration.TOPIC));

        this.producers          = new ConcurrentHashMap<>();
        this.configuration      = new MapFilter(configuration).filter(Configuration.PRODUCER_PREFIX);
        this.topic              = topic.toString();
        this.totalPartitions    = this.getTotalPartitions();
        this.partitioner        = Partitioner.build(configuration);
        this.metrics            = Metrics.getInstance(configuration);
        this.dataFormat         = configuration.get(Configuration.FORMAT) == null ? MessageFormat.AVRO : String.valueOf(configuration.get(Configuration.FORMAT));

        if (Objects.equals(dataFormat, MessageFormat.AVRO)) {
            Object schemaRegistryUrlConfig = configuration.get(Configuration.SCHEMA_REGISTRY_URL);
            Objects.requireNonNull(schemaRegistryUrlConfig, String.format("Configuration required: %s", Configuration.SCHEMA_REGISTRY_URL));

            this.schemaRegistryClient = new BCachedSchemaRegistryClient(String.valueOf(schemaRegistryUrlConfig), 2000);
            this.kafkaAvroSerializer  = new KafkaAvroSerializer(this.schemaRegistryClient);
        }

        Objects.requireNonNull(topic, String.format("Configuration required: %s", Configuration.TOPIC));

        this.metricBase = MetricRegistry.name(this.metrics.basePath());

        this.setupColumnsFilter(configuration);

        METRIC_APPLIER_DELAY = MetricRegistry.name(
                String.valueOf(configuration.getOrDefault(Metrics.Configuration.BASE_PATH, "")),
                "applier","kafka","delay"
        );

        this.metrics.register(METRIC_APPLIER_DELAY, (Gauge<Long>) () -> {
            if (lastEventSent.get() != null) {
                return System.currentTimeMillis() - lastEventSent.get().getHeader().getTimestamp();
            } else {
                return 0L;
            }
        });

    }
 
Example #28
Source File: NativeKafkaWithAvroDecoderTest.java    From hermes with Apache License 2.0
@Test
public void testNative() throws IOException, InterruptedException, ExecutionException {
	final String topic = "kafka.SimpleAvroTopic";
	int msgNum = 200;
	final CountDownLatch countDown = new CountDownLatch(msgNum);

	Properties producerProps = new Properties();
	producerProps.put("bootstrap.servers", "");

	// Avro Decoder/Encoder
	CachedSchemaRegistryClient schemaRegistry = new CachedSchemaRegistryClient("",
	      AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT);
	Map<String, String> configs = new HashMap<String, String>();
	configs.put("schema.registry.url", "");

	KafkaAvroSerializer avroKeySerializer = new KafkaAvroSerializer();
	avroKeySerializer.configure(configs, true);
	KafkaAvroSerializer avroValueSerializer = new KafkaAvroSerializer();
	avroValueSerializer.configure(configs, false);

	Map<String, String> deserializerConfigs = new HashMap<String, String>();
	deserializerConfigs.put("specific.avro.reader", Boolean.TRUE.toString());
	deserializerConfigs.put("schema.registry.url", "");
	KafkaAvroDeserializer avroKeyDeserializer = new KafkaAvroDeserializer(schemaRegistry, deserializerConfigs);
	avroKeyDeserializer.configure(configs, true);
	KafkaAvroDeserializer avroValueDeserializer = new KafkaAvroDeserializer(schemaRegistry, deserializerConfigs);
	avroValueDeserializer.configure(configs, false);

	// Consumer
	final Properties consumerProps = new Properties();
	consumerProps.put("bootstrap.servers", "");
	consumerProps.put("group.id", "GROUP_" + topic);

	final List<Object> actualResult = new ArrayList<Object>();
	final List<Object> expectedResult = new ArrayList<Object>();

	final KafkaConsumer<Object, Object> consumer = new KafkaConsumer<Object, Object>(consumerProps,
	      avroKeyDeserializer, avroValueDeserializer);
	consumer.subscribe(Arrays.asList(topic));

	class KafkaConsumerThread implements Runnable {

		private final AtomicBoolean closed = new AtomicBoolean(false);

		public void run() {
			try {
				while (!closed.get()) {
					ConsumerRecords<Object, Object> records = consumer.poll(100);
					for (ConsumerRecord<Object, Object> consumerRecord : records) {
						System.out.println("received: " + consumerRecord.value());
						actualResult.add(consumerRecord.value());
						countDown.countDown();
					}
				}
			} catch (WakeupException e) {
				if (!closed.get())
					throw e;
			} finally {
				consumer.commitSync();
				consumer.close();
			}
		}

		public void shutdown() {
			closed.set(true);
			consumer.wakeup();
		}
	}

	KafkaConsumerThread thread = new KafkaConsumerThread();
	new Thread(thread).start();

	KafkaProducer<Object, Object> producer = new KafkaProducer<Object, Object>(producerProps, avroKeySerializer,
	      avroValueSerializer);
	int i = 0;
	while (i++ < msgNum) {
		ProducerRecord<Object, Object> data = new ProducerRecord<Object, Object>(topic, null,
		      (Object) KafkaAvroTest.generateEvent());
		Future<RecordMetadata> send = producer.send(data);
		send.get();
		if (send.isDone()) {
			System.out.println("sending: " + data.value());
			expectedResult.add(data.value());
		}
	}

	countDown.await();

	thread.shutdown();
	producer.close();

	Assert.assertEquals(expectedResult.size(), actualResult.size());
}
 
Example #29
Source File: TopicStreamWriterFormatTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldMatchAvroFormatter() throws Exception {

  /**
   * Build an AVRO message
   */
  String USER_SCHEMA = "{\n" +
          "    \"fields\": [\n" +
          "        { \"name\": \"str1\", \"type\": \"string\" }\n" +
          "    ],\n" +
          "    \"name\": \"myrecord\",\n" +
          "    \"type\": \"record\"\n" +
          "}";
  Schema.Parser parser = new Schema.Parser();
  Schema schema = parser.parse(USER_SCHEMA);

  GenericData.Record avroRecord = new GenericData.Record(schema);
  avroRecord.put("str1", "My first string");

  /**
   * Setup expects
   */
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  expect(schemaRegistryClient.register(anyString(), anyObject())).andReturn(1);
  expect(schemaRegistryClient.getById(anyInt())).andReturn(schema);

  replay(schemaRegistryClient);

  Map<String, String> props = new HashMap<>();
  props.put("schema.registry.url", "localhost:9092");

  KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer(schemaRegistryClient, props);

  /**
   * Test data
   */
  byte[] testRecordBytes = avroSerializer.serialize("topic", avroRecord);
  ConsumerRecord<String, Bytes> record = new ConsumerRecord<String, Bytes>("topic", 1, 1, "key", new Bytes(testRecordBytes));

  /** Assert */
  assertTrue(TopicStreamWriter.Format.AVRO.isFormat("topic", record, schemaRegistryClient));
}
 
Example #30
Source File: AvroProducer.java    From snowflake-kafka-connector with Apache License 2.0
AvroProducer()
{
  this.props = getProperties(KafkaAvroSerializer.class.getCanonicalName());
  this.producer = new KafkaProducer<>(props);
}
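 
The getProperties helper is defined elsewhere in the project. A hypothetical reconstruction, based on the other producer examples above (the broker address, key serializer, and registry URL are assumptions):

private Properties getProperties(String valueSerializer)
{
  // hypothetical sketch; the real helper lives elsewhere in the project's sources
  Properties props = new Properties();
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumption
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); // assumption
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer);
  props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081"); // assumption
  return props;
}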