io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient Java Examples

The following examples show how to use io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient, an in-memory stand-in for a Confluent Schema Registry that is useful in unit tests. The examples are drawn from open source projects; the source file, project, and license are noted above each one.
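Before the project-specific examples, here is a minimal, self-contained sketch of the class in action. It assumes the Avro-only client API that all of the examples below use, where register(subject, schema) returns an int schema id and getById(id) returns the Avro Schema (newer registry client versions changed these signatures):

import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

import java.io.IOException;

public class MockSchemaRegistryClientSketch {

  public static void main(String[] args) throws IOException, RestClientException {
    // The mock client keeps every schema in memory; no registry process is needed.
    MockSchemaRegistryClient client = new MockSchemaRegistryClient();

    Schema schema = SchemaBuilder.record("testRecord")
        .fields()
        .optionalString("testField")
        .endRecord();

    // register() assigns and returns a schema id, just as a real registry would.
    int schemaId = client.register("testTopic-value", schema);

    // The schema can be fetched back by its id.
    Schema fetched = client.getById(schemaId);
    System.out.println(schemaId + " -> " + fetched);
  }
}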
Example #1
Source File: FindDistinctEventsTest.java    From kafka-tutorials with Apache License 2.0
private static SpecificAvroSerde<Click> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();
  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = Click.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<Click> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
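A note on this recurring pattern: the MockSchemaRegistryClient passed to the SpecificAvroSerde constructor is what actually serves the schemas, so the schema.registry.url value is never contacted; it is set only because the serde's configure() call expects a registry URL to be present. Below is a hedged round-trip sketch using the serde built above (roundTrip is a hypothetical helper; Click and envProps are as in the example, and the Click instance is assumed to be fully populated):

// Illustrative only: round-trip one pre-populated record through the serde.
private static void roundTrip(Properties envProps, Click click)
    throws IOException, RestClientException {
  SpecificAvroSerde<Click> serde = makeSerializer(envProps);
  String topic = envProps.getProperty("input.topic.name");

  byte[] bytes = serde.serializer().serialize(topic, click);
  // The deserializer resolves the schema from the same in-memory registry.
  Click roundTripped = serde.deserializer().deserialize(topic, bytes);
  System.out.println(click.equals(roundTripped));  // expected: true
}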
 
Example #2
Source File: AggregatingSumTest.java    From kafka-tutorials with Apache License 2.0
private SpecificAvroSerde<TicketSale> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();
  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = TicketSale.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<TicketSale> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
 
Example #3
Source File: AggregatingCountTest.java    From kafka-tutorials with Apache License 2.0
private SpecificAvroSerde<TicketSale> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();
  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = TicketSale.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<TicketSale> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
 
Example #4
Source File: SqlPredicateTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testFilterBiggerExpression() throws Exception {
  String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100 AND LEN(col2) = 5;";
  PlanNode logicalPlan = buildLogicalPlan(selectQuery);
  FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);

  initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(),
                                           kStream,
                                           ksqlStream.getKeyField(), new ArrayList<>(),
                                           SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());
  SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(), initialSchemaKStream
      .getSchema(), false, functionRegistry);

  Assert.assertTrue(predicate
                        .getFilterExpression()
                        .toString()
                        .equalsIgnoreCase("((TEST1.COL0 > 100) AND"
                                          + " (LEN(TEST1.COL2) = 5))"));
  Assert.assertTrue(predicate.getColumnIndexes().length == 3);

}
 
Example #5
Source File: KsqlStructuredDataOutputNodeTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyNonWindowedTable() {
  KafkaTopicClient topicClientForNonWindowTable = EasyMock.mock(KafkaTopicClient.class);
  KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(false);
  StreamsBuilder streamsBuilder = new StreamsBuilder();
  Map<String, String> topicConfig = ImmutableMap.of(
      TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
  topicClientForNonWindowTable.createTopic("output", 4, (short) 3, topicConfig);
  EasyMock.replay(topicClientForNonWindowTable);
  SchemaKStream schemaKStream = outputNode.buildStream(
      streamsBuilder,
      ksqlConfig,
      topicClientForNonWindowTable,
      new FunctionRegistry(),
      new HashMap<>(),
      new MockSchemaRegistryClient());
  assertThat(schemaKStream, instanceOf(SchemaKTable.class));
  EasyMock.verify();

}
 
Example #6
Source File: KsqlStructuredDataOutputNodeTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyWindowedTable() {
  KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
  KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(true);

  StreamsBuilder streamsBuilder = new StreamsBuilder();
  topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
  EasyMock.replay(topicClientForWindowTable);
  SchemaKStream schemaKStream = outputNode.buildStream(
      streamsBuilder,
      ksqlConfig,
      topicClientForWindowTable,
      new FunctionRegistry(),
      new HashMap<>(),
      new MockSchemaRegistryClient());
  assertThat(schemaKStream, instanceOf(SchemaKTable.class));
  EasyMock.verify();

}
 
Example #7
Source File: KsqlStructuredDataOutputNodeTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyStream() {
  KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);

  StreamsBuilder streamsBuilder = new StreamsBuilder();
  topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
  EasyMock.replay(topicClientForWindowTable);
  SchemaKStream schemaKStream = outputNode.buildStream(
      streamsBuilder,
      ksqlConfig,
      topicClientForWindowTable,
      new FunctionRegistry(),
      new HashMap<>(),
      new MockSchemaRegistryClient());
  assertThat(schemaKStream, instanceOf(SchemaKStream.class));
  EasyMock.verify();

}
 
Example #8
Source File: ProjectNodeTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test(expected = KsqlException.class)
public void shouldThrowKsqlExcptionIfSchemaSizeDoesntMatchProjection() {
  mockSourceNode();

  EasyMock.replay(source, stream);

  final ProjectNode node = new ProjectNode(new PlanNodeId("1"),
      source,
      SchemaBuilder.struct()
          .field("field1", Schema.STRING_SCHEMA)
          .field("field2", Schema.STRING_SCHEMA)
          .build(),
      Collections.singletonList(new BooleanLiteral("true")));


  node.buildStream(builder,
      ksqlConfig,
      kafkaTopicClient,
      functionRegistry,
      props, new MockSchemaRegistryClient());
}
 
Example #9
Source File: SchemaKStreamTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testSelectSchemaKStream() {
  String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  ProjectNode projectNode = (ProjectNode) logicalPlan.getSources().get(0);
  initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream,
                                           ksqlStream.getKeyField(), new ArrayList<>(),
                                           SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());

  List<Pair<String, Expression>> projectNameExpressionPairList = projectNode.getProjectNameExpressionPairList();
  SchemaKStream projectedSchemaKStream = initialSchemaKStream.select(projectNameExpressionPairList);
  Assert.assertTrue(projectedSchemaKStream.getSchema().fields().size() == 3);
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0") ==
                    projectedSchemaKStream.getSchema().fields().get(0));
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL2") ==
                    projectedSchemaKStream.getSchema().fields().get(1));
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL3") ==
                    projectedSchemaKStream.getSchema().fields().get(2));

  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0").schema().type() == Schema.Type.INT64);
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL2").schema().type() == Schema.Type.STRING);
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL3").schema().type() == Schema.Type.FLOAT64);

  Assert.assertTrue(projectedSchemaKStream.getSourceSchemaKStreams().get(0) ==
                    initialSchemaKStream);
}
 
Example #10
Source File: SchemaKStreamTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testSelectWithExpression() throws Exception {
  String selectQuery = "SELECT col0, LEN(UCASE(col2)), col3*3+5 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  ProjectNode projectNode = (ProjectNode) logicalPlan.getSources().get(0);
  initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream,
                                           ksqlStream.getKeyField(), new ArrayList<>(),
                                           SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());
  SchemaKStream projectedSchemaKStream = initialSchemaKStream.select(projectNode.getProjectNameExpressionPairList());
  Assert.assertTrue(projectedSchemaKStream.getSchema().fields().size() == 3);
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0") ==
                    projectedSchemaKStream.getSchema().fields().get(0));
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("KSQL_COL_1") ==
                    projectedSchemaKStream.getSchema().fields().get(1));
  Assert.assertTrue(projectedSchemaKStream.getSchema().field("KSQL_COL_2") ==
                    projectedSchemaKStream.getSchema().fields().get(2));

  Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0").schema().type() == Schema.Type.INT64);
  Assert.assertTrue(projectedSchemaKStream.getSchema().fields().get(1).schema().type() == Schema.Type.INT32);
  Assert.assertTrue(projectedSchemaKStream.getSchema().fields().get(2).schema().type() == Schema.Type.FLOAT64);

  Assert.assertTrue(projectedSchemaKStream.getSourceSchemaKStreams().get(0) == initialSchemaKStream);
}
 
Example #11
Source File: SchemaKStreamTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testGroupByKey() {
  String selectQuery = "SELECT col0, col1 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream,
      ksqlStream.getKeyField(), new ArrayList<>(),
      SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());

  Expression keyExpression = new DereferenceExpression(
      new QualifiedNameReference(QualifiedName.of("TEST1")), "COL0");
  KsqlTopicSerDe ksqlTopicSerDe = new KsqlJsonTopicSerDe();
  Serde<GenericRow> rowSerde = ksqlTopicSerDe.getGenericRowSerde(
      initialSchemaKStream.getSchema(), null, false, null);
  List<Expression> groupByExpressions = Arrays.asList(keyExpression);
  SchemaKGroupedStream groupedSchemaKStream = initialSchemaKStream.groupBy(
      Serdes.String(), rowSerde, groupByExpressions);

  Assert.assertEquals(groupedSchemaKStream.getKeyField().name(), "COL0");
}
 
Example #12
Source File: SchemaKStreamTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testGroupByMultipleColumns() {
  String selectQuery = "SELECT col0, col1 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream,
      ksqlStream.getKeyField(), new ArrayList<>(),
      SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());

  Expression col0Expression = new DereferenceExpression(
      new QualifiedNameReference(QualifiedName.of("TEST1")), "COL0");
  Expression col1Expression = new DereferenceExpression(
      new QualifiedNameReference(QualifiedName.of("TEST1")), "COL1");
  KsqlTopicSerDe ksqlTopicSerDe = new KsqlJsonTopicSerDe();
  Serde<GenericRow> rowSerde = ksqlTopicSerDe.getGenericRowSerde(
      initialSchemaKStream.getSchema(), null, false, null);
  List<Expression> groupByExpressions = Arrays.asList(col1Expression, col0Expression);
  SchemaKGroupedStream groupedSchemaKStream = initialSchemaKStream.groupBy(
      Serdes.String(), rowSerde, groupByExpressions);

  Assert.assertEquals(groupedSchemaKStream.getKeyField().name(), "TEST1.COL1|+|TEST1.COL0");
}
 
Example #13
Source File: SqlPredicateTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testFilter() throws Exception {
  String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = buildLogicalPlan(selectQuery);
  FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);

  initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(),
                                           kStream,
                                           ksqlStream.getKeyField(), new ArrayList<>(),
                                           SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());
  SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(), initialSchemaKStream
      .getSchema(), false, functionRegistry);

  Assert.assertTrue(predicate.getFilterExpression()
                        .toString().equalsIgnoreCase("(TEST1.COL0 > 100)"));
  Assert.assertTrue(predicate.getColumnIndexes().length == 1);

}
 
Example #14
Source File: ConfluentSchemaRegistryCoderTest.java    From Flink-CEPplus with Apache License 2.0
@Test(expected = IOException.class)
public void testMagicByteVerification() throws Exception {
	MockSchemaRegistryClient client = new MockSchemaRegistryClient();
	int schemaId = client.register("testTopic", Schema.create(Schema.Type.BOOLEAN));

	ConfluentSchemaRegistryCoder coder = new ConfluentSchemaRegistryCoder(client);
	ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
	DataOutputStream dataOutputStream = new DataOutputStream(byteOutStream);
	dataOutputStream.writeByte(5);
	dataOutputStream.writeInt(schemaId);
	dataOutputStream.flush();

	ByteArrayInputStream byteInStream = new ByteArrayInputStream(byteOutStream.toByteArray());
	coder.readSchema(byteInStream);

	// exception is thrown
}
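For context, this coder test and the matching success-path examples below exercise Confluent's wire format: a zero magic byte, then the 4-byte big-endian schema id, then the Avro payload. Writing 5 instead of 0 is exactly what makes readSchema throw. A tiny sketch of that framing (the helper name is illustrative):

// Sketch of the wire-format header these tests exercise:
// [magic byte 0x00][4-byte big-endian schema id][Avro payload ...]
static byte[] wireFormatHeader(int schemaId) {
  return java.nio.ByteBuffer.allocate(5)
      .put((byte) 0)     // any other magic byte is rejected
      .putInt(schemaId)  // big-endian, matching DataOutputStream.writeInt
      .array();
}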
 
Example #15
Source File: PhysicalPlanBuilderTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private PhysicalPlanBuilder buildPhysicalPlanBuilder(Map<String, Object> overrideProperties) {
  final StreamsBuilder streamsBuilder = new StreamsBuilder();
  final FunctionRegistry functionRegistry = new FunctionRegistry();
  Map<String, Object> configMap = new HashMap<>();
  configMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
  configMap.put("application.id", "KSQL");
  configMap.put("commit.interval.ms", 0);
  configMap.put("cache.max.bytes.buffering", 0);
  configMap.put("auto.offset.reset", "earliest");
  ksqlConfig = new KsqlConfig(configMap);
  return new PhysicalPlanBuilder(streamsBuilder,
      ksqlConfig,
      new FakeKafkaTopicClient(),
      functionRegistry,
      overrideProperties,
      false,
      metaStore,
      new MockSchemaRegistryClient(),
      testKafkaStreamsBuilder
  );

}
 
Example #16
Source File: KsqlGenericRowAvroSerializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldFailForIncompatibleType() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer
      (schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(1511897796092L, 1L, "item_1", "10.0", new Double[]{100.0},
                               Collections.singletonMap("key1", 100.0));

  GenericRow genericRow = new GenericRow(columns);
  try {
    byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    Assert.fail("Did not fail for incompatible types.");
  } catch (Exception e) {
    assertThat(e.getMessage(), equalTo("org.apache.kafka.common.errors.SerializationException: Error serializing Avro message"));
  }

}
 
Example #17
Source File: ConfluentSchemaRegistryCoderTest.java    From flink with Apache License 2.0
@Test
public void testSpecificRecordWithConfluentSchemaRegistry() throws Exception {
	MockSchemaRegistryClient client = new MockSchemaRegistryClient();

	Schema schema = SchemaBuilder.record("testRecord")
		.fields()
		.optionalString("testField")
		.endRecord();
	int schemaId = client.register("testTopic", schema);

	ConfluentSchemaRegistryCoder registryCoder = new ConfluentSchemaRegistryCoder(client);
	ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
	DataOutputStream dataOutputStream = new DataOutputStream(byteOutStream);
	dataOutputStream.writeByte(0);
	dataOutputStream.writeInt(schemaId);
	dataOutputStream.flush();

	ByteArrayInputStream byteInStream = new ByteArrayInputStream(byteOutStream.toByteArray());
	Schema readSchema = registryCoder.readSchema(byteInStream);

	assertEquals(schema, readSchema);
	assertEquals(0, byteInStream.available());
}
 
Example #18
Source File: ConfluentSchemaRegistryCoderTest.java    From flink with Apache License 2.0
@Test(expected = IOException.class)
public void testMagicByteVerification() throws Exception {
	MockSchemaRegistryClient client = new MockSchemaRegistryClient();
	int schemaId = client.register("testTopic", Schema.create(Schema.Type.BOOLEAN));

	ConfluentSchemaRegistryCoder coder = new ConfluentSchemaRegistryCoder(client);
	ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
	DataOutputStream dataOutputStream = new DataOutputStream(byteOutStream);
	dataOutputStream.writeByte(5);
	dataOutputStream.writeInt(schemaId);
	dataOutputStream.flush();

	ByteArrayInputStream byteInStream = new ByteArrayInputStream(byteOutStream.toByteArray());
	coder.readSchema(byteInStream);

	// exception is thrown
}
 
Example #19
Source File: ConfluentKafkaSchemaRegistryTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testRegisterAndGetByKey() throws SchemaRegistryException {
  Properties properties = new Properties();
  properties.setProperty(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_URL, TEST_URL);

  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KafkaSchemaRegistry<Integer, Schema> kafkaSchemaRegistry =
      new ConfluentKafkaSchemaRegistry(properties, schemaRegistryClient);

  Schema schema =
      SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type()
          .stringType().noDefault().endRecord();

  Integer id = kafkaSchemaRegistry.register(schema);
  Assert.assertEquals(schema, kafkaSchemaRegistry.getSchemaByKey(id));
}
 
Example #20
Source File: ConfluentKafkaSchemaRegistryTest.java    From incubator-gobblin with Apache License 2.0
private void doTestRegisterAndGetLatest(Properties properties) throws SchemaRegistryException {

    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KafkaSchemaRegistry<Integer, Schema> kafkaSchemaRegistry =
        new ConfluentKafkaSchemaRegistry(properties, schemaRegistryClient);

    Schema schema1 =
        SchemaBuilder.record(TEST_RECORD_NAME + "1").namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type()
            .stringType().noDefault().endRecord();

    Schema schema2 =
        SchemaBuilder.record(TEST_RECORD_NAME + "2").namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type()
            .stringType().noDefault().endRecord();

    kafkaSchemaRegistry.register(schema1, TEST_TOPIC_NAME);
    kafkaSchemaRegistry.register(schema2, TEST_TOPIC_NAME);

    Assert.assertNotEquals(schema1, kafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
    Assert.assertEquals(schema2, kafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
  }
 
Example #21
Source File: ConfluentSchemaRegistryCoderTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSpecificRecordWithConfluentSchemaRegistry() throws Exception {
	MockSchemaRegistryClient client = new MockSchemaRegistryClient();

	Schema schema = SchemaBuilder.record("testRecord")
		.fields()
		.optionalString("testField")
		.endRecord();
	int schemaId = client.register("testTopic", schema);

	ConfluentSchemaRegistryCoder registryCoder = new ConfluentSchemaRegistryCoder(client);
	ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
	DataOutputStream dataOutputStream = new DataOutputStream(byteOutStream);
	dataOutputStream.writeByte(0);
	dataOutputStream.writeInt(schemaId);
	dataOutputStream.flush();

	ByteArrayInputStream byteInStream = new ByteArrayInputStream(byteOutStream.toByteArray());
	Schema readSchema = registryCoder.readSchema(byteInStream);

	assertEquals(schema, readSchema);
	assertEquals(0, byteInStream.available());
}
 
Example #22
Source File: StreamsIngestTest.java    From kafka-tutorials with Apache License 2.0
private SpecificAvroSerde<City> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();
  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = City.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<City> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
 
Example #23
Source File: FilterEventsTest.java    From kafka-tutorials with Apache License 2.0
private SpecificAvroSerde<Publication> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();
  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = Publication.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<Publication> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
 
Example #24
Source File: AggregateNodeTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private SchemaKStream buildStream(AggregateNode aggregateNode) {
  return aggregateNode.buildStream(builder,
      ksqlConfig,
      topicClient,
      new FunctionRegistry(),
      new HashMap<>(), new MockSchemaRegistryClient());
}
 
Example #25
Source File: AvroGenericUtils.java    From simplesource with Apache License 2.0
public static Serde<GenericRecord> genericAvroSerde(
        final String schemaRegistryUrl,
        final boolean useMockSchemaRegistry,
        final boolean isKey,
        final SchemaNameStrategy schemaNameStrategy) {
    final Map<String, Object> configMap = avroSchemaRegistryConfig(schemaRegistryUrl, schemaNameStrategy);
    final Serde<GenericRecord> serde = useMockSchemaRegistry
            ? new GenericAvroSerde(new MockSchemaRegistryClient())
            : new GenericAvroSerde();
    serde.configure(configMap, isKey);
    return serde;
}
 
Example #26
Source File: KsqlGenericRowAvroSerializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void shouldSerializeRowWithNullValues() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer
      (schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, null, null);

  GenericRow genericRow = new GenericRow(columns);
  ksqlGenericRowAvroSerializer.serialize("t1", genericRow);

}
 
Example #27
Source File: KsqlGenericRowAvroSerializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSerializeRowWithNullCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer
      (schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(1511897796092L, 1L, null, 10.0, new Double[]{100.0},
                               Collections.singletonMap("key1", 100.0));

  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
      (1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo
      (1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()), equalTo
      (null));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo
      (10.0));

  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());

  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map,
             equalTo(Collections.singletonMap(new Utf8("key1"), 100.0)));

}
 
Example #28
Source File: KsqlGenericRowAvroSerializerTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSerializeRowCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer
      (schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[]{100.0},
                               Collections.singletonMap("key1", 100.0));

  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
      (1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo
      (1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo
      (10.0));

  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());

  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map.size(), equalTo(1));
  assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));

}