org.apache.avro.specific.SpecificRecord Java Examples
The following examples show how to use
org.apache.avro.specific.SpecificRecord.
Each example notes its original source file, project, and license.
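Before the project-specific examples, here is a minimal sketch of the typical SpecificRecord round trip: serialize a generated record with a SpecificDatumWriter, then read it back with a SpecificDatumReader. Only the core Avro API is used; the SpecificRecordRoundTrip class name and its generic helpers are illustrative and do not come from any of the projects below.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.specific.SpecificRecord;

// Illustrative helper class, not taken from any project below.
public class SpecificRecordRoundTrip {

    // Serialize a generated record using the schema embedded in the record itself.
    public static <T extends SpecificRecord> byte[] serialize(T record) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        SpecificDatumWriter<T> writer = new SpecificDatumWriter<>(record.getSchema());
        writer.write(record, encoder);
        encoder.flush(); // the encoder may buffer
        return out.toByteArray();
    }

    // Deserialize bytes back into an instance of the generated class.
    public static <T extends SpecificRecord> T deserialize(Class<T> recordClazz, byte[] bytes) throws IOException {
        SpecificDatumReader<T> reader = new SpecificDatumReader<>(recordClazz);
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
        return reader.read(null, decoder);
    }
}

Constructing the writer from record.getSchema() mirrors Examples #1 and #5 below, which additionally fall back to a GenericDatumWriter when the record is not a SpecificRecord.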
Example #1
Source File: AzureBlobAvroWriter.java From samza with Apache License 2.0
@VisibleForTesting
byte[] encodeRecord(IndexedRecord record) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Schema schema = record.getSchema();
    try {
        EncoderFactory encoderfactory = new EncoderFactory();
        BinaryEncoder encoder = encoderfactory.binaryEncoder(out, null);
        DatumWriter<IndexedRecord> writer;
        if (record instanceof SpecificRecord) {
            writer = new SpecificDatumWriter<>(schema);
        } else {
            writer = new GenericDatumWriter<>(schema);
        }
        writer.write(record, encoder);
        encoder.flush(); // encoder may buffer
    } catch (Exception e) {
        throw new SamzaException("Unable to serialize Avro record using schema within the record: " + schema.toString(), e);
    }
    return out.toByteArray();
}
Example #2
Source File: ParquetRowInputFormatTest.java From flink with Apache License 2.0
@Test
public void testReadRowFromSimpleRecord() throws IOException {
    Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> simple = TestUtil.getSimpleRecordTestData();
    Path path = TestUtil.createTempParquetFile(
        tempRoot.getRoot(), TestUtil.SIMPLE_SCHEMA, Arrays.asList(simple.f1, simple.f1));
    MessageType simpleType = SCHEMA_CONVERTER.convert(TestUtil.SIMPLE_SCHEMA);

    ParquetRowInputFormat inputFormat = new ParquetRowInputFormat(path, simpleType);
    inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext());

    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    assertEquals(1, splits.length);
    inputFormat.open(splits[0]);

    Row row = inputFormat.nextRecord(null);
    assertNotNull(row);
    assertEquals(simple.f2, row);

    row = inputFormat.nextRecord(null);
    assertNotNull(row);
    assertEquals(simple.f2, row);

    assertTrue(inputFormat.reachedEnd());
}
Example #3
Source File: PulsarAvroTableSink.java From pulsar with Apache License 2.0
public AvroKeyExtractor(
        String keyFieldName,
        String[] fieldNames,
        TypeInformation<?>[] fieldTypes,
        Class<? extends SpecificRecord> recordClazz) {

    checkArgument(fieldNames.length == fieldTypes.length,
        "Number of provided field names and types does not match.");

    Schema schema = SpecificData.get().getSchema(recordClazz);
    Schema.Field keyField = schema.getField(keyFieldName);
    Schema.Type keyType = keyField.schema().getType();

    int keyIndex = Arrays.asList(fieldNames).indexOf(keyFieldName);
    checkArgument(keyIndex >= 0,
        "Key field '" + keyFieldName + "' not found");

    checkArgument(Schema.Type.STRING.equals(keyType),
        "Key field must be of type 'STRING'");
    this.keyIndex = keyIndex;
}
Example #4
Source File: Thrift.java From hdfs2cass with Apache License 2.0
public static <T extends SpecificRecord> PCollection<ThriftRecord> byFieldNames(
    final PCollection<T> collection,
    final String rowKeyFieldName,
    final String ttlFieldName,
    final String timestampFieldName
) {
    final Class<T> recordType = collection.getPType().getTypeClass();
    T record;
    try {
        record = recordType.getConstructor().newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Could not create an instance of the record to determine its schema", e);
    }

    ThriftByFieldNamesFn<T> doFn = new ThriftByFieldNamesFn<T>(record.getSchema(), rowKeyFieldName, ttlFieldName, timestampFieldName);
    return collection.parallelDo(doFn, ThriftRecord.PTYPE);
}
Example #5
Source File: AvroMessageEncoderUtil.java From brooklin with BSD 2-Clause "Simplified" License
/**
 * Generates the MD5 hash of the schemaId and prepends it to the serialized record,
 * the byte array representing the payload of a BrooklinEnvelope.
 *
 * This is done so that when the client decodes the payload, it will contain a schemaId
 * which can be used to retrieve the schema from the Schema Registry.
 *
 * This method also converts an IndexedRecord into a byte array first.
 */
public static byte[] encode(String schemaId, IndexedRecord record) throws AvroEncodingException {
    Validate.notNull(record, "cannot encode null Record, schemaId: " + schemaId);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write(MAGIC_BYTE);
    byte[] md5Bytes = hexToMd5(schemaId);

    try {
        out.write(md5Bytes);
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        DatumWriter<org.apache.avro.generic.IndexedRecord> writer;
        if (record instanceof SpecificRecord) {
            writer = new SpecificDatumWriter<>(record.getSchema());
        } else {
            writer = new GenericDatumWriter<>(record.getSchema());
        }
        writer.write(record, encoder);
        encoder.flush(); // encoder may buffer
    } catch (IOException e) {
        throw new AvroEncodingException(e);
    }

    return out.toByteArray();
}
Example #6
Source File: NotificationEventUtils.java From stream-registry with Apache License 2.0
public static <W, R extends SpecificRecord> Function<W, R> loadToAvroStaticMethod(String clazz, String methodName, Class<W> argType)
        throws ClassNotFoundException, NoSuchMethodException {
    val method = Class.forName(clazz)
        .getDeclaredMethod(methodName, argType);

    Function<W, R> toAvroFn = obj -> {
        try {
            // We set null as the first argument, since we're expecting a static method
            return (R) method.invoke(null, obj);
        } catch (Exception e) {
            log.error("There was an error in {}.{} (toAvro) method: {}", clazz, methodName, e.getMessage(), e);
            throw new RuntimeException(e);
        }
    };

    return toAvroFn;
}
Example #7
Source File: ParquetRowInputFormatTest.java From flink with Apache License 2.0
@Test
public void testReadRowFromNestedRecord() throws IOException {
    Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> nested = TestUtil.getNestedRecordTestData();
    Path path = TestUtil.createTempParquetFile(tempRoot.newFolder(), TestUtil.NESTED_SCHEMA, Collections.singletonList(nested.f1));
    MessageType nestedType = SCHEMA_CONVERTER.convert(TestUtil.NESTED_SCHEMA);

    ParquetRowInputFormat inputFormat = new ParquetRowInputFormat(path, nestedType);
    inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext());

    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    assertEquals(1, splits.length);
    inputFormat.open(splits[0]);

    Row row = inputFormat.nextRecord(null);
    assertNotNull(row);
    assertEquals(7, row.getArity());

    assertEquals(nested.f2.getField(0), row.getField(0));
    assertEquals(nested.f2.getField(1), row.getField(1));
    assertArrayEquals((Long[]) nested.f2.getField(3), (Long[]) row.getField(3));
    assertArrayEquals((String[]) nested.f2.getField(4), (String[]) row.getField(4));
    assertEquals(nested.f2.getField(5), row.getField(5));
    assertArrayEquals((Row[]) nested.f2.getField(6), (Row[]) row.getField(6));
}
Example #8
Source File: PulsarAvroTableSink.java From pulsar with Apache License 2.0
/**
 * Create PulsarAvroTableSink.
 *
 * @param serviceUrl          pulsar service url
 * @param topic               topic in pulsar to which table is written
 * @param routingKeyFieldName routing key field name
 */
public PulsarAvroTableSink(
        String serviceUrl,
        String topic,
        Authentication authentication,
        String routingKeyFieldName,
        Class<? extends SpecificRecord> recordClazz) {
    checkArgument(StringUtils.isNotBlank(serviceUrl), "Service url not set");
    checkArgument(StringUtils.isNotBlank(topic), "Topic is null");
    checkNotNull(authentication, "authentication is null, set new AuthenticationDisabled() instead");
    clientConfigurationData = new ClientConfigurationData();
    producerConfigurationData = new ProducerConfigurationData();
    clientConfigurationData.setServiceUrl(serviceUrl);
    clientConfigurationData.setAuthentication(authentication);
    producerConfigurationData.setTopicName(topic);
    this.routingKeyFieldName = routingKeyFieldName;
    this.recordClazz = recordClazz;
}
Example #9
Source File: AvroDeserializationSchema.java From flink with Apache License 2.0
void checkAvroInitialized() {
    if (datumReader != null) {
        return;
    }

    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    if (SpecificRecord.class.isAssignableFrom(recordClazz)) {
        SpecificData specificData = new SpecificData(cl);
        this.datumReader = new SpecificDatumReader<>(specificData);
        this.reader = specificData.getSchema(recordClazz);
    } else {
        this.reader = new Schema.Parser().parse(schemaString);
        GenericData genericData = new GenericData(cl);
        this.datumReader = new GenericDatumReader<>(null, this.reader, genericData);
    }

    this.inputStream = new MutableByteArrayInputStream();
    this.decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
}
Example #10
Source File: NetworkTransport.java From reef with Apache License 2.0
/**
 * Sends a message to the C# side of the bridge.
 * @param message An Avro message class derived from SpecificRecord.
 * @throws RuntimeException if invoked before initialization is complete.
 */
public void send(final long identifier, final SpecificRecord message) {
    if (sender != null) {
        sender.onNext(serializer.write(message, identifier));
    } else {
        final String msgClassName = message.getClass().getCanonicalName();
        LOG.log(Level.SEVERE, "Attempt to send message [{0}] before network is initialized", msgClassName);
        throw new RuntimeException("NetworkTransport not initialized: failed to send " + msgClassName);
    }
}
Example #11
Source File: ConsumerEventHandlerForKafka.java From stream-registry with Apache License 2.0
private Future<SendResult<SpecificRecord, SpecificRecord>> sendConsumerNotificationEvent(NotificationEvent<Consumer> event) {
    return sendEntityNotificationEvent(
        consumerToKeyRecord,
        consumerToValueRecord,
        kafkaTemplate::send,
        notificationEventsTopic,
        event
    );
}
Example #12
Source File: ConsumerParserProperties.java From stream-registry with Apache License 2.0
private <R extends SpecificRecord> Function<Consumer, R> loadKeyParser() {
    Objects.requireNonNull(keyParserClass, getWarningMessageOnNotDefinedProp("enabled consumer type parser", CUSTOM_CONSUMER_KEY_PARSER_CLASS_PROPERTY));
    Objects.requireNonNull(keyParserMethod, getWarningMessageOnNotDefinedProp("enabled consumer type parser", CUSTOM_CONSUMER_KEY_PARSER_METHOD_PROPERTY));
    try {
        return NotificationEventUtils.loadToAvroStaticMethod(keyParserClass, keyParserMethod, Consumer.class);
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
Example #13
Source File: AvroRowDeSerializationSchemaTest.java From flink with Apache License 2.0
@Test
public void testSpecificSerializeDeserializeFromClass() throws IOException {
    final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> testData = AvroTestUtils.getSpecificTestData();

    final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(testData.f0);
    final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(testData.f0);

    final byte[] bytes = serializationSchema.serialize(testData.f2);
    final Row actual = deserializationSchema.deserialize(bytes);

    assertEquals(testData.f2, actual);
}
Example #14
Source File: StreamParserProperties.java From stream-registry with Apache License 2.0
private <R extends SpecificRecord> Function<Stream, R> loadKeyParser() {
    Objects.requireNonNull(keyParserClass, getWarningMessageOnNotDefinedProp("enabled stream type parser", CUSTOM_STREAM_KEY_PARSER_CLASS_PROPERTY));
    Objects.requireNonNull(keyParserMethod, getWarningMessageOnNotDefinedProp("enabled stream type parser", CUSTOM_STREAM_KEY_PARSER_METHOD_PROPERTY));
    try {
        return NotificationEventUtils.loadToAvroStaticMethod(keyParserClass, keyParserMethod, Stream.class);
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
Example #15
Source File: PulsarAvroTableSink.java From pulsar with Apache License 2.0
public PulsarAvroTableSink(
        ClientConfigurationData clientConfigurationData,
        ProducerConfigurationData producerConfigurationData,
        String routingKeyFieldName,
        Class<? extends SpecificRecord> recordClazz) {
    this.clientConfigurationData = checkNotNull(clientConfigurationData, "client config can not be null");
    this.producerConfigurationData = checkNotNull(producerConfigurationData, "producer config can not be null");
    checkArgument(StringUtils.isNotBlank(clientConfigurationData.getServiceUrl()), "Service url not set");
    checkArgument(StringUtils.isNotBlank(producerConfigurationData.getTopicName()), "Topic is null");
    this.routingKeyFieldName = routingKeyFieldName;
    this.recordClazz = recordClazz;
}
Example #16
Source File: JavaSessionize.java From hadoop-arch-book with Apache License 2.0
@Override
public int compareTo(SpecificRecord o) {
    if (this == o) return 0;
    if (o instanceof SerializableLogLine) {
        SerializableLogLine that = (SerializableLogLine) o;
        if (this.getTimestamp() < that.getTimestamp()) return -1;
        if (this.getTimestamp() > that.getTimestamp()) return 1;
        return 0;
    } else {
        throw new IllegalArgumentException("Can only compare two LogLines");
    }
}
Example #17
Source File: AvroRowDeSerializationSchemaTest.java From flink with Apache License 2.0
@Test
public void testSpecificDeserializeFromClassSeveralTimes() throws IOException {
    final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> testData = AvroTestUtils.getSpecificTestData();

    final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(testData.f0);
    final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(testData.f0);

    final byte[] bytes = serializationSchema.serialize(testData.f2);
    deserializationSchema.deserialize(bytes);
    deserializationSchema.deserialize(bytes);
    final Row actual = deserializationSchema.deserialize(bytes);

    assertEquals(testData.f2, actual);
}
Example #18
Source File: ParquetRowInputFormatTest.java From flink with Apache License 2.0
@Test(expected = IllegalArgumentException.class)
public void testInvalidProjectionOfNestedRecord() throws Exception {
    Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> nested = TestUtil.getNestedRecordTestData();
    Path path = TestUtil.createTempParquetFile(tempRoot.newFolder(), TestUtil.NESTED_SCHEMA, Collections.singletonList(nested.f1));
    MessageType nestedType = SCHEMA_CONVERTER.convert(TestUtil.NESTED_SCHEMA);

    ParquetRowInputFormat inputFormat = new ParquetRowInputFormat(path, nestedType);
    inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext());
    inputFormat.selectFields(new String[]{"bar", "celona"});
}
Example #19
Source File: AvroSchemaConverter.java From flink with Apache License 2.0
/**
 * Converts an Avro class into a nested row structure with deterministic field order and data
 * types that are compatible with Flink's Table & SQL API.
 *
 * @param avroClass Avro specific record that contains schema information
 * @return type information matching the schema
 */
@SuppressWarnings("unchecked")
public static <T extends SpecificRecord> TypeInformation<Row> convertToTypeInfo(Class<T> avroClass) {
    Preconditions.checkNotNull(avroClass, "Avro specific record class must not be null.");
    // determine schema to retrieve deterministic field order
    final Schema schema = SpecificData.get().getSchema(avroClass);
    return (TypeInformation<Row>) convertToTypeInfo(schema);
}
Example #20
Source File: AvroRowFormatFactory.java From flink with Apache License 2.0
@Override
public DeserializationSchema<Row> createDeserializationSchema(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    // create and configure
    if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
        return new AvroRowDeserializationSchema(
            descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
    } else {
        return new AvroRowDeserializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
    }
}
Example #21
Source File: NotificationEventListenerKafkaIntegrationTest.java From stream-registry with Apache License 2.0
public KafkaTemplate<SpecificRecord, SpecificRecord> kafkaTemplate() {
    KafkaTemplate<SpecificRecord, SpecificRecord> template = new KafkaTemplate<>(producerFactory());
    template.setProducerListener(new ProducerListener<>() {
        @Override
        public void onSuccess(ProducerRecord<SpecificRecord, SpecificRecord> producerRecord, RecordMetadata recordMetadata) {
            log.info("Produced record {}", producerRecord);
            producedEvents.put((AvroKey) producerRecord.key(), (AvroEvent) producerRecord.value());
            producedHeaders.put((AvroKey) producerRecord.key(), producerRecord.headers());
        }
    });
    return template;
}
Example #22
Source File: ParquetMapInputFormatTest.java From flink with Apache License 2.0
@Test @SuppressWarnings("unchecked") public void testReadMapFromNestedRecord() throws IOException { Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> nested = TestUtil.getNestedRecordTestData(); Path path = TestUtil.createTempParquetFile(tempRoot.getRoot(), TestUtil.NESTED_SCHEMA, Collections.singletonList(nested.f1)); MessageType nestedType = SCHEMA_CONVERTER.convert(TestUtil.NESTED_SCHEMA); ParquetMapInputFormat inputFormat = new ParquetMapInputFormat(path, nestedType); inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext()); FileInputSplit[] splits = inputFormat.createInputSplits(1); assertEquals(1, splits.length); inputFormat.open(splits[0]); Map map = inputFormat.nextRecord(null); assertNotNull(map); assertEquals(5, map.size()); assertArrayEquals((Long[]) nested.f2.getField(3), (Long[]) map.get("arr")); assertArrayEquals((String[]) nested.f2.getField(4), (String[]) map.get("strArray")); Map<String, String> mapItem = (Map<String, String>) ((Map) map.get("nestedMap")).get("mapItem"); assertEquals(2, mapItem.size()); assertEquals("map", mapItem.get("type")); assertEquals("hashMap", mapItem.get("value")); List<Map<String, String>> nestedArray = (List<Map<String, String>>) map.get("nestedArray"); assertEquals(1, nestedArray.size()); assertEquals("color", nestedArray.get(0).get("type")); assertEquals(1L, nestedArray.get(0).get("value")); }
Example #23
Source File: AvroRowFormatFactory.java From flink with Apache License 2.0
@Override
public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    // create and configure
    if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
        return new AvroRowSerializationSchema(
            descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
    } else {
        return new AvroRowSerializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
    }
}
Example #24
Source File: AvroRowDeSerializationSchemaTest.java From flink with Apache License 2.0
@Test
public void testSpecificDeserializeFromSchemaSeveralTimes() throws IOException {
    final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> testData = AvroTestUtils.getSpecificTestData();
    final String schemaString = testData.f1.getSchema().toString();

    final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(schemaString);
    final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(schemaString);

    final byte[] bytes = serializationSchema.serialize(testData.f2);
    deserializationSchema.deserialize(bytes);
    deserializationSchema.deserialize(bytes);
    final Row actual = deserializationSchema.deserialize(bytes);

    assertEquals(testData.f2, actual);
}
Example #25
Source File: ConsumerParserProperties.java From stream-registry with Apache License 2.0
private <R extends SpecificRecord> Function<Consumer, R> loadValueParser() {
    Objects.requireNonNull(valueParserClass, getWarningMessageOnNotDefinedProp("enabled consumer type parser", CUSTOM_CONSUMER_VALUE_PARSER_CLASS_PROPERTY));
    Objects.requireNonNull(valueParserMethod, getWarningMessageOnNotDefinedProp("enabled consumer type parser", CUSTOM_CONSUMER_VALUE_PARSER_METHOD_PROPERTY));
    try {
        return NotificationEventUtils.loadToAvroStaticMethod(valueParserClass, valueParserMethod, Consumer.class);
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
Example #26
Source File: FastSerdeBenchmarkSupport.java From avro-fastserde with Apache License 2.0
public static <T extends SpecificRecord> T toSpecificRecord(GenericData.Record record) throws IOException {
    GenericDatumWriter<GenericData.Record> datumWriter = new GenericDatumWriter<>(record.getSchema());
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Encoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
    datumWriter.write(record, binaryEncoder);
    binaryEncoder.flush();

    SpecificDatumReader<T> datumReader = new SpecificDatumReader<>(record.getSchema());
    return datumReader.read(null, DecoderFactory.get().binaryDecoder(baos.toByteArray(), null));
}
Example #27
Source File: AvroRowDeserializationSchema.java From flink with Apache License 2.0
@SuppressWarnings("unchecked") private void readObject(ObjectInputStream inputStream) throws ClassNotFoundException, IOException { recordClazz = (Class<? extends SpecificRecord>) inputStream.readObject(); schemaString = inputStream.readUTF(); typeInfo = (RowTypeInfo) AvroSchemaConverter.<Row>convertToTypeInfo(schemaString); schema = new Schema.Parser().parse(schemaString); if (recordClazz != null) { record = (SpecificRecord) SpecificData.newInstance(recordClazz, schema); } else { record = new GenericData.Record(schema); } datumReader = new SpecificDatumReader<>(schema); this.inputStream = new MutableByteArrayInputStream(); decoder = DecoderFactory.get().binaryDecoder(this.inputStream, null); }
Example #28
Source File: AvroRowDeserializationSchema.java From flink with Apache License 2.0
/**
 * Creates an Avro deserialization schema for the given specific record class. Having the
 * concrete Avro record class might improve performance.
 *
 * @param recordClazz Avro record class used to deserialize Avro's record to Flink's row
 */
public AvroRowDeserializationSchema(Class<? extends SpecificRecord> recordClazz) {
    Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
    this.recordClazz = recordClazz;
    schema = SpecificData.get().getSchema(recordClazz);
    typeInfo = (RowTypeInfo) AvroSchemaConverter.convertToTypeInfo(recordClazz);
    schemaString = schema.toString();
    record = (IndexedRecord) SpecificData.newInstance(recordClazz, schema);
    datumReader = new SpecificDatumReader<>(schema);
    inputStream = new MutableByteArrayInputStream();
    decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
}