Java Code Examples for org.apache.avro.reflect.ReflectDatumReader

The following examples show how to use org.apache.avro.reflect.ReflectDatumReader. They are extracted from open source projects; the source project, source file, and license for each example are noted in the header above its code.
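Before the project examples, here is a minimal, self-contained round-trip sketch of the pattern they all build on: ReflectData derives an Avro schema from a plain Java class, ReflectDatumWriter serializes an instance, and ReflectDatumReader reads it back. The User class and its sample values are hypothetical and used only for illustration; they are not taken from any of the projects below.

import java.io.ByteArrayOutputStream;

import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;

public class ReflectRoundTrip {

  // Hypothetical POJO for this sketch; Avro reflection expects a no-arg constructor.
  public static class User {
    String name;
    int age;

    public User() {}
    public User(String name, int age) { this.name = name; this.age = age; }
  }

  public static void main(String[] args) throws Exception {
    // Derive the Avro schema from the Java class via reflection.
    Schema schema = ReflectData.get().getSchema(User.class);

    // Serialize an instance with ReflectDatumWriter.
    ReflectDatumWriter<User> writer = new ReflectDatumWriter<>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(new User("alice", 30), encoder);
    encoder.flush();

    // Read it back with ReflectDatumReader; the Class-based constructor
    // derives the same schema internally.
    ReflectDatumReader<User> reader = new ReflectDatumReader<>(User.class);
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
    User copy = reader.read(null, decoder);
    System.out.println(copy.name + " " + copy.age);
  }
}

Several of the examples below instead use the two-schema constructor, ReflectDatumReader(writerSchema, readerSchema), which lets Avro resolve differences between the schema the data was written with and the schema the reading class expects.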
Example 1
Source Project: Flink-CEPplus   Source File: AvroInputFormat.java    License: Apache License 2.0
private DataFileReader<E> initReader(FileInputSplit split) throws IOException {
	DatumReader<E> datumReader;

	if (org.apache.avro.generic.GenericRecord.class == avroValueType) {
		datumReader = new GenericDatumReader<E>();
	} else {
		datumReader = org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)
			? new SpecificDatumReader<E>(avroValueType) : new ReflectDatumReader<E>(avroValueType);
	}
	if (LOG.isInfoEnabled()) {
		LOG.info("Opening split {}", split);
	}

	SeekableInput in = new FSDataInputStreamWrapper(stream, split.getPath().getFileSystem().getFileStatus(split.getPath()).getLen());
	DataFileReader<E> dataFileReader = (DataFileReader) DataFileReader.openReader(in, datumReader);

	if (LOG.isDebugEnabled()) {
		LOG.debug("Loaded SCHEMA: {}", dataFileReader.getSchema());
	}

	end = split.getStart() + split.getLength();
	recordsReadSinceLastSync = 0;
	return dataFileReader;
}
 
Example 2
Source Project: flink   Source File: AvroInputFormat.java    License: Apache License 2.0
private DataFileReader<E> initReader(FileInputSplit split) throws IOException {
	DatumReader<E> datumReader;

	if (org.apache.avro.generic.GenericRecord.class == avroValueType) {
		datumReader = new GenericDatumReader<E>();
	} else {
		datumReader = org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)
			? new SpecificDatumReader<E>(avroValueType) : new ReflectDatumReader<E>(avroValueType);
	}
	if (LOG.isInfoEnabled()) {
		LOG.info("Opening split {}", split);
	}

	SeekableInput in = new FSDataInputStreamWrapper(stream, split.getPath().getFileSystem().getFileStatus(split.getPath()).getLen());
	DataFileReader<E> dataFileReader = (DataFileReader) DataFileReader.openReader(in, datumReader);

	if (LOG.isDebugEnabled()) {
		LOG.debug("Loaded SCHEMA: {}", dataFileReader.getSchema());
	}

	end = split.getStart() + split.getLength();
	recordsReadSinceLastSync = 0;
	return dataFileReader;
}
 
Example 3
Source Project: hadoop   Source File: AvroTestUtil.java    License: Apache License 2.0
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  // check that schema matches expected
  Schema s = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), s);

  // check that value is serialized correctly
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
  ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
  Object after =
    reader.read(null,
                DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, after);
}
 
Example 4
Source Project: big-c   Source File: AvroTestUtil.java    License: Apache License 2.0
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  // check that schema matches expected
  Schema s = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), s);

  // check that value is serialized correctly
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
  ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
  Object after =
    reader.read(null,
                DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, after);
}
 
Example 5
Source Project: flink   Source File: AvroStreamingFileSinkITCase.java    License: Apache License 2.0
@Test
public void testWriteAvroReflect() throws Exception {
	File folder = TEMPORARY_FOLDER.newFolder();

	List<Datum> data = Arrays.asList(
		new Datum("a", 1),
		new Datum("b", 2),
		new Datum("c", 3));

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(1);
	env.enableCheckpointing(100);

	AvroWriterFactory<Datum> avroWriterFactory = AvroWriters.forReflectRecord(Datum.class);
	DataStream<Datum> stream = env.addSource(
		new FiniteTestSource<>(data),
		TypeInformation.of(Datum.class));
	stream.addSink(StreamingFileSink.forBulkFormat(
		Path.fromLocalFile(folder),
		avroWriterFactory).build());
	env.execute();

	validateResults(folder, new ReflectDatumReader<>(Datum.class), data);
}
 
Example 6
Source Project: stratosphere   Source File: AvroInputFormat.java    License: Apache License 2.0
@Override
public void open(FileInputSplit split) throws IOException {
	super.open(split);
	
	this.wrapper = InstantiationUtil.instantiate(avroWrapperTypeClass, AvroBaseValue.class);
	
	DatumReader<E> datumReader;
	if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)) {
		datumReader = new SpecificDatumReader<E>(avroValueType);
	} else {
		datumReader = new ReflectDatumReader<E>(avroValueType);
	}
	
	LOG.info("Opening split " + split);
	
	SeekableInput in = new FSDataInputStreamWrapper(stream, (int) split.getLength());
	
	dataFileReader = DataFileReader.openReader(in, datumReader);
	dataFileReader.sync(split.getStart());
	
	reuseAvroValue = null;
}
 
Example 7
Source Project: stratosphere   Source File: AvroInputFormat.java    License: Apache License 2.0
@Override
public void open(FileInputSplit split) throws IOException {
	super.open(split);

	DatumReader<E> datumReader;
	if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)) {
		datumReader = new SpecificDatumReader<E>(avroValueType);
	} else {
		datumReader = new ReflectDatumReader<E>(avroValueType);
	}
	
	LOG.info("Opening split " + split);
	
	SeekableInput in = new FSDataInputStreamWrapper(stream, (int) split.getLength());
	
	dataFileReader = DataFileReader.openReader(in, datumReader);
	dataFileReader.sync(split.getStart());
}
 
Example 8
@Override
public DatumReader<T> createDatumReader(Schema schema) {
    if (readerSchema == null) {
        return new ReflectDatumReader<>(schema);
    } else {
        return new ReflectDatumReader<>(schema, readerSchema);
    }
}
 
Example 9
Source Project: Flink-CEPplus   Source File: AvroFactory.java    License: Apache License 2.0
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
private static <T> AvroFactory<T> fromReflective(Class<T> type, ClassLoader cl, Optional<Schema> previousSchema) {
	ReflectData reflectData = new ReflectData(cl);
	Schema newSchema = reflectData.getSchema(type);

	return new AvroFactory<>(
		reflectData,
		newSchema,
		new ReflectDatumReader<>(previousSchema.orElse(newSchema), newSchema, reflectData),
		new ReflectDatumWriter<>(newSchema, reflectData)
	);
}
 
Example 10
Source Project: flink   Source File: AvroFactory.java    License: Apache License 2.0
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
private static <T> AvroFactory<T> fromReflective(Class<T> type, ClassLoader cl, Optional<Schema> previousSchema) {
	ReflectData reflectData = new ReflectData(cl);
	Schema newSchema = reflectData.getSchema(type);

	return new AvroFactory<>(
		reflectData,
		newSchema,
		new ReflectDatumReader<>(previousSchema.orElse(newSchema), newSchema, reflectData),
		new ReflectDatumWriter<>(newSchema, reflectData)
	);
}
 
Example 11
Source Project: hadoop   Source File: AvroReflectSerialization.java    License: Apache License 2.0
@InterfaceAudience.Private
@Override
public DatumReader getReader(Class<Object> clazz) {
  try {
    return new ReflectDatumReader(clazz);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
 
Example 12
Source Project: jMetalSP   Source File: AvroDeserializationSchema.java    License: MIT License
private void ensureInitialized() {
    if (reader == null) {
        if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroType)) {
            reader = new SpecificDatumReader<T>(avroType);
        } else {
            reader = new ReflectDatumReader<T>(avroType);
        }
    }
}
 
Example 13
Source Project: jMetalSP   Source File: SimpleAVROSchema.java    License: MIT License
private void ensureInitialized() {
    if (reader == null) {
        if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroType)) {
            reader = new SpecificDatumReader<T>(avroType);
        } else {
            reader = new ReflectDatumReader<T>(avroType);
        }
    }
}
 
Example 14
Source Project: big-c   Source File: AvroReflectSerialization.java    License: Apache License 2.0
@InterfaceAudience.Private
@Override
public DatumReader getReader(Class<Object> clazz) {
  try {
    return new ReflectDatumReader(clazz);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
 
Example 15
Source Project: schema-evolution-samples   Source File: AvroCodec.java    License: Apache License 2.0
private DatumReader getDatumReader(Class<?> type, Schema writer){
	DatumReader reader = null;
	if(SpecificRecord.class.isAssignableFrom(type)){
		reader = new SpecificDatumReader<>(writer,getReaderSchema(writer));
	}
	else if(GenericRecord.class.isAssignableFrom(type)){
		reader = new GenericDatumReader<>(writer,getReaderSchema(writer));
	}else{
		reader = new ReflectDatumReader<>(writer,getReaderSchema(writer));
	}

	return reader;
}
 
Example 16
Source Project: beam   Source File: AvroCoder.java    License: Apache License 2.0
protected AvroCoder(Class<T> type, Schema schema) {
  this.type = type;
  this.schemaSupplier = new SerializableSchemaSupplier(schema);
  typeDescriptor = TypeDescriptor.of(type);
  nonDeterministicReasons = new AvroDeterminismChecker().check(TypeDescriptor.of(type), schema);

  // Decoder and Encoder start off null for each thread. They are allocated and potentially
  // reused inside encode/decode.
  this.decoder = new EmptyOnDeserializationThreadLocal<>();
  this.encoder = new EmptyOnDeserializationThreadLocal<>();

  this.reflectData = Suppliers.memoize(new SerializableReflectDataSupplier(getType()));

  // Reader and writer are allocated once per thread per Coder
  this.reader =
      new EmptyOnDeserializationThreadLocal<DatumReader<T>>() {
        private final AvroCoder<T> myCoder = AvroCoder.this;

        @Override
        public DatumReader<T> initialValue() {
          return myCoder.getType().equals(GenericRecord.class)
              ? new GenericDatumReader<>(myCoder.getSchema())
              : new ReflectDatumReader<>(
                  myCoder.getSchema(), myCoder.getSchema(), myCoder.reflectData.get());
        }
      };

  this.writer =
      new EmptyOnDeserializationThreadLocal<DatumWriter<T>>() {
        private final AvroCoder<T> myCoder = AvroCoder.this;

        @Override
        public DatumWriter<T> initialValue() {
          return myCoder.getType().equals(GenericRecord.class)
              ? new GenericDatumWriter<>(myCoder.getSchema())
              : new ReflectDatumWriter<>(myCoder.getSchema(), myCoder.reflectData.get());
        }
      };
}
 
Example 17
Source Project: beam   Source File: AvroIOTest.java    License: Apache License 2.0
static void assertTestOutputs(
    String[] expectedElements, int numShards, String outputFilePrefix, String shardNameTemplate)
    throws IOException {
  // Validate that the data written matches the expected elements in the expected order
  List<File> expectedFiles = new ArrayList<>();
  for (int i = 0; i < numShards; i++) {
    expectedFiles.add(
        new File(
            DefaultFilenamePolicy.constructName(
                    FileBasedSink.convertToFileResourceIfPossible(outputFilePrefix),
                    shardNameTemplate,
                    ".avro",
                    i,
                    numShards,
                    null,
                    null)
                .toString()));
  }

  List<String> actualElements = new ArrayList<>();
  for (File outputFile : expectedFiles) {
    assertTrue("Expected output file " + outputFile.getName(), outputFile.exists());
    try (DataFileReader<String> reader =
        new DataFileReader<>(
            outputFile, new ReflectDatumReader(ReflectData.get().getSchema(String.class)))) {
      Iterators.addAll(actualElements, reader);
    }
  }
  assertThat(actualElements, containsInAnyOrder(expectedElements));
}
 
Example 18
Source Project: pulsar   Source File: AvroReader.java    License: Apache License 2.0
public AvroReader(Schema schema, ClassLoader classLoader, boolean jsr310ConversionEnabled) {
    if (classLoader != null) {
        ReflectData reflectData = new ReflectData(classLoader);
        AvroSchema.addLogicalTypeConversions(reflectData, jsr310ConversionEnabled);
        this.reader = new ReflectDatumReader<>(schema, schema, reflectData);
    } else {
        this.reader = new ReflectDatumReader<>(schema);
    }
}
 
Example 19
Source Project: pulsar   Source File: AvroReader.java    License: Apache License 2.0
public AvroReader(Schema writerSchema, Schema readerSchema, ClassLoader classLoader,
    boolean jsr310ConversionEnabled) {
    if (classLoader != null) {
        ReflectData reflectData = new ReflectData(classLoader);
        AvroSchema.addLogicalTypeConversions(reflectData, jsr310ConversionEnabled);
        this.reader = new ReflectDatumReader<>(writerSchema, readerSchema, reflectData);
    } else {
        this.reader = new ReflectDatumReader<>(writerSchema, readerSchema);
    }
}
 
Example 20
Source Project: parquet-mr   Source File: TestStringBehavior.java    License: Apache License 2.0
@Test
public void testReflectJavaClass() throws IOException {
  Schema reflectSchema = ReflectData.get()
      .getSchema(ReflectRecordJavaClass.class);
  System.err.println("Schema: " + reflectSchema.toString(true));
  ReflectRecordJavaClass avroRecord;
  try(DataFileReader<ReflectRecordJavaClass> avro =
        new DataFileReader<>(avroFile,
          new ReflectDatumReader<>(reflectSchema))) {
    avroRecord = avro.next();
  }

  ReflectRecordJavaClass parquetRecord;
  Configuration conf = new Configuration();
  conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
  AvroReadSupport.setAvroDataSupplier(conf, ReflectDataSupplier.class);
  AvroReadSupport.setAvroReadSchema(conf, reflectSchema);
  AvroReadSupport.setRequestedProjection(conf, reflectSchema);
  try(ParquetReader<ReflectRecordJavaClass> parquet = AvroParquetReader
      .<ReflectRecordJavaClass>builder(parquetFile)
      .withConf(conf)
      .build()) {
    parquetRecord = parquet.read();
  }

  // Avro uses String even if CharSequence is set
  Assert.assertEquals("Avro default string class should be String",
      String.class, avroRecord.default_class.getClass());
  Assert.assertEquals("Parquet default string class should be String",
      String.class, parquetRecord.default_class.getClass());

  Assert.assertEquals("Avro stringable class should be BigDecimal",
      BigDecimal.class, avroRecord.stringable_class.getClass());
  Assert.assertEquals("Parquet stringable class should be BigDecimal",
      BigDecimal.class, parquetRecord.stringable_class.getClass());
  Assert.assertEquals("Should have the correct BigDecimal value",
      BIG_DECIMAL, parquetRecord.stringable_class);
}
 
Example 21
Source Project: mt-flume   Source File: TestLog4jAppenderWithAvro.java    License: Apache License 2.0
@Test
public void testAvroReflect() throws IOException {
  loadProperties("flume-log4jtest-avro-reflect.properties");
  PropertyConfigurator.configure(props);
  Logger logger = LogManager.getLogger(TestLog4jAppenderWithAvro.class);
  String msg = "This is log message number " + String.valueOf(0);

  AppEvent appEvent = new AppEvent();
  appEvent.setMessage(msg);

  logger.info(appEvent);

  Transaction transaction = ch.getTransaction();
  transaction.begin();
  Event event = ch.take();
  Assert.assertNotNull(event);

  Schema schema = ReflectData.get().getSchema(appEvent.getClass());

  ReflectDatumReader<AppEvent> reader = new ReflectDatumReader<AppEvent>(AppEvent.class);
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(event.getBody(), null);
  AppEvent recordFromEvent = reader.read(null, decoder);
  Assert.assertEquals(msg, recordFromEvent.getMessage());

  Map<String, String> hdrs = event.getHeaders();

  Assert.assertNull(hdrs.get(Log4jAvroHeaders.MESSAGE_ENCODING.toString()));

  Assert.assertNull("Schema URL should not be set",
      hdrs.get(Log4jAvroHeaders.AVRO_SCHEMA_URL.toString()));
  Assert.assertEquals("Schema string should be set", schema.toString(),
      hdrs.get(Log4jAvroHeaders.AVRO_SCHEMA_LITERAL.toString()));

  transaction.commit();
  transaction.close();

}
 
Example 22
Source Project: stratosphere   Source File: AvroBaseValue.java    License: Apache License 2.0
private ReflectDatumReader<T> getReader() {
	if (this.reader == null) {
		Class<T> datumClass = ReflectionUtil.getTemplateType1(getClass());
		this.reader = new ReflectDatumReader<T>(datumClass);
	}
	return this.reader;
}
 
Example 23
Source Project: stratosphere   Source File: AvroSerializer.java    License: Apache License 2.0
private final void checkAvroInitialized() {
	if (this.reader == null) {
		this.reader = new ReflectDatumReader<T>(type);
		this.writer = new ReflectDatumWriter<T>(type);
		this.encoder = new DataOutputEncoder();
		this.decoder = new DataInputDecoder();
	}
}
 
Example 24
Source Project: kite   Source File: DataModelUtil.java    License: Apache License 2.0
/**
 * Get the DatumReader for the given type.
 *
 * @param <E> The entity type
 * @param type The Java class of the entity type
 * @param writerSchema The {@link Schema} for entities
 * @return The DatumReader for the given type
 */
@SuppressWarnings("unchecked")
public static <E> DatumReader<E> getDatumReaderForType(Class<E> type, Schema writerSchema) {
  Schema readerSchema = getReaderSchema(type, writerSchema);
  GenericData dataModel = getDataModelForType(type);
  if (dataModel instanceof ReflectData) {
    return new ReflectDatumReader<E>(writerSchema, readerSchema, (ReflectData)dataModel);
  } else if (dataModel instanceof SpecificData) {
    return new SpecificDatumReader<E>(writerSchema, readerSchema, (SpecificData)dataModel);
  } else {
    return new GenericDatumReader<E>(writerSchema, readerSchema, dataModel);
  }
}
 
Example 25
Source Project: kite   Source File: TestDataModelUtil.java    License: Apache License 2.0
@Test
public void testGetDatumReaderForReflectType() {
  Class<String> type = String.class;
  Schema writerSchema = Schema.create(Schema.Type.STRING);
  DatumReader result = DataModelUtil.getDatumReaderForType(type, writerSchema);
  assertEquals(ReflectDatumReader.class, result.getClass());
}
 
Example 26
Source Project: kite   Source File: TestLog4jAppenderWithAvro.java    License: Apache License 2.0
@Test
public void testAvroReflect() throws IOException {
  loadProperties("flume-log4jtest-avro-reflect.properties");
  PropertyConfigurator.configure(props);
  Logger logger = LogManager.getLogger(TestLog4jAppenderWithAvro.class);
  String msg = "This is log message number " + String.valueOf(0);

  AppEvent appEvent = new AppEvent();
  appEvent.setMessage(msg);

  logger.info(appEvent);

  Transaction transaction = ch.getTransaction();
  transaction.begin();
  Event event = ch.take();
  Assert.assertNotNull(event);

  Schema schema = ReflectData.get().getSchema(appEvent.getClass());

  ReflectDatumReader<AppEvent> reader = new ReflectDatumReader<AppEvent>(AppEvent.class);
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(event.getBody(), null);
  AppEvent recordFromEvent = reader.read(null, decoder);
  Assert.assertEquals(msg, recordFromEvent.getMessage());

  Map<String, String> hdrs = event.getHeaders();

  Assert.assertNull(hdrs.get(Log4jAvroHeaders.MESSAGE_ENCODING.toString()));

  Assert.assertNull("Schema URL should not be set",
      hdrs.get(Log4jAvroHeaders.AVRO_SCHEMA_URL.toString()));
  Assert.assertEquals("Schema string should be set", schema.toString(),
      hdrs.get(Log4jAvroHeaders.AVRO_SCHEMA_LITERAL.toString()));

  transaction.commit();
  transaction.close();

}
 
Example 27
Source Project: pulsar   Source File: AvroReader.java    License: Apache License 2.0
public AvroReader(Schema schema) {
    this.reader = new ReflectDatumReader<>(schema);
}
 
Example 28
Source Project: parquet-mr   Source File: TestStringBehavior.java    License: Apache License 2.0
@Test
public void testReflect() throws IOException {
  Schema reflectSchema = ReflectData.get()
      .getSchema(ReflectRecord.class);

  ReflectRecord avroRecord;
  try(DataFileReader<ReflectRecord> avro = new DataFileReader<>(
    avroFile, new ReflectDatumReader<>(reflectSchema))) {
    avroRecord = avro.next();
  }

  ReflectRecord parquetRecord;
  Configuration conf = new Configuration();
  conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
  AvroReadSupport.setAvroDataSupplier(conf, ReflectDataSupplier.class);
  AvroReadSupport.setAvroReadSchema(conf, reflectSchema);
  try(ParquetReader<ReflectRecord> parquet = AvroParquetReader
      .<ReflectRecord>builder(parquetFile)
      .withConf(conf)
      .build()) {
    parquetRecord = parquet.read();
  }

  Assert.assertEquals("Avro default string class should be String",
      String.class, avroRecord.default_class.getClass());
  Assert.assertEquals("Parquet default string class should be String",
      String.class, parquetRecord.default_class.getClass());

  Assert.assertEquals("Avro avro.java.string=String class should be String",
      String.class, avroRecord.string_class.getClass());
  Assert.assertEquals("Parquet avro.java.string=String class should be String",
      String.class, parquetRecord.string_class.getClass());

  Assert.assertEquals("Avro stringable class should be BigDecimal",
      BigDecimal.class, avroRecord.stringable_class.getClass());
  Assert.assertEquals("Parquet stringable class should be BigDecimal",
      BigDecimal.class, parquetRecord.stringable_class.getClass());
  Assert.assertEquals("Should have the correct BigDecimal value",
      BIG_DECIMAL, parquetRecord.stringable_class);

  Assert.assertEquals("Avro map default string class should be String",
      String.class, keyClass(avroRecord.default_map));
  Assert.assertEquals("Parquet map default string class should be String",
      String.class, keyClass(parquetRecord.default_map));

  Assert.assertEquals("Avro map avro.java.string=String class should be String",
      String.class, keyClass(avroRecord.string_map));
  Assert.assertEquals("Parquet map avro.java.string=String class should be String",
      String.class, keyClass(parquetRecord.string_map));

  Assert.assertEquals("Avro map stringable class should be BigDecimal",
      BigDecimal.class, keyClass(avroRecord.stringable_map));
  Assert.assertEquals("Parquet map stringable class should be BigDecimal",
      BigDecimal.class, keyClass(parquetRecord.stringable_map));
}