Java Code Examples for org.apache.avro.Schema#getFields()

The following examples show how to use org.apache.avro.Schema#getFields(). They are taken from open-source projects; the source file, project, and license are noted above each example.
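Before the project examples, here is a minimal self-contained sketch of the call itself: getFields() is defined only for RECORD schemas and returns the record's fields in declaration order, and each Schema.Field exposes name(), pos(), and schema(). The class name and schema string below are hypothetical, chosen only for illustration.

import org.apache.avro.Schema;

public class GetFieldsSketch {
    public static void main(String[] args) {
        // Hypothetical record schema, used only to demonstrate getFields().
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
                        + "{\"name\":\"id\",\"type\":\"long\"},"
                        + "{\"name\":\"email\",\"type\":[\"null\",\"string\"],\"default\":null}]}");

        // getFields() throws AvroRuntimeException on non-RECORD schemas
        // and returns the fields in declaration order.
        for (Schema.Field field : schema.getFields()) {
            System.out.println(field.pos() + ": " + field.name() + " -> " + field.schema());
        }
    }
}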
Example 1
Source File: AvroWriteSupportInt96Avro18.java    From datacollector with Apache License 2.0
private void writeRecordFields(GroupType schema, Schema avroSchema,
    Object record) {
  List<Type> fields = schema.getFields();
  List<Schema.Field> avroFields = avroSchema.getFields();
  int index = 0; // parquet ignores Avro nulls, so index may differ
  for (int avroIndex = 0; avroIndex < avroFields.size(); avroIndex++) {
    Schema.Field avroField = avroFields.get(avroIndex);
    if (avroField.schema().getType().equals(Schema.Type.NULL)) {
      continue;
    }
    Type fieldType = fields.get(index);
    Object value = model.getField(record, avroField.name(), avroIndex);
    if (value != null) {
      recordConsumer.startField(fieldType.getName(), index);
      writeValue(fieldType, avroField.schema(), value);
      recordConsumer.endField(fieldType.getName(), index);
    } else if (fieldType.isRepetition(Type.Repetition.REQUIRED)) {
      throw new RuntimeException("Null-value for required field: " + avroField.name());
    }
    index++;
  }
}
 
Example 2
Source File: TestNiFiOrcUtils.java    From nifi with Apache License 2.0
@Test
public void test_getOrcField_primitive() throws Exception {
    // Expected ORC types
    TypeInfo[] expectedTypes = {
            TypeInfoFactory.getPrimitiveTypeInfo("int"),
            TypeInfoFactory.getPrimitiveTypeInfo("bigint"),
            TypeInfoFactory.getPrimitiveTypeInfo("boolean"),
            TypeInfoFactory.getPrimitiveTypeInfo("float"),
            TypeInfoFactory.getPrimitiveTypeInfo("double"),
            TypeInfoFactory.getPrimitiveTypeInfo("binary"),
            TypeInfoFactory.getPrimitiveTypeInfo("string")
    };

    // Build a fake Avro record with all types
    Schema testSchema = buildPrimitiveAvroSchema();
    List<Schema.Field> fields = testSchema.getFields();
    for (int i = 0; i < fields.size(); i++) {
        assertEquals(expectedTypes[i], NiFiOrcUtils.getOrcField(fields.get(i).schema()));
    }

}
 
Example 3
Source File: FastSerializerGenerator.java    From avro-util with BSD 2-Clause "Simplified" License
private void processRecord(final Schema recordSchema, JExpression recordExpr, final JBlock containerBody) {
  if (methodAlreadyDefined(recordSchema)) {
    containerBody.invoke(getMethod(recordSchema)).arg(recordExpr).arg(JExpr.direct(ENCODER));
    return;
  }
  JMethod method = createMethod(recordSchema);
  containerBody.invoke(getMethod(recordSchema)).arg(recordExpr).arg(JExpr.direct(ENCODER));

  JBlock body = method.body();
  recordExpr = method.listParams()[0];

  for (Schema.Field field : recordSchema.getFields()) {
    Schema fieldSchema = field.schema();
    if (SchemaAssistant.isComplexType(fieldSchema)) {
      JClass fieldClass = schemaAssistant.classFromSchema(fieldSchema);
      JVar containerVar = declareValueVar(field.name(), fieldSchema, body);
      JExpression valueExpression = JExpr.invoke(recordExpr, "get").arg(JExpr.lit(field.pos()));
      containerVar.init(JExpr.cast(fieldClass, valueExpression));

      processComplexType(fieldSchema, containerVar, body);
    } else {
      processSimpleType(fieldSchema, recordExpr.invoke("get").arg(JExpr.lit(field.pos())), body);
    }
  }
}
 
Example 4
Source File: TestAvroImport.java    From aliyun-maxcompute-data-collectors with Apache License 2.0
public void testFirstUnderscoreInColumnName() throws IOException {
  String [] names = { "_NAME" };
  String [] types = { "INT" };
  String [] vals = { "1987" };
  createTableWithColTypesAndNames(names, types, vals);

  runImport(getOutputArgv(true, null));

  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  DataFileReader<GenericRecord> reader = read(outputFile);
  Schema schema = reader.getSchema();
  assertEquals(Schema.Type.RECORD, schema.getType());
  List<Field> fields = schema.getFields();
  assertEquals(types.length, fields.size());

  checkField(fields.get(0), "__NAME", Type.INT);

  GenericRecord record1 = reader.next();
  assertEquals("__NAME", 1987, record1.get("__NAME"));
}
 
Example 5
Source File: MarketoLeadClient.java    From components with Apache License 2.0
public List<IndexedRecord> convertLeadRecords(List<Map<String, String>> records, Schema schema,
        Map<String, String> mappings) {
    List<IndexedRecord> results = new ArrayList<>();
    for (Map<String, String> input : records) {
        IndexedRecord record = new Record(schema);
        for (Field f : schema.getFields()) {
            String col = mappings.get(f.name());
            if (col != null) {
                Object tmp = input.get(col);
                record.put(f.pos(), getValueType(f, tmp));
            }
        }
        results.add(record);
    }
    return results;
}
 
Example 6
Source File: SalesforceComponentTestIT.java    From components with Apache License 2.0
protected void testSchemaWithAPIVersion(String version) throws Throwable {
    TSalesforceInputProperties props = (TSalesforceInputProperties) getComponentService()
            .getComponentProperties(TSalesforceInputDefinition.COMPONENT_NAME);
    props.connection.endpoint.setValue("https://login.salesforce.com/services/Soap/u/" + version);
    setupProps(props.connection, !ADD_QUOTES);

    Form f = props.module.getForm(Form.REFERENCE);
    SalesforceModuleProperties moduleProps = (SalesforceModuleProperties) f.getProperties();
    moduleProps = (SalesforceModuleProperties) PropertiesTestUtils.checkAndBeforeActivate(getComponentService(), f,
            "moduleName", moduleProps);
    moduleProps.moduleName.setValue("Account");
    moduleProps = (SalesforceModuleProperties) checkAndAfter(f, "moduleName", moduleProps);
    Schema schema = moduleProps.main.schema.getValue();
    LOGGER.debug(schema.toString());
    for (Schema.Field child : schema.getFields()) {
        LOGGER.debug(child.name());
    }
    assertEquals("Id", schema.getFields().get(0).name());
    LOGGER.debug("Endpoint:" + props.connection.endpoint.getValue());
    LOGGER.debug("Module \"Account\" column size:" + schema.getFields().size());
    assertTrue(schema.getFields().size() > 40);
}
 
Example 7
Source File: BoltCommandHandlerHelper.java    From DBus with Apache License 2.0
public static <T extends Object> PairWrapper<String, Object> convertAvroRecordUseBeforeMap(GenericRecord record, Set<T> noorderKeys) {
    Schema schema = record.getSchema();
    List<Schema.Field> fields = schema.getFields();
    PairWrapper<String, Object> wrapper = new PairWrapper<>();

    for (Schema.Field field : fields) {
        String key = field.name();
        Object value = record.get(key);
        // store the key-value pair separately when ordering does not matter for this key
        if (noorderKeys.contains(field.name())) {
            addPairWrapperProperties(wrapper, key, value);
        }
    }

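    // pull the BEFORE sub-record out of the message, then flatten it into a map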
    GenericRecord before = getFromRecord(MessageBodyKey.BEFORE, record);

    Map<String, Object> beforeMap = convert2map(before);

    for (Map.Entry<String, Object> entry : beforeMap.entrySet()) {
        if (!entry.getKey().endsWith(MessageBodyKey.IS_MISSING_SUFFIX)) {
            addPairWrapperValue(wrapper, entry.getKey(), entry.getValue());
        } else if ((Boolean) entry.getValue()) {
            wrapper.addMissingField(entry.getKey());
        }
    }

    return wrapper;
}
 
Example 8
Source File: KeyValueUtils.java    From components with Apache License 2.0
/**
 * Generates a new IndexedRecord that is the filtered result of the input record.
 *
 * The user can freely remove columns, add empty columns, or reorder columns within the
 * same hierarchical level.
 *
 * @return the new record
 */
public static IndexedRecord extractIndexedRecord(IndexedRecord inputRecord, Schema outputSchema) {
    GenericRecordBuilder outputRecord = new GenericRecordBuilder(outputSchema);
    Schema inputSchema = getUnwrappedSchema(inputRecord);
    for (Field field : outputSchema.getFields()) {
        if (inputSchema.getField(field.name()) != null) {
            // The column was existing on the input record, we forward it to the output record.
            Object inputValue = inputRecord.get(inputSchema.getField(field.name()).pos());

            // The current column can be a Record (a hierarchical sub-object) or directly a value.
            // If it is a record, we need to recurse into it;
            // if it is a plain value, we copy it to the output.
            if (inputValue instanceof Record) {
                // The sub-schema at this level is a union of "empty" and a record,
                // so we need to get the true sub-schema
                Schema inputChildSchema = getUnwrappedSchema(inputSchema.getField(field.name()));
                Schema outputChildSchema = getUnwrappedSchema(outputSchema.getField(field.name()));
                if (inputChildSchema.getType().equals(Type.RECORD)
                        && outputChildSchema.getType().equals(Type.RECORD)) {
                    Object childRecord = extractIndexedRecord((IndexedRecord) inputValue, outputChildSchema);
                    outputRecord.set(field.name(), childRecord);
                }
            } else {
                outputRecord.set(field.name(), inputValue);
            }
        } else {
            // column not found in the input => set the value and its whole hierarchy to null
            outputRecord.set(field.name(), KeyValueUtils.generateEmptyRecord(outputSchema, field.name()));
        }
    }
    return outputRecord.build();
}
 
Example 9
Source File: TSnowflakeOutputProperties.java    From components with Apache License 2.0
protected List<String> getFieldNames(Property<?> schema) {
    Schema s = (Schema) schema.getValue();
    List<String> fieldNames = new ArrayList<>();
    for (Schema.Field f : s.getFields()) {
        fieldNames.add(f.name());
    }
    return fieldNames;
}
 
Example 10
Source File: MarketoRESTClientTestIT.java    From components with Apache License 2.0
@Test
public void testGetLeadDynamic() throws Exception {
    iprops.inputOperation.setValue(getLead);
    iprops.leadKeyTypeREST.setValue(LeadKeyTypeREST.id);
    iprops.batchSize.setValue(1);
    iprops.afterInputOperation();
    iprops.leadKeyValue.setValue(createdLeads.get(0).toString());
    iprops.schemaInput.schema.setValue(
            SchemaBuilder.builder().record("test").prop(SchemaConstants.INCLUDE_ALL_FIELDS, "true").fields().endRecord());
    MarketoSource source = new MarketoSource();
    source.initialize(null, iprops);
    MarketoRESTClient client = (MarketoRESTClient) source.getClientService(null);
    Schema design = this.iprops.schemaInput.schema.getValue();
    // preserve mappings to re-apply them after
    Map<String, String> mappings = iprops.mappingInput.getNameMappingsForMarketo();
    Schema runtimeSchema = source.getDynamicSchema("", design);
    List<String> columnNames = new ArrayList<>();
    List<String> mktoNames = new ArrayList<>();
    for (Field f : runtimeSchema.getFields()) {
        columnNames.add(f.name());
        if (mappings.get(f.name()) != null) {
            mktoNames.add(mappings.get(f.name()));
        } else {
            mktoNames.add("");
        }
    }
    iprops.mappingInput.columnName.setValue(columnNames);
    iprops.mappingInput.marketoColumnName.setValue(mktoNames);
    iprops.schemaInput.schema.setValue(runtimeSchema);

    MarketoRecordResult result = client.getLead(iprops, null);
    LOG.debug("result = {}.", result);
    IndexedRecord r = result.getRecords().get(0);
    assertNotNull(r);
    LOG.debug("r = {}.", r);
    assertEquals("Retail-Dev", r.get(runtimeSchema.getField("company").pos()));
    assertEquals(COMMON_LINKEDIN_ID.toString(), r.get(runtimeSchema.getField("linkedInId").pos()));
    assertEquals(COMMON_SFDC_ACCOUNT_ID, r.get(runtimeSchema.getField("sfdcAccountId").pos()));
}
 
Example 11
Source File: TSalesforceOutputProperties.java    From components with Apache License 2.0
private List<Schema.Field> cloneFields(Schema metadataSchema) {
    List<Schema.Field> copyFieldList = new ArrayList<>();
    for (Schema.Field se : metadataSchema.getFields()) {
        Schema.Field field = new Schema.Field(se.name(), se.schema(), se.doc(), se.defaultVal(), se.order());
        // copy each custom property from the source field onto the new field
        for (Map.Entry<String, Object> entry : se.getObjectProps().entrySet()) {
            field.addProp(entry.getKey(), entry.getValue());
        }
        copyFieldList.add(field);
    }

    return copyFieldList;
}
 
Example 12
Source File: DBTestUtils.java    From components with Apache License 2.0
private static IndexedRecord copyValueFrom(IndexedRecord record) {
    Schema schema = record.getSchema();
    IndexedRecord result = new GenericData.Record(schema);
    List<Schema.Field> fields = schema.getFields();
    for (int i = 0; i < fields.size(); i++) {
        result.put(i, record.get(i));
    }

    return result;
}
 
Example 13
Source File: NetSuiteMockTestBase.java    From components with Apache License 2.0
public static <T> List<IndexedRecord> makeIndexedRecords(
        NetSuiteClientService<?> clientService, Schema schema,
        ObjectComposer<T> objectComposer, int count) throws Exception {

    NsObjectInputTransducer transducer = new NsObjectInputTransducer(clientService, schema, schema.getName());

    List<IndexedRecord> recordList = new ArrayList<>();

    while (count > 0) {
        T nsRecord = objectComposer.composeObject();

        IndexedRecord convertedRecord = transducer.read(nsRecord);
        Schema recordSchema = convertedRecord.getSchema();

        GenericRecord record = new GenericData.Record(recordSchema);
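        // copy values by position; assumes recordSchema declares the same fields, in the same order, as schema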
        for (Schema.Field field : schema.getFields()) {
            Object value = convertedRecord.get(field.pos());
            record.put(field.pos(), value);
        }

        recordList.add(record);

        count--;
    }

    return recordList;
}
 
Example 14
Source File: SnowflakeWriter.java    From components with Apache License 2.0
protected Schema initRuntimeSchemaAndMapIfNecessary() throws IOException {
    if (!dbColumnName2RuntimeField.isEmpty()) {
        return null;
    }
    String tableName = sprops.convertColumnsAndTableToUppercase.getValue() ? sprops.getTableName().toUpperCase()
            : sprops.getTableName();
    Schema runtimeSchema = sink.getSchema(container, processingConnection, tableName);
    if (runtimeSchema != null) {
        for (Field field : runtimeSchema.getFields()) {
            String dbColumnName = field.getProp(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME);
            dbColumnName2RuntimeField.put(dbColumnName, field);
        }
    }
    return runtimeSchema;
}
 
Example 15
Source File: PigSchema2Avro.java    From spork with Apache License 2.0
/**
 * Validates that a Pig tuple is compatible with an Avro record. If the Avro schema
 * is not complete (it has uncovered fields), those fields are converted with the
 * convert() methods.
 *
 * Note that users can get rid of Pig tuple wrappers, e.g. an Avro schema
 * "int" is compatible with a Pig schema "T:(int)".
 */
protected static Schema validateAndConvertRecord(Schema avroSchema, ResourceFieldSchema[] pigFields) throws IOException {

    /* Get rid of Pig tuple wrappers. */
    if (!avroSchema.getType().equals(Schema.Type.RECORD)) {
        if (pigFields.length != 1)
            throw new IOException("Expect only one field in Pig tuple schema. Avro schema is " + avroSchema.getType());

        return validateAndConvert(avroSchema, pigFields[0]);
    }

    /* validate and convert a pig tuple with avro record */
    boolean isPartialSchema = AvroStorageUtils.isUDPartialRecordSchema(avroSchema);
    AvroStorageLog.details("isPartialSchema=" + isPartialSchema);

    String typeName = isPartialSchema ? getRecordName() : avroSchema.getName();
    Schema outSchema = Schema.createRecord(typeName, avroSchema.getDoc(), avroSchema.getNamespace(), false);

    List<Schema.Field> inFields = avroSchema.getFields();
    if (!isPartialSchema && inFields.size() != pigFields.length) {
        throw new IOException("Expect " + inFields.size() + " fields in pig schema." + " But there are " + pigFields.length);
    }

    List<Schema.Field> outFields = new ArrayList<Schema.Field>();

    for (int i = 0; i < pigFields.length; i++) {
        /* get user defined avro field schema */
        Field inputField = isPartialSchema ? AvroStorageUtils.getUDField(avroSchema, i) : inFields.get(i);

        /* get schema */
        Schema fieldSchema = null;
        if (inputField == null) { 
            /* convert pig schema (nullable) */
            fieldSchema = convert(pigFields[i], true);
        } else if (inputField.schema() == null) { 
            /* convert pig schema (not-null) */
            fieldSchema = convert(pigFields[i], false);
        } else { 
            /* validate pigFields[i] with given avro schema */
            fieldSchema = validateAndConvert(inputField.schema(),
                                            pigFields[i]);
        }

        /* get field name of output */
        String outname = (isPartialSchema) ? pigFields[i].getName() : inputField.name();
        if (outname == null)
            outname = FIELD_NAME + "_" + i; // field name cannot be null

        /* get doc of output */
        String doc = (isPartialSchema) ? pigFields[i].getDescription() : inputField.doc();

        JsonNode defaultvalue = (inputField != null) ? inputField.defaultValue() : null;

        outFields.add(new Field(outname, fieldSchema, doc, defaultvalue));

    }

    outSchema.setFields(outFields);
    return outSchema;

}
 
Example 16
Source File: DatumBuilder.java    From xml-avro with Apache License 2.0
Schema.Field getFieldBySource(Schema schema, Source source) {
    if(schema.getType() == Schema.Type.UNION) {
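      // assumes a nullable union of the form [null, record]: take the non-null branch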
      return getFieldBySource(schema.getTypes().get(1), source);
    } else {
      for (Schema.Field field : schema.getFields()) {
        String fieldSource = field.getProp(Source.SOURCE);
        if (caseSensitiveNames && source.toString().equals(fieldSource))
          return field;
        if (!caseSensitiveNames && source.toString().equalsIgnoreCase(fieldSource))
          return field;
      }

      return null;
    }
}
 
Example 17
Source File: MarketoSOAPClient.java    From components with Apache License 2.0
public List<IndexedRecord> convertLeadRecords(List<LeadRecord> recordList, Schema schema, Map<String, String> mappings) {
    List<IndexedRecord> results = new ArrayList<>();
    for (LeadRecord input : recordList) {
        IndexedRecord record = new Record(schema);
        for (Field f : schema.getFields()) {
            // find matching marketo column name
            String col = mappings.get(f.name());
            if (col == null) {
                LOG.warn("[converLeadRecord] Couldn't find mapping for column {}.", f.name());
                continue;
            }
            switch (col) {
            case FIELD_ID:
                record.put(f.pos(), input.getId() != null ? input.getId().getValue() : null);
                break;
            case FIELD_EMAIL:
                record.put(f.pos(), input.getEmail() != null ? input.getEmail().getValue() : null);
                break;
            case FIELD_FOREIGN_SYS_PERSON_ID:
                record.put(f.pos(), input.getForeignSysPersonId() != null ? input.getForeignSysPersonId().getValue() : null);
                break;
            case FIELD_FOREIGN_SYS_TYPE:
                record.put(f.pos(),
                        input.getForeignSysType() != null && input.getForeignSysType().getValue() != null
                                ? input.getForeignSysType().getValue().value()
                                : null);
                break;
            default:
                if (!input.getLeadAttributeList().isNil()) {
                    for (Attribute attr : input.getLeadAttributeList().getValue().getAttributes()) {
                        if (attr.getAttrName().equals(col)) {
                            record.put(f.pos(), attr.getAttrValue());
                        }
                    }
                }
            }
        }
        results.add(record);
    }

    return results;
}
 
Example 18
Source File: AvroFieldsGenerator.java    From registry with Apache License 2.0
private void parseSchema(Schema schema, List<SchemaFieldInfo> schemaFieldInfos, Set<String> visitedRecords) {
    Schema.Type type = schema.getType();
    LOG.debug("Visiting type: [{}]", type);

    switch (type) {
        case RECORD:

            String completeName = schema.getFullName();

            // Since we are only interested in primitive data types, if we encounter a record that was already parsed it can be ignored
            if (!visitedRecords.contains(completeName)) {
                visitedRecords.add(completeName);

                // store fields of a record.
                List<Schema.Field> fields = schema.getFields();
                for (Schema.Field recordField : fields) {
                    parseField(recordField, schemaFieldInfos, visitedRecords);
                }
            }
            break;
        case MAP:
            Schema valueTypeSchema = schema.getValueType();
            parseSchema(valueTypeSchema, schemaFieldInfos, visitedRecords);
            break;
        case ENUM:
            break;
        case ARRAY:
            Schema elementType = schema.getElementType();
            parseSchema(elementType, schemaFieldInfos, visitedRecords);
            break;

        case UNION:
            List<Schema> unionTypes = schema.getTypes();
            for (Schema typeSchema : unionTypes) {
                parseSchema(typeSchema, schemaFieldInfos, visitedRecords);
            }
            break;

        case STRING:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
        case FIXED:
        case BOOLEAN:
        case BYTES:
        case NULL:

            break;

        default:
            throw new RuntimeException("Unsupported type: " + type);

    }

}
 
Example 19
Source File: AvroStorageDataConversionUtilities.java    From spork with Apache License 2.0
/**
 * Packs a Pig Tuple into an Avro record.
 * @param t the Pig tuple to pack into the avro object
 * @param s The avro schema for which to determine the type
 * @return the avro record corresponding to the input tuple
 * @throws IOException
 */
public static GenericData.Record packIntoAvro(final Tuple t, final Schema s)
    throws IOException {

  try {
    GenericData.Record record = new GenericData.Record(s);
    for (Field f : s.getFields()) {
      Object o = t.get(f.pos());
      Schema innerSchema = f.schema();
      if (AvroStorageSchemaConversionUtilities.isNullableUnion(innerSchema)) {
        if (o == null) {
          record.put(f.pos(), null);
          continue;
        }
        innerSchema = AvroStorageSchemaConversionUtilities
            .removeSimpleUnion(innerSchema);
      }
      switch(innerSchema.getType()) {
      case RECORD:
        record.put(f.pos(), packIntoAvro((Tuple) o, innerSchema));
        break;
      case ARRAY:
        record.put(f.pos(), packIntoAvro((DataBag) o, innerSchema));
        break;
      case BYTES:
        record.put(f.pos(), ByteBuffer.wrap(((DataByteArray) o).get()));
        break;
      case FIXED:
        record.put(f.pos(), new GenericData.Fixed(
            innerSchema, ((DataByteArray) o).get()));
        break;
      default:
        if (t.getType(f.pos()) == DataType.DATETIME) {
          record.put(f.pos(), ((DateTime) o).getMillis() );
        } else {
          record.put(f.pos(), o);
        }
      }
    }
    return record;
  } catch (Exception e) {
    throw new IOException(
        "exception in AvroStorageDataConversionUtilities.packIntoAvro", e);
  }
}
 
Example 20
Source File: JDBCInputTestIT.java    From components with Apache License 2.0
/**
 * Checks that {@link JDBCInputReader} outputs an {@link IndexedRecord} containing nullable String type data for every
 * SQL/JDBC type.
 */
@Test
public void testReaderAllTypesString() throws IOException {
    TJDBCInputDefinition definition = new TJDBCInputDefinition();
    TJDBCInputProperties properties = DBTestUtils.createCommonJDBCInputProperties(allSetting, definition);

    properties.main.schema.setValue(DBTestUtils.createAllTypesSchema(tablename_all_type));
    properties.tableSelection.tablename.setValue(tablename_all_type);
    properties.sql.setValue(DBTestUtils.getSQL(tablename_all_type));

    Reader reader = DBTestUtils.createCommonJDBCInputReader(properties);

    reader.start();

    IndexedRecord record = (IndexedRecord) reader.getCurrent();
    Short col0 = (Short) record.get(0);
    Integer col1 = (Integer) record.get(1);
    Long col2 = (Long) record.get(2);
    Float col3 = (Float) record.get(3);
    Double col4 = (Double) record.get(4);
    BigDecimal col5 = (BigDecimal) record.get(5);
    String col6 = (String) record.get(6);
    String col7 = (String) record.get(7);
    byte[] col8 = (byte[]) record.get(8);
    String col9 = (String) record.get(9);
    Timestamp col10 = (Timestamp) record.get(10);
    Timestamp col11 = (Timestamp) record.get(11);
    Timestamp col12 = (Timestamp) record.get(12);
    Boolean col13 = (Boolean) record.get(13);

    assertEquals(32767, col0.shortValue());
    assertEquals(2147483647, col1.intValue());
    assertEquals(9223372036854775807L, col2.longValue());
    assertTrue(col3 > 1);
    assertTrue(col4 > 2);
    assertEquals(new BigDecimal("1234567890.1234567890"), col5);
    assertEquals("abcd", col6);
    assertEquals("abcdefg", col7);
    byte[] blob = {0,1,2,3,4,5,6,7,8,9};
    assertArrayEquals(blob, col8);
    assertEquals("abcdefg", col9);
    assertEquals("2016-12-28", new SimpleDateFormat("yyyy-MM-dd").format(col10));
    assertEquals("14:30:33", new SimpleDateFormat("HH:mm:ss").format(col11));
    assertEquals(Timestamp.valueOf("2016-12-28 14:31:56.12345"), col12);
    assertEquals(true, col13);

    Schema actualSchema = record.getSchema();
    List<Field> actualFields = actualSchema.getFields();

    assertEquals(14, actualFields.size());
    reader.close();
}