Java Code Examples for org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory

The following examples show how to use org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory. They are extracted from open source projects; where available, the source project and file are noted above each example.
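Before diving in, the sketch below summarizes the factory's surface area: static singletons for primitive types, lookup by Hive type name, and factory methods for parameterized and complex types. This is a minimal orientation sketch (the class and variable names are illustrative, not from any of the projects below):

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TypeInfoFactoryTour {
    public static void main(String[] args) {
        // Primitives: static singletons, or lookup by Hive type name
        TypeInfo str = TypeInfoFactory.stringTypeInfo;
        TypeInfo bigint = TypeInfoFactory.getPrimitiveTypeInfo("bigint");

        // Parameterized primitives
        TypeInfo varchar = TypeInfoFactory.getVarcharTypeInfo(100);   // varchar(100)
        TypeInfo decimal = TypeInfoFactory.getDecimalTypeInfo(10, 2); // decimal(10,2)

        // Complex types are composed from other TypeInfos
        TypeInfo list = TypeInfoFactory.getListTypeInfo(str);         // array<string>
        TypeInfo map = TypeInfoFactory.getMapTypeInfo(str, bigint);   // map<string,bigint>
        TypeInfo struct = TypeInfoFactory.getStructTypeInfo(
                Arrays.asList("id", "name"),
                Arrays.asList(bigint, str));                          // struct<id:bigint,name:string>

        // toString() renders the Hive type string
        System.out.println(struct);
    }
}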
Example 1
public BlurObjectInspectorGenerator(Collection<ColumnDefinition> colDefs, BlurColumnNameResolver columnNameResolver)
    throws SerDeException {
  _columnNameResolver = columnNameResolver;
  List<ColumnDefinition> colDefList = new ArrayList<ColumnDefinition>(colDefs);
  Collections.sort(colDefList, COMPARATOR);

  _columnNames.add(ROWID);
  _columnTypes.add(TypeInfoFactory.stringTypeInfo);

  _columnNames.add(RECORDID);
  _columnTypes.add(TypeInfoFactory.stringTypeInfo);

  for (ColumnDefinition columnDefinition : colDefList) {
    String hiveColumnName = _columnNameResolver.fromBlurToHive(columnDefinition.getColumnName());
    _columnNames.add(hiveColumnName);
    _columnTypes.add(getTypeInfo(columnDefinition));
  }
  _objectInspector = createObjectInspector();
}
 
Example 2
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(VarCharType varCharType) {
	// Flink's StringType is defined as VARCHAR(Integer.MAX_VALUE)
	// We don't have more information in LogicalTypeRoot to distinguish StringType and a VARCHAR(Integer.MAX_VALUE) instance
	// Thus always treat VARCHAR(Integer.MAX_VALUE) as StringType
	if (varCharType.getLength() == Integer.MAX_VALUE) {
		return TypeInfoFactory.stringTypeInfo;
	}
	if (varCharType.getLength() > HiveVarchar.MAX_VARCHAR_LENGTH) {
		throw new CatalogException(
				String.format("HiveCatalog doesn't support varchar type with length of '%d'. " +
							"The maximum length is %d",
							varCharType.getLength(), HiveVarchar.MAX_VARCHAR_LENGTH));
	}
	return TypeInfoFactory.getVarcharTypeInfo(varCharType.getLength());
}
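Note the two limits in play above: Flink's VARCHAR(Integer.MAX_VALUE) maps to Hive's unbounded string type, while anything longer than Hive's varchar cap is rejected. The caps are constants in org.apache.hadoop.hive.common.type (values shown are from recent Hive releases):

System.out.println(HiveVarchar.MAX_VARCHAR_LENGTH); // 65535
System.out.println(HiveChar.MAX_CHAR_LENGTH);       // 255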
 
Example 3
Source Project: kite   Source File: TestSchemaConversion.java    License: Apache License 2.0
@Test
public void testConvertSchemaWithComplexRecord() {
  // convertSchema returns a list of FieldSchema objects rather than TypeInfo
  List<FieldSchema> fields = HiveSchemaConverter.convertSchema(COMPLEX_RECORD);

  Assert.assertEquals("Field names should match",
      Lists.newArrayList("groupName", "simpleRecords"),
      Lists.transform(fields, GET_NAMES));
  Assert.assertEquals("Field types should match",
      Lists.newArrayList(
          STRING_TYPE_INFO.toString(),
          TypeInfoFactory.getListTypeInfo(
              TypeInfoFactory.getStructTypeInfo(
                  Lists.newArrayList("id", "name"),
                  Lists.newArrayList(
                      INT_TYPE_INFO,
                      STRING_TYPE_INFO))).toString()),
      Lists.transform(fields, GET_TYPE_STRINGS));
}
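The assertion above leans on the fact that TypeInfo.toString() returns the Hive type string. For the nested type built in this test, that works out as follows (a quick standalone check using the same TypeInfoFactory calls):

TypeInfo nested = TypeInfoFactory.getListTypeInfo(
        TypeInfoFactory.getStructTypeInfo(
                Arrays.asList("id", "name"),
                Arrays.asList(TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo)));
System.out.println(nested); // array<struct<id:int,name:string>>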
 
Example 4
Source Project: kite   Source File: TestSchemaConversion.java    License: Apache License 2.0
@Test
public void testUnion() {
  TypeInfo type = HiveSchemaConverter.convert(SchemaBuilder.builder().unionOf()
      .bytesType().and()
      .fixed("fixed").size(12).and()
      .doubleType().and()
      .longType()
      .endUnion());

  Assert.assertEquals("Union should be converted to union",
      TypeInfoFactory.getUnionTypeInfo(Lists.newArrayList(
          BINARY_TYPE_INFO,
          BINARY_TYPE_INFO,
          DOUBLE_TYPE_INFO,
          LONG_TYPE_INFO)),
      type);
}
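Hive renders union types with the uniontype keyword, so the expected value above corresponds to the type string uniontype<binary,binary,double,bigint> (Avro's bytes and fixed both map to binary). A minimal sketch with distinct members:

TypeInfo union = TypeInfoFactory.getUnionTypeInfo(Arrays.asList(
        TypeInfoFactory.binaryTypeInfo,
        TypeInfoFactory.doubleTypeInfo,
        TypeInfoFactory.longTypeInfo));
System.out.println(union); // uniontype<binary,double,bigint>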
 
Example 5
Source Project: localization_nifi   Source File: NiFiOrcUtils.java    License: Apache License 2.0
public static TypeInfo getPrimitiveOrcTypeFromPrimitiveAvroType(Schema.Type avroType) throws IllegalArgumentException {
    if (avroType == null) {
        throw new IllegalArgumentException("Avro type is null");
    }
    switch (avroType) {
        case INT:
            return TypeInfoFactory.getPrimitiveTypeInfo("int");
        case LONG:
            return TypeInfoFactory.getPrimitiveTypeInfo("bigint");
        case BOOLEAN:
            return TypeInfoFactory.getPrimitiveTypeInfo("boolean");
        case BYTES:
            return TypeInfoFactory.getPrimitiveTypeInfo("binary");
        case DOUBLE:
            return TypeInfoFactory.getPrimitiveTypeInfo("double");
        case FLOAT:
            return TypeInfoFactory.getPrimitiveTypeInfo("float");
        case STRING:
            return TypeInfoFactory.getPrimitiveTypeInfo("string");
        default:
            throw new IllegalArgumentException("Avro type " + avroType.getName() + " is not a primitive type");
    }
}
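A lookup by Hive type name returns a TypeInfo equal to the corresponding static field, which is what lets the tests further down compare results directly with assertEquals. A small sanity check (not from the project):

TypeInfo byName = TypeInfoFactory.getPrimitiveTypeInfo("int");
System.out.println(byName.equals(TypeInfoFactory.intTypeInfo)); // true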
 
Example 6
Source Project: nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_primitive() throws Exception {
    // Expected ORC types
    TypeInfo[] expectedTypes = {
            TypeInfoFactory.getPrimitiveTypeInfo("int"),
            TypeInfoFactory.getPrimitiveTypeInfo("bigint"),
            TypeInfoFactory.getPrimitiveTypeInfo("boolean"),
            TypeInfoFactory.getPrimitiveTypeInfo("float"),
            TypeInfoFactory.getPrimitiveTypeInfo("double"),
            TypeInfoFactory.getPrimitiveTypeInfo("binary"),
            TypeInfoFactory.getPrimitiveTypeInfo("string")
    };

    // Build a fake Avro record with all types
    Schema testSchema = buildPrimitiveAvroSchema();
    List<Schema.Field> fields = testSchema.getFields();
    for (int i = 0; i < fields.size(); i++) {
        assertEquals(expectedTypes[i], NiFiOrcUtils.getOrcField(fields.get(i).schema()));
    }

}
 
Example 7
@Test
public void pushdownTuple() {
    setup();
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
    assertNotNull(node);
    String filterExpr = Utilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    try {
        List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
        assertEquals(sConditions.size(), 1);
        AccumuloPredicateHandler.PushdownTuple tuple = new AccumuloPredicateHandler.PushdownTuple(sConditions.get(0));
        byte [] expectedVal = new byte[4];
        ByteBuffer.wrap(expectedVal).putInt(5);
        assertArrayEquals(expectedVal, tuple.getConstVal()); // byte[] contents must be compared element-wise
        assertEquals(tuple.getcOpt().getClass(), Equal.class);
        assertEquals(tuple.getpCompare().getClass(), IntCompare.class);
    } catch (Exception e) {
        fail(StringUtils.stringifyException(e));
    }
}
 
Example 8
Source Project: nifi   Source File: NiFiOrcUtils.java    License: Apache License 2.0
public static TypeInfo getPrimitiveOrcTypeFromPrimitiveAvroType(Schema.Type avroType) throws IllegalArgumentException {
    if (avroType == null) {
        throw new IllegalArgumentException("Avro type is null");
    }
    switch (avroType) {
        case INT:
            return TypeInfoFactory.getPrimitiveTypeInfo("int");
        case LONG:
            return TypeInfoFactory.getPrimitiveTypeInfo("bigint");
        case BOOLEAN:
        case NULL: // ORC has no null type, so just pick the smallest. All values are necessarily null.
            return TypeInfoFactory.getPrimitiveTypeInfo("boolean");
        case BYTES:
            return TypeInfoFactory.getPrimitiveTypeInfo("binary");
        case DOUBLE:
            return TypeInfoFactory.getPrimitiveTypeInfo("double");
        case FLOAT:
            return TypeInfoFactory.getPrimitiveTypeInfo("float");
        case STRING:
            return TypeInfoFactory.getPrimitiveTypeInfo("string");
        default:
            throw new IllegalArgumentException("Avro type " + avroType.getName() + " is not a primitive type");
    }
}
 
Example 9
Source Project: nifi   Source File: NiFiOrcUtils.java    License: Apache License 2.0
public static TypeInfo getPrimitiveOrcTypeFromPrimitiveFieldType(DataType rawDataType) throws IllegalArgumentException {
    if (rawDataType == null) {
        throw new IllegalArgumentException("Avro type is null");
    }
    RecordFieldType fieldType = rawDataType.getFieldType();
    if (RecordFieldType.INT.equals(fieldType)) {
        return TypeInfoFactory.getPrimitiveTypeInfo("int");
    }
    if (RecordFieldType.LONG.equals(fieldType)) {
        return TypeInfoFactory.getPrimitiveTypeInfo("bigint");
    }
    if (RecordFieldType.BOOLEAN.equals(fieldType)) {
        return TypeInfoFactory.getPrimitiveTypeInfo("boolean");
    }
    if (RecordFieldType.DOUBLE.equals(fieldType)) {
        return TypeInfoFactory.getPrimitiveTypeInfo("double");
    }
    if (RecordFieldType.FLOAT.equals(fieldType)) {
        return TypeInfoFactory.getPrimitiveTypeInfo("float");
    }
    if (RecordFieldType.STRING.equals(fieldType)) {
        return TypeInfoFactory.getPrimitiveTypeInfo("string");
    }

    throw new IllegalArgumentException("Field type " + fieldType.name() + " is not a primitive type");
}
 
Example 10
Source Project: incubator-hivemall   Source File: TryCastUDFTest.java    License: Apache License 2.0
@Test
public void testList() throws IOException, HiveException {
    // try_cast(array(1.0,2.0,3.0), 'array<string>');
    TryCastUDF udf = new TryCastUDF();

    udf.initialize(new ObjectInspector[] {
            ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.writableDoubleObjectInspector),
            PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
                TypeInfoFactory.stringTypeInfo, new Text("array<string>"))});

    DeferredObject[] args = new DeferredObject[] {new GenericUDF.DeferredJavaObject(
        WritableUtils.toWritableList(new double[] {0.1, 1.1, 2.1}))};

    Object result = udf.evaluate(args);

    Assert.assertEquals(WritableUtils.val("0.1", "1.1", "2.1"), result);

    udf.close();
}
 
Example 11
Source Project: nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_primitive() {
    // Expected ORC types
    TypeInfo[] expectedTypes = {
            TypeInfoFactory.getPrimitiveTypeInfo("int"),
            TypeInfoFactory.getPrimitiveTypeInfo("bigint"),
            TypeInfoFactory.getPrimitiveTypeInfo("boolean"),
            TypeInfoFactory.getPrimitiveTypeInfo("float"),
            TypeInfoFactory.getPrimitiveTypeInfo("double"),
            TypeInfoFactory.getPrimitiveTypeInfo("binary"),
            TypeInfoFactory.getPrimitiveTypeInfo("string")
    };

    // Build a fake Avro record with all types
    RecordSchema testSchema = buildPrimitiveRecordSchema();
    List<RecordField> fields = testSchema.getFields();
    for (int i = 0; i < fields.size(); i++) {
        assertEquals(expectedTypes[i], NiFiOrcUtils.getOrcField(fields.get(i).getDataType(), false));
    }
}
 
Example 12
Source Project: nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_record() {
    final SchemaBuilder.FieldAssembler<Schema> builder = SchemaBuilder.record("testRecord").namespace("any.data").fields();
    builder.name("Int").type().intType().noDefault();
    builder.name("Long").type().longType().longDefault(1L);
    builder.name("Array").type().array().items().stringType().noDefault();
    RecordSchema testSchema = AvroTypeUtil.createSchema(builder.endRecord());
    // Normalize field names for Hive, assert that their names are now lowercase
    TypeInfo orcType = NiFiOrcUtils.getOrcSchema(testSchema, true);
    assertEquals(
            TypeInfoFactory.getStructTypeInfo(
                    Arrays.asList("int", "long", "array"),
                    Arrays.asList(
                            TypeInfoCreator.createInt(),
                            TypeInfoCreator.createLong(),
                            TypeInfoFactory.getListTypeInfo(TypeInfoCreator.createString()))),
            orcType);
}
 
Example 13
@Test()
public void rangeEqual() {
    setup();
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
    assertNotNull(node);
    String filterExpr = Utilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    try {
        Collection<Range> ranges = handler.getRanges(conf);
        assertEquals(ranges.size(), 1);
        Range range = ranges.iterator().next();
        assertTrue(range.isStartKeyInclusive());
        assertFalse(range.isEndKeyInclusive());
        assertTrue(range.contains(new Key(new Text("aaa"))));
        assertTrue(range.afterEndKey(new Key(new Text("aab"))));
        assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
 
Example 14
Source Project: bigdata-tutorial   Source File: JSONCDHSerDe.java    License: Apache License 2.0
/**
 * An initialization function used to gather information about the table.
 * Typically, a SerDe implementation will be interested in the list of
 * column names and their types. That information will be used to help perform
 * actual serialization and deserialization of data.
 */
@Override
public void initialize(Configuration conf, Properties tbl)
		throws SerDeException {
	// Get a list of the table's column names.
	String colNamesStr = tbl.getProperty(serdeConstants.LIST_COLUMNS);
	colNames = Arrays.asList(colNamesStr.split(","));

	// Get a list of TypeInfos for the columns. This list lines up with
	// the list of column names.
	String colTypesStr = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
	List<TypeInfo> colTypes =
			TypeInfoUtils.getTypeInfosFromTypeString(colTypesStr);

	rowTypeInfo =
			(StructTypeInfo) TypeInfoFactory.getStructTypeInfo(colNames, colTypes);
	rowOI =
			TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(rowTypeInfo);
}
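The column types arrive as a single comma-separated type string in the table properties; TypeInfoUtils parses it back into TypeInfo objects. A standalone illustration of that round trip (the type string and column names are made up):

List<TypeInfo> colTypes = TypeInfoUtils.getTypeInfosFromTypeString("int,string,array<double>");
StructTypeInfo row = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(
        Arrays.asList("id", "name", "scores"), colTypes);
System.out.println(row); // struct<id:int,name:string,scores:array<double>>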
 
Example 15
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(CharType charType) {
	// Flink and Hive have different length limit for CHAR. Promote it to STRING if it exceeds the limits of
	// Hive and we're told not to check precision. This can be useful when calling Hive UDF to process data.
	if (charType.getLength() > HiveChar.MAX_CHAR_LENGTH || charType.getLength() < 1) {
		if (checkPrecision) {
			throw new CatalogException(
					String.format("HiveCatalog doesn't support char type with length of '%d'. " +
									"The supported length is [%d, %d]",
							charType.getLength(), 1, HiveChar.MAX_CHAR_LENGTH));
		} else {
			return TypeInfoFactory.stringTypeInfo;
		}
	}
	return TypeInfoFactory.getCharTypeInfo(charType.getLength());
}
 
Example 16
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(VarCharType varCharType) {
	// Flink's StringType is defined as VARCHAR(Integer.MAX_VALUE)
	// We don't have more information in LogicalTypeRoot to distinguish StringType and a VARCHAR(Integer.MAX_VALUE) instance
	// Thus always treat VARCHAR(Integer.MAX_VALUE) as StringType
	if (varCharType.getLength() == Integer.MAX_VALUE) {
		return TypeInfoFactory.stringTypeInfo;
	}
	// Flink and Hive have different length limit for VARCHAR. Promote it to STRING if it exceeds the limits of
	// Hive and we're told not to check precision. This can be useful when calling Hive UDF to process data.
	if (varCharType.getLength() > HiveVarchar.MAX_VARCHAR_LENGTH || varCharType.getLength() < 1) {
		if (checkPrecision) {
			throw new CatalogException(
					String.format("HiveCatalog doesn't support varchar type with length of '%d'. " +
									"The supported length is [%d, %d]",
							varCharType.getLength(), 1, HiveVarchar.MAX_VARCHAR_LENGTH));
		} else {
			return TypeInfoFactory.stringTypeInfo;
		}
	}
	return TypeInfoFactory.getVarcharTypeInfo(varCharType.getLength());
}
 
Example 17
Source Project: nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_record() throws Exception {
    final SchemaBuilder.FieldAssembler<Schema> builder = SchemaBuilder.record("testRecord").namespace("any.data").fields();
    builder.name("int").type().intType().noDefault();
    builder.name("long").type().longType().longDefault(1L);
    builder.name("array").type().array().items().stringType().noDefault();
    Schema testSchema = builder.endRecord();
    TypeInfo orcType = NiFiOrcUtils.getOrcField(testSchema);
    assertEquals(
            TypeInfoFactory.getStructTypeInfo(
                    Arrays.asList("int", "long", "array"),
                    Arrays.asList(
                            TypeInfoCreator.createInt(),
                            TypeInfoCreator.createLong(),
                            TypeInfoFactory.getListTypeInfo(TypeInfoCreator.createString()))),
            orcType);
}
 
Example 18
Source Project: hive-dwrf   Source File: TestObjectInspector.java    License: Apache License 2.0
/**
 * Tests that after copying a lazy boolean object, calling materialize on the original and the
 * copy doesn't advance the tree reader twice
 * @throws Exception
 */
@Test
public void TestCopyBoolean() throws Exception {
  ReaderWriterProfiler.setProfilerOptions(null);
  OrcLazyBoolean lazyBoolean = new OrcLazyBoolean(new LazyBooleanTreeReader(0, 0) {
    int nextCalls = 0;

    @Override
    public Object next(Object previous) throws IOException {
      if (nextCalls == 0) {
        return new BooleanWritable(true);
      }

      throw new IOException("next should only be called once");
    }

    @Override
    protected boolean seekToRow(long currentRow) throws IOException {
      return true;
    }
  });

  BooleanObjectInspector booleanOI = (BooleanObjectInspector)
      OrcLazyObjectInspectorUtils.createLazyObjectInspector(TypeInfoFactory.booleanTypeInfo);

  OrcLazyBoolean lazyBoolean2 = (OrcLazyBoolean) booleanOI.copyObject(lazyBoolean);

  Assert.assertEquals(true, ((BooleanWritable) lazyBoolean.materialize()).get());
  Assert.assertEquals(true, ((BooleanWritable) lazyBoolean2.materialize()).get());
}
 
Example 19
Source Project: nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_nested_map() throws Exception {
    final SchemaBuilder.FieldAssembler<Schema> builder = SchemaBuilder.record("testRecord").namespace("any.data").fields();
    builder.name("map").type().map().values().map().values().doubleType().noDefault();
    Schema testSchema = builder.endRecord();
    TypeInfo orcType = NiFiOrcUtils.getOrcField(testSchema.getField("map").schema());
    assertEquals(
            TypeInfoFactory.getMapTypeInfo(TypeInfoCreator.createString(),
                    TypeInfoFactory.getMapTypeInfo(TypeInfoCreator.createString(), TypeInfoCreator.createDouble())),
            orcType);
}
 
Example 20
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(VarBinaryType varBinaryType) {
	// Flink's BytesType is defined as VARBINARY(Integer.MAX_VALUE)
	// We don't have more information in LogicalTypeRoot to distinguish BytesType and a VARBINARY(Integer.MAX_VALUE) instance
	// Thus always treat VARBINARY(Integer.MAX_VALUE) as BytesType
	if (varBinaryType.getLength() == VarBinaryType.MAX_LENGTH) {
		return TypeInfoFactory.binaryTypeInfo;
	}
	return defaultMethod(varBinaryType);
}
 
Example 21
Source Project: nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_decimal() {
    // given
    final DecimalTypeInfo expected = TypeInfoFactory.getDecimalTypeInfo(4, 2);
    final DataType decimalDataType = RecordFieldType.DECIMAL.getDecimalDataType(4, 2);

    // when
    final TypeInfo orcField = NiFiOrcUtils.getOrcField(decimalDataType, false);

    // then
    Assert.assertEquals(expected, orcField);
}
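getDecimalTypeInfo carries precision and scale through to the Hive type string, so the expected value above corresponds to decimal(4,2). A quick sketch:

DecimalTypeInfo d = TypeInfoFactory.getDecimalTypeInfo(4, 2);
System.out.println(d); // decimal(4,2)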
 
Example 22
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(ArrayType arrayType) {
	LogicalType elementType = arrayType.getElementType();
	TypeInfo elementTypeInfo = elementType.accept(new TypeInfoLogicalTypeVisitor(dataType));
	if (null != elementTypeInfo) {
		return TypeInfoFactory.getListTypeInfo(elementTypeInfo);
	} else {
		return defaultMethod(arrayType);
	}
}
 
Example 23
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(MapType mapType) {
	LogicalType keyType  = mapType.getKeyType();
	LogicalType valueType = mapType.getValueType();
	TypeInfo keyTypeInfo = keyType.accept(new TypeInfoLogicalTypeVisitor(dataType));
	TypeInfo valueTypeInfo = valueType.accept(new TypeInfoLogicalTypeVisitor(dataType));
	if (null == keyTypeInfo || null == valueTypeInfo) {
		return defaultMethod(mapType);
	} else {
		return TypeInfoFactory.getMapTypeInfo(keyTypeInfo, valueTypeInfo);
	}
}
 
Example 24
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(RowType rowType) {
	List<String> names = rowType.getFieldNames();
	List<TypeInfo> typeInfos = new ArrayList<>(names.size());
	for (String name : names) {
		TypeInfo typeInfo =
				rowType.getTypeAt(rowType.getFieldIndex(name)).accept(new TypeInfoLogicalTypeVisitor(dataType));
		if (null != typeInfo) {
			typeInfos.add(typeInfo);
		} else {
			return defaultMethod(rowType);
		}
	}
	return TypeInfoFactory.getStructTypeInfo(names, typeInfos);
}
 
Example 25
Source Project: localization_nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_union() throws Exception {
    final SchemaBuilder.FieldAssembler<Schema> builder = SchemaBuilder.record("testRecord").namespace("any.data").fields();
    builder.name("union").type().unionOf().intType().and().booleanType().endUnion().noDefault();
    Schema testSchema = builder.endRecord();
    TypeInfo orcType = NiFiOrcUtils.getOrcField(testSchema.getField("union").schema());
    assertEquals(
            TypeInfoFactory.getUnionTypeInfo(Arrays.asList(
                    TypeInfoCreator.createInt(),
                    TypeInfoCreator.createBoolean())),
            orcType);
}
 
Example 26
Source Project: localization_nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
@Test
public void test_getOrcField_map() throws Exception {
    final SchemaBuilder.FieldAssembler<Schema> builder = SchemaBuilder.record("testRecord").namespace("any.data").fields();
    builder.name("map").type().map().values().doubleType().noDefault();
    Schema testSchema = builder.endRecord();
    TypeInfo orcType = NiFiOrcUtils.getOrcField(testSchema.getField("map").schema());
    assertEquals(
            TypeInfoFactory.getMapTypeInfo(
                    TypeInfoCreator.createString(),
                    TypeInfoCreator.createDouble()),
            orcType);
}
 
Example 27
private TypeInfo getTypeInfo(ColumnDefinition columnDefinition) throws SerDeException {
  String fieldType = columnDefinition.getFieldType();
  TypeInfo typeInfo = getTypeInfo(fieldType);
  if (columnDefinition.isMultiValueField()) {
    return TypeInfoFactory.getListTypeInfo(typeInfo);
  }
  return typeInfo;
}
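Wrapping a field's TypeInfo in getListTypeInfo is all it takes to model a multi-valued column; a multi-value string field, for instance, becomes array<string>:

TypeInfo multi = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
System.out.println(multi); // array<string>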
 
Example 28
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
@Override
public TypeInfo visit(ArrayType arrayType) {
	LogicalType elementType = arrayType.getElementType();
	TypeInfo elementTypeInfo = elementType.accept(this);
	if (null != elementTypeInfo) {
		return TypeInfoFactory.getListTypeInfo(elementTypeInfo);
	} else {
		return defaultMethod(arrayType);
	}
}
 
Example 29
Source Project: localization_nifi   Source File: TestNiFiOrcUtils.java    License: Apache License 2.0
public static TypeInfo buildPrimitiveOrcSchema() {
    return TypeInfoFactory.getStructTypeInfo(Arrays.asList("int", "long", "boolean", "float", "double", "bytes", "string"),
            Arrays.asList(
                    TypeInfoCreator.createInt(),
                    TypeInfoCreator.createLong(),
                    TypeInfoCreator.createBoolean(),
                    TypeInfoCreator.createFloat(),
                    TypeInfoCreator.createDouble(),
                    TypeInfoCreator.createBinary(),
                    TypeInfoCreator.createString()));
}
 
Example 30
Source Project: kite   Source File: TestSchemaConversion.java    License: Apache License 2.0
@Test
public void testArray() {
  TypeInfo type = HiveSchemaConverter.convert(SchemaBuilder.array()
      .items().floatType());

  Assert.assertEquals("Array should be converted to list",
      TypeInfoFactory.getListTypeInfo(FLOAT_TYPE_INFO),
      type);
}