Java Code Examples for org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo

The following examples show how to use org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo. They are extracted from open source projects; the originating project and source file are noted above each example where available.
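Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the two ways a PrimitiveTypeInfo is typically obtained: through the cached constants on TypeInfoFactory in the same package, or through the no-arg constructor plus setTypeName, as several of the test examples below do. The class name PrimitiveTypeInfoSketch is made up for illustration and assumes only that the Hive serde2 classes are on the classpath.

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PrimitiveTypeInfoSketch {
    public static void main(String[] args) {
        // Preferred: reuse the cached instances exposed by TypeInfoFactory.
        PrimitiveTypeInfo stringType = TypeInfoFactory.stringTypeInfo;
        PrimitiveTypeInfo intType = TypeInfoFactory.intTypeInfo;

        // Also seen below (mostly in tests): construct and name the type manually.
        PrimitiveTypeInfo bigintType = new PrimitiveTypeInfo();
        bigintType.setTypeName("bigint");

        System.out.println(stringType.getTypeName());           // string
        System.out.println(intType.getPrimitiveCategory());     // INT
        System.out.println(bigintType.getPrimitiveCategory());  // LONG ("bigint" maps to LONG)
    }
}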
Example 1
private static void fieldEscaper(List<ExprNodeDesc> exprNodes, ExprNodeDesc parent, Set<String> columnNamesInNotInExpression) {
  if (exprNodes == null || exprNodes.isEmpty()) {
    return;
  } else {
    for (ExprNodeDesc nodeDesc : exprNodes) {
      String nodeType = nodeDesc.getTypeString().toLowerCase();
      if (QUOTED_TYPES.contains(nodeType)) {
        PrimitiveTypeInfo tInfo = new PrimitiveTypeInfo();
        tInfo.setTypeName(HIVE_STRING_TYPE_NAME);
        nodeDesc.setTypeInfo(tInfo);
      }
      addColumnNamesOfNotInExpressionToSet(nodeDesc, parent, columnNamesInNotInExpression);
      fieldEscaper(nodeDesc.getChildren(), nodeDesc, columnNamesInNotInExpression);
    }
  }
}
 
Example 2
Source Project: presto   Source File: HiveBucketing.java    License: Apache License 2.0
private static boolean containsTimestampBucketedV2(TypeInfo type)
{
    switch (type.getCategory()) {
        case PRIMITIVE:
            return ((PrimitiveTypeInfo) type).getPrimitiveCategory() == TIMESTAMP;
        case LIST:
            return containsTimestampBucketedV2(((ListTypeInfo) type).getListElementTypeInfo());
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) type;
            // Note: we do not check map value type because HiveBucketingV2#hashOfMap hashes map values with v1
            return containsTimestampBucketedV2(mapTypeInfo.getMapKeyTypeInfo());
        default:
            // TODO: support more types, e.g. ROW
            throw new UnsupportedOperationException("Computation of Hive bucket hashCode is not supported for Hive category: " + type.getCategory());
    }
}
 
Example 3
Source Project: presto   Source File: HiveWriteUtils.java    License: Apache License 2.0
private static boolean isWritableType(TypeInfo typeInfo)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            return isWritablePrimitiveType(primitiveCategory);
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            return isWritableType(mapTypeInfo.getMapKeyTypeInfo()) && isWritableType(mapTypeInfo.getMapValueTypeInfo());
        case LIST:
            ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
            return isWritableType(listTypeInfo.getListElementTypeInfo());
        case STRUCT:
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            return structTypeInfo.getAllStructFieldTypeInfos().stream().allMatch(HiveWriteUtils::isWritableType);
    }
    return false;
}
 
Example 4
Source Project: incubator-iotdb   Source File: TsFileSerDe.java    License: Apache License 2.0
private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws TsFileSerDeException {
  if(!supportedCategories(ti)) {
    throw new TsFileSerDeException("Don't yet support this type: " + ti);
  }
  ObjectInspector result;
  switch(ti.getCategory()) {
    case PRIMITIVE:
      PrimitiveTypeInfo pti = (PrimitiveTypeInfo) ti;
      result = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
      break;
    // these types are not supported in TsFile
    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
      throw new TsFileSerDeException("The type is not supported in TsFile: " + ti);
    default:
      throw new TsFileSerDeException("No Hive categories matched: " + ti);
  }
  return result;
}
 
Example 5
Source Project: incubator-iotdb   Source File: TsFileSerDeTest.java    License: Apache License 2.0
@Before
public void setUp() {
  tsFileSerDer = new TsFileSerDe();
  columnNames = Arrays.asList("time_stamp", "sensor_1");
  columnTypes = new ArrayList<>();
  PrimitiveTypeInfo typeInfo1 = new PrimitiveTypeInfo();
  typeInfo1.setTypeName("bigint");
  columnTypes.add(typeInfo1);
  PrimitiveTypeInfo typeInfo2 = new PrimitiveTypeInfo();
  typeInfo2.setTypeName("bigint");
  columnTypes.add(typeInfo2);
  tbl = new Properties();
  String delimiter = ",";
  tbl.setProperty(serdeConstants.COLUMN_NAME_DELIMITER, delimiter);
  tbl.setProperty(serdeConstants.LIST_COLUMNS, String.join(delimiter, columnNames));
  tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "bigint,bigint");
  tbl.setProperty(TsFileSerDe.DEVICE_ID, "device_1");
  job = new JobConf();
  try {
    tsFileSerDer.initialize(job, tbl);
  } catch (SerDeException e) {
    e.printStackTrace();

  }
}
 
Example 6
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
  TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
  if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
    keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }
  else{
    throw new RuntimeException( "Map key type is string only." );
  }

  valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() ); 

  if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
    getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
  }
  else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
    getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
  }
  else{
    getField = new NestedGetField();
  }
}
 
Example 7
Source Project: emodb   Source File: EmoSerDe.java    License: Apache License 2.0
/**
 * Determines if the given primitive is supported by this deserializer.  At this time the only exclusions are
 * BINARY, DECIMAL, VARCHAR, CHAR, and UNKNOWN.
 */
private boolean isSupportedPrimitive(PrimitiveTypeInfo type) {
    switch (type.getPrimitiveCategory()) {
        case VOID:
        case STRING:
        case BOOLEAN:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
        case DATE:
        case TIMESTAMP:
            return true;
        default:
            return false;
    }
}
 
Example 8
Source Project: emodb   Source File: EmoSerDe.java    License: Apache License 2.0
/**
 * Deserializes a primitive to its corresponding Java type, doing a best-effort conversion when necessary.
 */
private Object deserializePrimitive(PrimitiveTypeInfo type, Object value)
        throws SerDeException {
    switch (type.getPrimitiveCategory()) {
        case VOID:
            return null;
        case STRING:
            return deserializeString(value);
        case BOOLEAN:
            return deserializeBoolean(value);
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
            return deserializeNumber(value, type);
        case DATE:
        case TIMESTAMP:
            return deserializeDate(value, type);
        default:
            throw new SerDeException("Unsupported type: " + type.getPrimitiveCategory());
    }
}
 
Example 9
Source Project: emodb   Source File: EmoSerDe.java    License: Apache License 2.0
private Object deserializeNumber(Object value, PrimitiveTypeInfo type)
        throws SerDeException {
    // Note that only numbers and booleans are supported.  All other types cannot be deserialized.  In particular
    // String representations of numbers are not parsed.
    Number number;
    if (value instanceof Number) {
        number = (Number) value;
    } else if (value instanceof Boolean) {
        number = ((Boolean) value) ? (byte) 1 : 0;
    } else {
        throw new SerDeException("Value is not a " + type + ": " + value);
    }

    switch (type.getPrimitiveCategory()) {
        case BYTE:   return number.byteValue();
        case SHORT:  return number.shortValue();
        case INT:    return number.intValue();
        case LONG:   return number.longValue();
        case FLOAT:  return number.floatValue();
        case DOUBLE: return number.doubleValue();
    }

    throw new SerDeException("Primitive number did not match any expected categories"); // Unreachable
}
 
Example 10
Source Project: emodb   Source File: EmoSerDe.java    License: Apache License 2.0
private Object deserializeDate(Object value, PrimitiveTypeInfo type)
        throws SerDeException {
    long ts;
    // Dates can be either ISO8601 Strings or numeric timestamps.  Any other data type or format cannot be
    // deserialized.
    if (value instanceof String) {
        try {
            ts = JsonHelper.parseTimestamp((String) value).getTime();
        } catch (Exception e) {
            throw new SerDeException("Invalid time string: " + value);
        }
    } else if (value instanceof Number) {
        ts = ((Number) value).longValue();
    } else if (value instanceof java.util.Date) {
        ts = ((java.util.Date) value).getTime();
    } else {
        throw new SerDeException("Invalid time value: " + value);
    }

    if (type.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE) {
        return new Date(ts);
    } else {
        return new Timestamp(ts);
    }
}
 
Example 11
Source Project: incubator-hivemall   Source File: KuromojiUDFTest.java    License: Apache License 2.0
@Test(expected = UDFArgumentException.class)
public void testInvalidMode() throws IOException, HiveException {
    GenericUDF udf = new KuromojiUDF();
    ObjectInspector[] argOIs = new ObjectInspector[2];
    // line
    argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // mode
    PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
    stringType.setTypeName("string");
    argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, new Text("unsupported mode"));
    udf.initialize(argOIs);

    DeferredObject[] args = new DeferredObject[1];
    args[0] = new DeferredObject() {
        public Text get() throws HiveException {
            return new Text("クロモジのJapaneseAnalyzerを使ってみる。テスト。");
        }

        @Override
        public void prepare(int arg) throws HiveException {}
    };
    udf.evaluate(args);

    udf.close();
}
 
Example 12
Source Project: incubator-hivemall   Source File: KuromojiUDFTest.java    License: Apache License 2.0
@Test
public void testThreeArgument() throws UDFArgumentException, IOException {
    GenericUDF udf = new KuromojiUDF();
    ObjectInspector[] argOIs = new ObjectInspector[3];
    // line
    argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // mode
    PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
    stringType.setTypeName("string");
    argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, null);
    // stopWords
    argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    udf.initialize(argOIs);
    udf.close();
}
 
Example 13
Source Project: incubator-hivemall   Source File: KuromojiUDFTest.java    License: Apache License 2.0
@Test
public void testFourArgument() throws UDFArgumentException, IOException {
    GenericUDF udf = new KuromojiUDF();
    ObjectInspector[] argOIs = new ObjectInspector[4];
    // line
    argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // mode
    PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
    stringType.setTypeName("string");
    argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, null);
    // stopWords
    argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    // stopTags
    argOIs[3] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    udf.initialize(argOIs);
    udf.close();
}
 
Example 14
Source Project: incubator-hivemall   Source File: KuromojiUDFTest.java    License: Apache License 2.0
@Test
public void testFiveArgumentArray() throws UDFArgumentException, IOException {
    GenericUDF udf = new KuromojiUDF();
    ObjectInspector[] argOIs = new ObjectInspector[5];
    // line
    argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // mode
    PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
    stringType.setTypeName("string");
    argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, null);
    // stopWords
    argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    // stopTags
    argOIs[3] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    // userDictUrl
    argOIs[4] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    udf.initialize(argOIs);
    udf.close();
}
 
Example 15
Source Project: incubator-hivemall   Source File: KuromojiUDFTest.java    License: Apache License 2.0
@Test
public void testFiveArgumenString() throws UDFArgumentException, IOException {
    GenericUDF udf = new KuromojiUDF();
    ObjectInspector[] argOIs = new ObjectInspector[5];
    // line
    argOIs[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // mode
    PrimitiveTypeInfo stringType = new PrimitiveTypeInfo();
    stringType.setTypeName("string");
    argOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, null);
    // stopWords
    argOIs[2] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    // stopTags
    argOIs[3] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, null);
    // userDictUrl
    argOIs[4] = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        stringType, null);
    udf.initialize(argOIs);
    udf.close();
}
 
Example 16
Source Project: incubator-hivemall   Source File: HiveUtils.java    License: Apache License 2.0
public static boolean isNumberTypeInfo(@Nonnull TypeInfo typeInfo) {
    if (typeInfo.getCategory() != ObjectInspector.Category.PRIMITIVE) {
        return false;
    }
    switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
        case DECIMAL:
            return true;
        default:
            return false;
    }
}
 
Example 17
Source Project: Cobol-to-Hive   Source File: CobolDeserializer.java    License: Apache License 2.0
private Object worker(String columnName, TypeInfo columnType) {
	switch (columnType.getCategory()) {
		case STRUCT:
			return deserializeStruct(columnName, (StructTypeInfo) columnType);
		case UNION:
			return deserializeUnion(columnName, (UnionTypeInfo) columnType);
		case LIST:
			return deserializeList(columnName, (ListTypeInfo) columnType);
		case MAP:
			throw new RuntimeException("map type is not possible for cobol layout" + columnType.getCategory());
		case PRIMITIVE:
			return deserializePrimitive(columnName, (PrimitiveTypeInfo) columnType);
		default:
			throw new RuntimeException("Unknown TypeInfo: " + columnType.getCategory());
	}
}
 
Example 18
private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws SerDeException {
  switch (ti.getCategory()) {
  case PRIMITIVE:
    PrimitiveTypeInfo pti = (PrimitiveTypeInfo) ti;
    return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
  case STRUCT:
    StructTypeInfo sti = (StructTypeInfo) ti;
    List<ObjectInspector> ois = new ArrayList<ObjectInspector>(sti.getAllStructFieldTypeInfos().size());
    for (TypeInfo typeInfo : sti.getAllStructFieldTypeInfos()) {
      ois.add(createObjectInspectorWorker(typeInfo));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(sti.getAllStructFieldNames(), ois);
  case LIST:
    ListTypeInfo lti = (ListTypeInfo) ti;
    TypeInfo listElementTypeInfo = lti.getListElementTypeInfo();
    return ObjectInspectorFactory.getStandardListObjectInspector(createObjectInspectorWorker(listElementTypeInfo));
  default:
    throw new SerDeException("No Hive categories matched for [" + ti + "]");
  }
}
 
Example 19
Source Project: presto   Source File: HiveType.java    License: Apache License 2.0
public static boolean isSupportedType(TypeInfo typeInfo, StorageFormat storageFormat)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            return getPrimitiveType((PrimitiveTypeInfo) typeInfo) != null;
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            return isSupportedType(mapTypeInfo.getMapKeyTypeInfo(), storageFormat) && isSupportedType(mapTypeInfo.getMapValueTypeInfo(), storageFormat);
        case LIST:
            ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
            return isSupportedType(listTypeInfo.getListElementTypeInfo(), storageFormat);
        case STRUCT:
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            return structTypeInfo.getAllStructFieldTypeInfos().stream()
                    .allMatch(fieldTypeInfo -> isSupportedType(fieldTypeInfo, storageFormat));
        case UNION:
            // This feature (reading uniontypes as structs) has only been verified against Avro and ORC tables. Here's a discussion:
            //   1. Avro tables are supported and verified.
            //   2. ORC tables are supported and verified.
            //   3. The Parquet format doesn't support uniontypes itself so there's no need to add support for it in Presto.
            //   4. TODO: RCFile tables are not supported yet.
            //   5. TODO: The support for Avro is done in SerDeUtils so it's possible that formats other than Avro are also supported. But verification is needed.
            if (storageFormat.getSerDe().equalsIgnoreCase(AVRO.getSerDe()) || storageFormat.getSerDe().equalsIgnoreCase(ORC.getSerDe())) {
                UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
                return unionTypeInfo.getAllUnionObjectTypeInfos().stream()
                        .allMatch(fieldTypeInfo -> isSupportedType(fieldTypeInfo, storageFormat));
            }
    }
    return false;
}
 
Example 20
Source Project: presto   Source File: HiveType.java    License: Apache License 2.0
public static Type getPrimitiveType(PrimitiveTypeInfo typeInfo)
{
    switch (typeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            return BOOLEAN;
        case BYTE:
            return TINYINT;
        case SHORT:
            return SMALLINT;
        case INT:
            return INTEGER;
        case LONG:
            return BIGINT;
        case FLOAT:
            return REAL;
        case DOUBLE:
            return DOUBLE;
        case STRING:
            return createUnboundedVarcharType();
        case VARCHAR:
            return createVarcharType(((VarcharTypeInfo) typeInfo).getLength());
        case CHAR:
            return createCharType(((CharTypeInfo) typeInfo).getLength());
        case DATE:
            return DATE;
        case TIMESTAMP:
            return TIMESTAMP;
        case BINARY:
            return VARBINARY;
        case DECIMAL:
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
            return createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.scale());
        default:
            return null;
    }
}
 
Example 21
Source Project: presto   Source File: ThriftMetastoreUtil.java    License: Apache License 2.0
public static ColumnStatisticsObj createMetastoreColumnStatistics(String columnName, HiveType columnType, HiveColumnStatistics statistics, OptionalLong rowCount)
{
    TypeInfo typeInfo = columnType.getTypeInfo();
    checkArgument(typeInfo.getCategory() == PRIMITIVE, "unsupported type: %s", columnType);
    switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
        case BOOLEAN:
            return createBooleanStatistics(columnName, columnType, statistics);
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            return createLongStatistics(columnName, columnType, statistics);
        case FLOAT:
        case DOUBLE:
            return createDoubleStatistics(columnName, columnType, statistics);
        case STRING:
        case VARCHAR:
        case CHAR:
            return createStringStatistics(columnName, columnType, statistics, rowCount);
        case DATE:
            return createDateStatistics(columnName, columnType, statistics);
        case TIMESTAMP:
            return createLongStatistics(columnName, columnType, statistics);
        case BINARY:
            return createBinaryStatistics(columnName, columnType, statistics, rowCount);
        case DECIMAL:
            return createDecimalStatistics(columnName, columnType, statistics);
        default:
            throw new IllegalArgumentException(format("unsupported type: %s", columnType));
    }
}
 
Example 22
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
/**
 * Convert Hive data type to a Flink data type.
 *
 * @param hiveType a Hive data type
 * @return the corresponding Flink data type
 */
public static DataType toFlinkType(TypeInfo hiveType) {
	checkNotNull(hiveType, "hiveType cannot be null");

	switch (hiveType.getCategory()) {
		case PRIMITIVE:
			return toFlinkPrimitiveType((PrimitiveTypeInfo) hiveType);
		case LIST:
			ListTypeInfo listTypeInfo = (ListTypeInfo) hiveType;
			return DataTypes.ARRAY(toFlinkType(listTypeInfo.getListElementTypeInfo()));
		case MAP:
			MapTypeInfo mapTypeInfo = (MapTypeInfo) hiveType;
			return DataTypes.MAP(toFlinkType(mapTypeInfo.getMapKeyTypeInfo()), toFlinkType(mapTypeInfo.getMapValueTypeInfo()));
		case STRUCT:
			StructTypeInfo structTypeInfo = (StructTypeInfo) hiveType;

			List<String> names = structTypeInfo.getAllStructFieldNames();
			List<TypeInfo> typeInfos = structTypeInfo.getAllStructFieldTypeInfos();

			DataTypes.Field[] fields = new DataTypes.Field[names.size()];

			for (int i = 0; i < fields.length; i++) {
				fields[i] = DataTypes.FIELD(names.get(i), toFlinkType(typeInfos.get(i)));
			}

			return DataTypes.ROW(fields);
		default:
			throw new UnsupportedOperationException(
				String.format("Flink doesn't support Hive data type %s yet.", hiveType));
	}
}
 
Example 23
Source Project: flink   Source File: HiveTypeUtil.java    License: Apache License 2.0
private static DataType toFlinkPrimitiveType(PrimitiveTypeInfo hiveType) {
	checkNotNull(hiveType, "hiveType cannot be null");

	switch (hiveType.getPrimitiveCategory()) {
		case CHAR:
			return DataTypes.CHAR(((CharTypeInfo) hiveType).getLength());
		case VARCHAR:
			return DataTypes.VARCHAR(((VarcharTypeInfo) hiveType).getLength());
		case STRING:
			return DataTypes.STRING();
		case BOOLEAN:
			return DataTypes.BOOLEAN();
		case BYTE:
			return DataTypes.TINYINT();
		case SHORT:
			return DataTypes.SMALLINT();
		case INT:
			return DataTypes.INT();
		case LONG:
			return DataTypes.BIGINT();
		case FLOAT:
			return DataTypes.FLOAT();
		case DOUBLE:
			return DataTypes.DOUBLE();
		case DATE:
			return DataTypes.DATE();
		case TIMESTAMP:
			return DataTypes.TIMESTAMP();
		case BINARY:
			return DataTypes.BYTES();
		case DECIMAL:
			DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
			return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
		default:
			throw new UnsupportedOperationException(
				String.format("Flink doesn't support Hive primitive type %s yet", hiveType));
	}
}
 
Example 24
Source Project: flink   Source File: HiveInspectors.java    License: Apache License 2.0
private static ConstantObjectInspector getPrimitiveJavaConstantObjectInspector(PrimitiveTypeInfo typeInfo, Object value) {
	switch (typeInfo.getPrimitiveCategory()) {
		case BOOLEAN:
			return new JavaConstantBooleanObjectInspector((Boolean) value);
		case BYTE:
			return new JavaConstantByteObjectInspector((Byte) value);
		case SHORT:
			return new JavaConstantShortObjectInspector((Short) value);
		case INT:
			return new JavaConstantIntObjectInspector((Integer) value);
		case LONG:
			return new JavaConstantLongObjectInspector((Long) value);
		case FLOAT:
			return new JavaConstantFloatObjectInspector((Float) value);
		case DOUBLE:
			return new JavaConstantDoubleObjectInspector((Double) value);
		case STRING:
			return new JavaConstantStringObjectInspector((String) value);
		case CHAR:
			return new JavaConstantHiveCharObjectInspector((HiveChar) value);
		case VARCHAR:
			return new JavaConstantHiveVarcharObjectInspector((HiveVarchar) value);
		case DATE:
			return new JavaConstantDateObjectInspector((Date) value);
		case TIMESTAMP:
			return new JavaConstantTimestampObjectInspector((Timestamp) value);
		case DECIMAL:
			return new JavaConstantHiveDecimalObjectInspector((HiveDecimal) value);
		case BINARY:
			return new JavaConstantBinaryObjectInspector((byte[]) value);
		case UNKNOWN:
		case VOID:
			// If type is null, we use the Java Constant String to replace
			return new JavaConstantStringObjectInspector((String) value);
		default:
			throw new FlinkHiveUDFException(
				String.format("Cannot find ConstantObjectInspector for %s", typeInfo));
	}
}
 
Example 25
Source Project: flink   Source File: HiveInspectors.java    License: Apache License 2.0
private static ObjectInspector getObjectInspector(TypeInfo type) {
	switch (type.getCategory()) {

		case PRIMITIVE:
			PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
			return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveType);
		case LIST:
			ListTypeInfo listType = (ListTypeInfo) type;
			return ObjectInspectorFactory.getStandardListObjectInspector(
					getObjectInspector(listType.getListElementTypeInfo()));
		case MAP:
			MapTypeInfo mapType = (MapTypeInfo) type;
			return ObjectInspectorFactory.getStandardMapObjectInspector(
					getObjectInspector(mapType.getMapKeyTypeInfo()), getObjectInspector(mapType.getMapValueTypeInfo()));
		case STRUCT:
			StructTypeInfo structType = (StructTypeInfo) type;
			List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();

			List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
			for (TypeInfo fieldType : fieldTypes) {
				fieldInspectors.add(getObjectInspector(fieldType));
			}

			return ObjectInspectorFactory.getStandardStructObjectInspector(
					structType.getAllStructFieldNames(), fieldInspectors);
		default:
			throw new CatalogException("Unsupported Hive type category " + type.getCategory());
	}
}
 
Example 26
Source Project: marble   Source File: TypeInferenceUtil.java    License: Apache License 2.0
private static ObjectInspector getPrimitiveObjectInspector(
    RelDataTypeHolder relDataTypeHolder) {
  SqlTypeName sqlTypeName = relDataTypeHolder.getSqlTypeName();

  // FIXME: Hive TypeInfoFactory.decimalTypeInfo uses a default scale and precision
  PrimitiveTypeInfo primitiveTypeInfo = CALCITE_SQL_TYPE_2_HIVE_TYPE_INFO.get(
      sqlTypeName);
  if (primitiveTypeInfo == null) {
    throw new IllegalArgumentException(
        "can't find hive primitiveTypeInfo for Calcite SqlType: "
            + sqlTypeName);
  }
  ObjectInspector result;
  if (relDataTypeHolder.isConstant()) {
    Object value = relDataTypeHolder.getValue();
    Object hiveWritableValue = convertCalciteObject2HiveWritableObject(
        relDataTypeHolder,
        value);
    result =
        PrimitiveObjectInspectorFactory
            .getPrimitiveWritableConstantObjectInspector(
                primitiveTypeInfo, hiveWritableValue);
  } else {
    result =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
            primitiveTypeInfo);
  }

  return result;
}
 
Example 27
Source Project: incubator-iotdb   Source File: TsFileDeserializerTest.java    License: Apache License 2.0
@Before
public void setUp() {
  tsFileDeserializer = new TsFileDeserializer();
  columnNames = Arrays.asList("time_stamp", "sensor_1");
  columnTypes = new ArrayList<>();
  PrimitiveTypeInfo typeInfo1 = new PrimitiveTypeInfo();
  typeInfo1.setTypeName("bigint");
  columnTypes.add(typeInfo1);
  PrimitiveTypeInfo typeInfo2 = new PrimitiveTypeInfo();
  typeInfo2.setTypeName("bigint");
  columnTypes.add(typeInfo2);
}
 
Example 28
public static IColumnVectorAssignor create( final TypeInfo typeInfo ){
  switch ( typeInfo.getCategory() ){
    case PRIMITIVE:
      PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo)typeInfo;
      switch( primitiveTypeInfo.getPrimitiveCategory() ){
        case STRING:
        case BINARY:
          return new BytesColumnVectorAssignor();
        case BYTE:
          return new LongColumnVectorAssignor( BytePrimitiveSetter.getInstance() );
        case SHORT:
          return new LongColumnVectorAssignor( ShortPrimitiveSetter.getInstance() );
        case INT:
          return new LongColumnVectorAssignor( IntegerPrimitiveSetter.getInstance() );
        case BOOLEAN:
        case LONG:
          return new LongColumnVectorAssignor( LongPrimitiveSetter.getInstance() );
        case FLOAT:
          return new DoubleColumnVectorAssignor( FloatPrimitiveSetter.getInstance() );
        case DOUBLE:
          return new DoubleColumnVectorAssignor( DoublePrimitiveSetter.getInstance() );
        case DATE:
        case DECIMAL:
        case TIMESTAMP:
        case VOID:
        default:
          throw new UnsupportedOperationException( "Unsupport vectorize column " + primitiveTypeInfo.getPrimitiveCategory() );
      }
    case STRUCT:
    case MAP:
    case LIST:
    case UNION:
    default:
      throw new UnsupportedOperationException( "Unsupport vectorize column " + typeInfo.getCategory() );
  }
}
 
Example 29
Source Project: dremio-oss   Source File: HiveMetadataUtils.java    License: Apache License 2.0
private static boolean isFieldTypeVarchar(FieldSchema hiveField) {
  final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType());
  if (typeInfo.getCategory() == Category.PRIMITIVE) {
    PrimitiveTypeInfo pTypeInfo = (PrimitiveTypeInfo) typeInfo;
    if (pTypeInfo.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.VARCHAR ||
      pTypeInfo.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.CHAR) {
      return true;
    }
  }
  return false;
}
 