Java Code Examples for org.apache.hadoop.hive.common.type.HiveVarchar#MAX_VARCHAR_LENGTH

The following examples show how to use org.apache.hadoop.hive.common.type.HiveVarchar#MAX_VARCHAR_LENGTH. Each example is drawn from an open-source project; the source file and license are noted above the snippet.
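Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) of the most common use of this constant: validating a requested VARCHAR length against HiveVarchar.MAX_VARCHAR_LENGTH (65535) before creating a Hive type info. The class name VarcharLengthCheck and the method varcharTypeInfoFor are illustrative only.

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public class VarcharLengthCheck {
    // Hypothetical helper: rejects lengths outside Hive's supported range [1, 65535]
    // and otherwise delegates to TypeInfoFactory, as the examples below do.
    public static VarcharTypeInfo varcharTypeInfoFor(int requestedLength) {
        if (requestedLength < 1 || requestedLength > HiveVarchar.MAX_VARCHAR_LENGTH) {
            throw new IllegalArgumentException(String.format(
                    "VARCHAR length must be in [1, %d], got %d",
                    HiveVarchar.MAX_VARCHAR_LENGTH, requestedLength));
        }
        return TypeInfoFactory.getVarcharTypeInfo(requestedLength);
    }

    public static void main(String[] args) {
        // A length that fits is returned as a bounded varchar type info;
        // HiveVarchar itself truncates its value to the given length.
        System.out.println(varcharTypeInfoFor(255));
        System.out.println(new HiveVarchar("hello", HiveVarchar.MAX_VARCHAR_LENGTH).getValue());
    }
}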
Example 1
Source File: HiveTypeUtil.java    From flink with Apache License 2.0
@Override
public TypeInfo visit(VarCharType varCharType) {
	// Flink's StringType is defined as VARCHAR(Integer.MAX_VALUE)
	// We don't have more information in LogicalTypeRoot to distinguish StringType and a VARCHAR(Integer.MAX_VALUE) instance
	// Thus always treat VARCHAR(Integer.MAX_VALUE) as StringType
	if (varCharType.getLength() == Integer.MAX_VALUE) {
		return TypeInfoFactory.stringTypeInfo;
	}
	if (varCharType.getLength() > HiveVarchar.MAX_VARCHAR_LENGTH) {
		throw new CatalogException(
				String.format("HiveCatalog doesn't support varchar type with length of '%d'. " +
							"The maximum length is %d",
							varCharType.getLength(), HiveVarchar.MAX_VARCHAR_LENGTH));
	}
	return TypeInfoFactory.getVarcharTypeInfo(varCharType.getLength());
}
 
Example 2
Source File: HiveTypeUtil.java    From flink with Apache License 2.0
@Override
public TypeInfo visit(VarCharType varCharType) {
	// Flink's StringType is defined as VARCHAR(Integer.MAX_VALUE)
	// We don't have more information in LogicalTypeRoot to distinguish StringType and a VARCHAR(Integer.MAX_VALUE) instance
	// Thus always treat VARCHAR(Integer.MAX_VALUE) as StringType
	if (varCharType.getLength() == Integer.MAX_VALUE) {
		return TypeInfoFactory.stringTypeInfo;
	}
	// Flink and Hive have different length limit for VARCHAR. Promote it to STRING if it exceeds the limits of
	// Hive and we're told not to check precision. This can be useful when calling Hive UDF to process data.
	if (varCharType.getLength() > HiveVarchar.MAX_VARCHAR_LENGTH || varCharType.getLength() < 1) {
		if (checkPrecision) {
			throw new CatalogException(
					String.format("HiveCatalog doesn't support varchar type with length of '%d'. " +
									"The supported length is [%d, %d]",
							varCharType.getLength(), 1, HiveVarchar.MAX_VARCHAR_LENGTH));
		} else {
			return TypeInfoFactory.stringTypeInfo;
		}
	}
	return TypeInfoFactory.getVarcharTypeInfo(varCharType.getLength());
}
 
Example 3
Source File: TypeConverter.java    From presto with Apache License 2.0
private static TypeInfo toHiveTypeInfo(Type type)
{
    if (BOOLEAN.equals(type)) {
        return HIVE_BOOLEAN.getTypeInfo();
    }
    if (BIGINT.equals(type)) {
        return HIVE_LONG.getTypeInfo();
    }
    if (INTEGER.equals(type)) {
        return HIVE_INT.getTypeInfo();
    }
    if (SMALLINT.equals(type)) {
        return HIVE_SHORT.getTypeInfo();
    }
    if (TINYINT.equals(type)) {
        return HIVE_BYTE.getTypeInfo();
    }
    if (REAL.equals(type)) {
        return HIVE_FLOAT.getTypeInfo();
    }
    if (DOUBLE.equals(type)) {
        return HIVE_DOUBLE.getTypeInfo();
    }
    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        if (varcharType.isUnbounded()) {
            return HIVE_STRING.getTypeInfo();
        }
        if (varcharType.getBoundedLength() <= HiveVarchar.MAX_VARCHAR_LENGTH) {
            return getVarcharTypeInfo(varcharType.getBoundedLength());
        }
        throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s. Supported VARCHAR types: VARCHAR(<=%d), VARCHAR.", type, HiveVarchar.MAX_VARCHAR_LENGTH));
    }
    if (type instanceof CharType) {
        CharType charType = (CharType) type;
        int charLength = charType.getLength();
        if (charLength <= HiveChar.MAX_CHAR_LENGTH) {
            return getCharTypeInfo(charLength);
        }
        throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s. Supported CHAR types: CHAR(<=%d).",
                type, HiveChar.MAX_CHAR_LENGTH));
    }
    if (VARBINARY.equals(type)) {
        return HIVE_BINARY.getTypeInfo();
    }
    if (DATE.equals(type)) {
        return HIVE_DATE.getTypeInfo();
    }
    if (TIMESTAMP.equals(type)) {
        return HIVE_TIMESTAMP.getTypeInfo();
    }
    if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
        // Hive does not have TIMESTAMP_WITH_TIME_ZONE, this is just a work around for iceberg.
        return HIVE_TIMESTAMP.getTypeInfo();
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
    }
    if (isArrayType(type)) {
        TypeInfo elementType = toHiveTypeInfo(type.getTypeParameters().get(0));
        return getListTypeInfo(elementType);
    }
    if (isMapType(type)) {
        TypeInfo keyType = toHiveTypeInfo(type.getTypeParameters().get(0));
        TypeInfo valueType = toHiveTypeInfo(type.getTypeParameters().get(1));
        return getMapTypeInfo(keyType, valueType);
    }
    if (isRowType(type)) {
        ImmutableList.Builder<String> fieldNames = ImmutableList.builder();
        for (TypeSignatureParameter parameter : type.getTypeSignature().getParameters()) {
            if (!parameter.isNamedTypeSignature()) {
                throw new IllegalArgumentException(format("Expected all parameters to be named type, but got %s", parameter));
            }
            NamedTypeSignature namedTypeSignature = parameter.getNamedTypeSignature();
            if (namedTypeSignature.getName().isEmpty()) {
                throw new PrestoException(NOT_SUPPORTED, format("Anonymous row type is not supported in Hive. Please give each field a name: %s", type));
            }
            fieldNames.add(namedTypeSignature.getName().get());
        }
        return getStructTypeInfo(
                fieldNames.build(),
                type.getTypeParameters().stream()
                        .map(TypeConverter::toHiveTypeInfo)
                        .collect(toList()));
    }
    throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s", type));
}
 
Example 4
Source File: HiveTypeTranslator.java    From presto with Apache License 2.0
@Override
public TypeInfo translate(Type type)
{
    if (BOOLEAN.equals(type)) {
        return HIVE_BOOLEAN.getTypeInfo();
    }
    if (BIGINT.equals(type)) {
        return HIVE_LONG.getTypeInfo();
    }
    if (INTEGER.equals(type)) {
        return HIVE_INT.getTypeInfo();
    }
    if (SMALLINT.equals(type)) {
        return HIVE_SHORT.getTypeInfo();
    }
    if (TINYINT.equals(type)) {
        return HIVE_BYTE.getTypeInfo();
    }
    if (REAL.equals(type)) {
        return HIVE_FLOAT.getTypeInfo();
    }
    if (DOUBLE.equals(type)) {
        return HIVE_DOUBLE.getTypeInfo();
    }
    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        if (varcharType.isUnbounded()) {
            return HIVE_STRING.getTypeInfo();
        }
        if (varcharType.getBoundedLength() <= HiveVarchar.MAX_VARCHAR_LENGTH) {
            return getVarcharTypeInfo(varcharType.getBoundedLength());
        }
        throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s. Supported VARCHAR types: VARCHAR(<=%d), VARCHAR.", type, HiveVarchar.MAX_VARCHAR_LENGTH));
    }
    if (type instanceof CharType) {
        CharType charType = (CharType) type;
        int charLength = charType.getLength();
        if (charLength <= HiveChar.MAX_CHAR_LENGTH) {
            return getCharTypeInfo(charLength);
        }
        throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s. Supported CHAR types: CHAR(<=%d).",
                type, HiveChar.MAX_CHAR_LENGTH));
    }
    if (VARBINARY.equals(type)) {
        return HIVE_BINARY.getTypeInfo();
    }
    if (DATE.equals(type)) {
        return HIVE_DATE.getTypeInfo();
    }
    if (TIMESTAMP.equals(type)) {
        return HIVE_TIMESTAMP.getTypeInfo();
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
    }
    if (isArrayType(type)) {
        TypeInfo elementType = translate(type.getTypeParameters().get(0));
        return getListTypeInfo(elementType);
    }
    if (isMapType(type)) {
        TypeInfo keyType = translate(type.getTypeParameters().get(0));
        TypeInfo valueType = translate(type.getTypeParameters().get(1));
        return getMapTypeInfo(keyType, valueType);
    }
    if (isRowType(type)) {
        ImmutableList.Builder<String> fieldNames = ImmutableList.builder();
        for (TypeSignatureParameter parameter : type.getTypeSignature().getParameters()) {
            if (!parameter.isNamedTypeSignature()) {
                throw new IllegalArgumentException(format("Expected all parameters to be named type, but got %s", parameter));
            }
            NamedTypeSignature namedTypeSignature = parameter.getNamedTypeSignature();
            if (namedTypeSignature.getName().isEmpty()) {
                throw new PrestoException(NOT_SUPPORTED, format("Anonymous row type is not supported in Hive. Please give each field a name: %s", type));
            }
            fieldNames.add(namedTypeSignature.getName().get());
        }
        return getStructTypeInfo(
                fieldNames.build(),
                type.getTypeParameters().stream()
                        .map(this::translate)
                        .collect(toImmutableList()));
    }
    throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s", type));
}
 
Example 5
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public static ObjectInspector getRowColumnInspector(Type type)
{
    if (type.equals(BooleanType.BOOLEAN)) {
        return writableBooleanObjectInspector;
    }

    if (type.equals(BigintType.BIGINT)) {
        return writableLongObjectInspector;
    }

    if (type.equals(IntegerType.INTEGER)) {
        return writableIntObjectInspector;
    }

    if (type.equals(SmallintType.SMALLINT)) {
        return writableShortObjectInspector;
    }

    if (type.equals(TinyintType.TINYINT)) {
        return writableByteObjectInspector;
    }

    if (type.equals(RealType.REAL)) {
        return writableFloatObjectInspector;
    }

    if (type.equals(DoubleType.DOUBLE)) {
        return writableDoubleObjectInspector;
    }

    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        if (varcharType.isUnbounded()) {
            // Unbounded VARCHAR is not supported by Hive.
            // Values for such columns must be stored as STRING in Hive
            return writableStringObjectInspector;
        }
        if (varcharType.getBoundedLength() <= HiveVarchar.MAX_VARCHAR_LENGTH) {
            // VARCHAR columns with the length less than or equal to 65535 are supported natively by Hive
            return getPrimitiveWritableObjectInspector(getVarcharTypeInfo(varcharType.getBoundedLength()));
        }
    }

    if (isCharType(type)) {
        CharType charType = (CharType) type;
        int charLength = charType.getLength();
        return getPrimitiveWritableObjectInspector(getCharTypeInfo(charLength));
    }

    if (type.equals(VarbinaryType.VARBINARY)) {
        return writableBinaryObjectInspector;
    }

    if (type.equals(DateType.DATE)) {
        return writableDateObjectInspector;
    }

    if (type.equals(TimestampType.TIMESTAMP)) {
        return writableTimestampObjectInspector;
    }

    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return getPrimitiveWritableObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
    }

    if (isArrayType(type) || isMapType(type) || isRowType(type)) {
        return getJavaObjectInspector(type);
    }

    throw new IllegalArgumentException("unsupported type: " + type);
}
 
Example 6
Source File: HiveTestUDFImpls.java    From dremio-oss with Apache License 2.0
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null || arguments[0].get() == null) {
    return null;
  }

  Object input = arguments[0].get();
  switch(inputType) {
    case BOOLEAN:
      return ((BooleanObjectInspector)argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
    case BYTE:
      return new Byte(((ByteObjectInspector)argumentOI).get(input));
    case SHORT:
      return new Short(((ShortObjectInspector)argumentOI).get(input));
    case INT:
      return new Integer(((IntObjectInspector)argumentOI).get(input));
    case LONG:
      return new Long(((LongObjectInspector)argumentOI).get(input));
    case FLOAT:
      return new Float(((FloatObjectInspector)argumentOI).get(input));
    case DOUBLE:
      return new Double(((DoubleObjectInspector)argumentOI).get(input));
    case STRING:
      return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector)argumentOI);
    case BINARY:
      return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
    case VARCHAR:
      if (outputType == PrimitiveCategory.CHAR) {
        HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
        return new HiveChar(hiveVarchar.getValue(), HiveChar.MAX_CHAR_LENGTH);
      } else {
        return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
      }
    case CHAR:
      return PrimitiveObjectInspectorUtils.getHiveChar(input, (HiveCharObjectInspector) argumentOI);
    case DATE:
      return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
    case TIMESTAMP:
      return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
    case DECIMAL:
      // return type is a HiveVarchar
      HiveDecimal decimalValue =
          PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
      return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
  }

  throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}