Java Code Examples for org.apache.hadoop.hive.common.type.HiveDecimal#create()

The following examples show how to use org.apache.hadoop.hive.common.type.HiveDecimal#create(). Each example is drawn from an open-source project; the source file, project, and license are listed above the code.
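
Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) of the HiveDecimal.create overloads that recur throughout this page. It assumes the Hive library providing org.apache.hadoop.hive.common.type.HiveDecimal is on the classpath; the class and variable names are illustrative only.

import java.math.BigDecimal;
import java.math.BigInteger;

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class HiveDecimalCreateSketch {
    public static void main(String[] args) {
        // From a BigDecimal value (the most common overload in the examples below).
        HiveDecimal fromBigDecimal = HiveDecimal.create(new BigDecimal("123.45"));

        // From an unscaled BigInteger plus a scale: 12345 with scale 2 represents 123.45.
        HiveDecimal fromUnscaled = HiveDecimal.create(BigInteger.valueOf(12345), 2);

        // From a String.
        HiveDecimal fromString = HiveDecimal.create("123.45");

        // Note: create(...) may return null when a value cannot be represented within
        // Hive's decimal precision limits, so callers often add a null check.
        System.out.println(fromBigDecimal + " " + fromUnscaled + " " + fromString);
    }
}
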
Example 1
Source File: TestParquetDecimalScaling.java    From presto with Apache License 2.0
private Object convertValue(String value)
{
    BigDecimal bigValue = new BigDecimal(value).setScale(scale, UNNECESSARY);

    if (!forceFixedLengthArray && precision > 0 && precision < 10) {
        return bigValue.unscaledValue().intValue();
    }

    if (!forceFixedLengthArray && precision >= 10 && precision < 18) {
        return bigValue.unscaledValue().longValue();
    }

    if (precision > 38 || precision < 0) {
        throw new IllegalArgumentException("Scale could not be greater than 38 or less than 0");
    }

    return HiveDecimal.create(bigValue);
}
 
Example 2
Source File: Converters.java    From HiveRunner with Apache License 2.0
@Override
public Object convert(@SuppressWarnings("rawtypes") Class type, Object value) {
  try {
    return HiveDecimal.create(new BigDecimal(value.toString()));
  } catch (NumberFormatException e) {
    throw new ConversionException(e);
  }
}
 
Example 3
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
private static HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position)
{
    BigInteger unscaledValue;
    if (decimalType.isShort()) {
        unscaledValue = BigInteger.valueOf(decimalType.getLong(block, position));
    }
    else {
        unscaledValue = Decimals.decodeUnscaledValue(decimalType.getSlice(block, position));
    }
    return HiveDecimal.create(unscaledValue, decimalType.getScale());
}
 
Example 4
Source File: OrcUtils.java    From spork with Apache License 2.0
@Override
public HiveDecimal getPrimitiveJavaObject(Object o) {
    if (o instanceof BigDecimal) {
        return o == null ? null : HiveDecimal.create((BigDecimal)o);
    } else { // BigInteger
        return o == null ? null : HiveDecimal.create((BigInteger)o);
    }
}
 
Example 5
Source File: OrcUtils.java    From spork with Apache License 2.0
@Override
public HiveDecimalWritable getPrimitiveWritableObject(Object o) {
    if (o instanceof BigDecimal) {
        return o == null ? null : new HiveDecimalWritable(HiveDecimal.create((BigDecimal)o));
    } else { // BigInteger
        return o == null ? null : new HiveDecimalWritable(HiveDecimal.create((BigInteger)o));
    }
}
 
Example 6
Source File: SqoopHCatImportHelper.java    From aliyun-maxcompute-data-collectors with Apache License 2.0
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      HiveChar hc = new HiveChar(val.toString(), cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
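
A side note on the MathContext.DECIMAL128 used above: it limits the parsed BigDecimal to 34 significant digits (IEEE decimal128), which sits inside Hive's decimal precision ceiling of 38 digits, so the subsequent HiveDecimal.create call does not need to round further. A hedged, self-contained illustration with made-up values, independent of Sqoop:

import java.math.BigDecimal;
import java.math.MathContext;

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DecimalContextSketch {
    public static void main(String[] args) {
        // DECIMAL128 rounds the parsed value to at most 34 significant digits
        // before it is handed to HiveDecimal.create.
        BigDecimal bd = new BigDecimal(
            "1.23456789012345678901234567890123456789", MathContext.DECIMAL128);
        HiveDecimal hd = HiveDecimal.create(bd);
        System.out.println(bd.precision()); // 34
        System.out.println(hd);
    }
}
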
 
Example 7
Source File: ExcelSerde.java    From hadoopoffice with Apache License 2.0
/**
 * Deserializes an object of type {@link #getSerializedClass()}.
 * Note: Some Java types, such as BigDecimal, are converted to Hive-specific datatypes.
 *
 * @param arg0 object of type {@link #getSerializedClass()}
 * @return array containing primitive Java objects (e.g. String, Byte, Integer) and Hive objects (e.g. HiveDecimal, HiveVarchar)
 */
@Override
public Object deserialize(Writable arg0) throws SerDeException {
	if ((arg0 == null) || (arg0 instanceof NullWritable)) {
		return this.nullRow;
	}
	Object[] primitiveRow = this.readConverter
			.getDataAccordingToSchema((SpreadSheetCellDAO[]) ((ArrayWritable) arg0).get());
	// check if supported type and convert to hive type, if necessary
	for (int i = 0; i < primitiveRow.length; i++) {
		PrimitiveTypeInfo ti = (PrimitiveTypeInfo) this.columnTypes.get(i);
		switch (ti.getPrimitiveCategory()) {
		case STRING:
		case BYTE:
		case SHORT:
		case INT:
		case LONG:
		case FLOAT:
		case DOUBLE:
		case BOOLEAN:
		case TIMESTAMP:
			// already primitive Java types that Hive accepts as-is; no conversion needed
			break;
		case DATE:
			if (primitiveRow[i] != null) {
				primitiveRow[i] = new java.sql.Date(((Date) primitiveRow[i]).getTime());
			}
			break;
		case DECIMAL:
			if (primitiveRow[i] != null) {
				primitiveRow[i] = HiveDecimal.create((BigDecimal) primitiveRow[i]);
			}
			break;
		case CHAR:
			if (primitiveRow[i] != null) {
				primitiveRow[i] = new HiveChar((String) primitiveRow[i], ((CharTypeInfo) ti).getLength());
			}
			break;
		case VARCHAR:
			if (primitiveRow[i] != null) {
				primitiveRow[i] = new HiveVarchar((String) primitiveRow[i], ((VarcharTypeInfo) ti).getLength());
			}
			break;
		default:
			throw new SerDeException("Unsupported type " + ti);
		}
	}
	if (this.columnNames.size()>primitiveRow.length) { // can happen in rare cases where a row does not contain all columns
		Object[] tempRow = new Object[this.columnNames.size()];
		for (int i=0;i<primitiveRow.length;i++) {
			tempRow[i]=primitiveRow[i];
		}
		primitiveRow=tempRow;
	}
	return primitiveRow;
}
 
Example 8
Source File: OrcTester.java    From spliceengine with GNU Affero General Public License v3.0
private static Object preprocessWriteValueOld(TypeInfo typeInfo, Object value) throws IOException
    {
        if (value == null) {
            return null;
        }
        switch (typeInfo.getCategory()) {
            case PRIMITIVE:
                PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
                switch (primitiveCategory) {
                    case BOOLEAN:
                        return value;
                    case BYTE:
                        return ((Number) value).byteValue();
                    case SHORT:
                        return ((Number) value).shortValue();
                    case INT:
                        return ((Number) value).intValue();
                    case LONG:
                        return ((Number) value).longValue();
                    case FLOAT:
                        return ((Number) value).floatValue();
                    case DOUBLE:
                        return ((Number) value).doubleValue();
                    case DECIMAL:
                        return HiveDecimal.create(((Decimal) value).toBigDecimal().bigDecimal());
                    case STRING:
                        return value;
                    case CHAR:
                        return new HiveChar(value.toString(), ((CharTypeInfo) typeInfo).getLength());
                    case DATE:
                        LocalDate localDate = LocalDate.ofEpochDay((int)value);
                        ZonedDateTime zonedDateTime = localDate.atStartOfDay(ZoneId.systemDefault());

                        long millis = zonedDateTime.toEpochSecond() * 1000;
                        Date date = new Date(0);
                        // millis must be set separately to avoid masking
                        date.setTime(millis);
                        return date;
                    case TIMESTAMP:
                        long millisUtc = ((Long)value).intValue();
                        return new Timestamp(millisUtc);
                    case BINARY:
                        return ((String) value).getBytes();
//                        return (byte[])value;
                }
                break;
            case MAP:
                MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
                TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
                Map<Object, Object> newMap = new HashMap<>();
                for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
                    newMap.put(preprocessWriteValueOld(keyTypeInfo, entry.getKey()), preprocessWriteValueOld(valueTypeInfo, entry.getValue()));
                }
                return newMap;
            case LIST:
                ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
                TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
                List<Object> newList = new ArrayList<>(((Collection<?>) value).size());
                for (Object element : (Iterable<?>) value) {
                    newList.add(preprocessWriteValueOld(elementTypeInfo, element));
                }
                return newList;
            case STRUCT:
                StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                List<?> fieldValues = (List<?>) value;
                List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
                List<Object> newStruct = new ArrayList<>();
                for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
                    newStruct.add(preprocessWriteValueOld(fieldTypeInfos.get(fieldId), fieldValues.get(fieldId)));
                }
                return newStruct;
        }
        throw new IOException(format("Unsupported Hive type: %s", typeInfo));
    }
 
Example 9
Source File: OrcSplitReader.java    From flink with Apache License 2.0
Object castLiteral(Serializable literal) {

			switch (literalType) {
				case LONG:
					if (literal instanceof Byte) {
						return new Long((Byte) literal);
					} else if (literal instanceof Short) {
						return new Long((Short) literal);
					} else if (literal instanceof Integer) {
						return new Long((Integer) literal);
					} else if (literal instanceof Long) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a LONG column requires an integer " +
								"literal, i.e., Byte, Short, Integer, or Long.");
					}
				case FLOAT:
					if (literal instanceof Float) {
						return new Double((Float) literal);
					} else if (literal instanceof Double) {
						return literal;
					} else if (literal instanceof BigDecimal) {
						return ((BigDecimal) literal).doubleValue();
					} else {
						throw new IllegalArgumentException("A predicate on a FLOAT column requires a floating " +
								"literal, i.e., Float or Double.");
					}
				case STRING:
					if (literal instanceof String) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a STRING column requires a String literal.");
					}
				case BOOLEAN:
					if (literal instanceof Boolean) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a BOOLEAN column requires a Boolean literal.");
					}
				case DATE:
					if (literal instanceof Date) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a DATE column requires a java.sql.Date literal.");
					}
				case TIMESTAMP:
					if (literal instanceof Timestamp) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a TIMESTAMP column requires a java.sql.Timestamp literal.");
					}
				case DECIMAL:
					if (literal instanceof BigDecimal) {
						return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
					} else {
						throw new IllegalArgumentException("A predicate on a DECIMAL column requires a BigDecimal literal.");
					}
				default:
					throw new IllegalArgumentException("Unknown literal type " + literalType);
			}
		}
 
Example 10
Source File: HiveStatsUtil.java    From flink with Apache License 2.0
private static HiveDecimal toHiveDecimal(Decimal decimal) {
	return HiveDecimal.create(new BigInteger(decimal.getUnscaled()), decimal.getScale());
}
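
Example 10 rebuilds the decimal from an unscaled value plus a scale, the usual BigDecimal convention where the numeric value is unscaledValue × 10^-scale. Assuming getUnscaled() yields the two's-complement byte form accepted by the BigInteger(byte[]) constructor, a hedged round-trip sketch with made-up values, independent of Flink's Decimal class, might look like this:

import java.math.BigDecimal;
import java.math.BigInteger;

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class UnscaledBytesSketch {
    public static void main(String[] args) {
        BigDecimal original = new BigDecimal("1234.567");

        // Serialize into two pieces: unscaled two's-complement bytes plus a scale.
        byte[] unscaledBytes = original.unscaledValue().toByteArray();
        int scale = original.scale();

        // Rebuild a HiveDecimal the same way Example 10 does.
        HiveDecimal hd = HiveDecimal.create(new BigInteger(unscaledBytes), scale);
        System.out.println(hd); // 1234.567
    }
}
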
 
Example 11
Source File: CobolNumberField.java    From Cobol-to-Hive with Apache License 2.0
@Override
	public Object deserialize(byte[] rowBytes) throws CobolSerdeException {
		byte[] temp = transcodeField(super.getBytes(rowBytes));
		String s1 = new String(temp);

		if (this.compType > 0) {
			if (this.compType == 3) {
				s1 = unpackData(super.getBytes(rowBytes), this.decimalLocation);
			}else if(this.compType == 4){
				s1 = getBinary(super.getBytes(rowBytes), this.decimalLocation);
			}
		} //} else if (this.decimalLocation > 0) {
		else {
			//Now calling unpackSign on all numeric fields for which compType resolves to 0.
			//
			//The function will check to see if the least significant byte has been overpunched with a sign and
			//return a negative number if a negative sign is found.
			s1 = unpackSign(super.getBytes(rowBytes), this.decimalLocation);
		}
//		else if (this.decimalLocation > 0) {
//			s1 = s1.substring(0, this.length * this.divideFactor
//					- this.decimalLocation)
//					+ "."
//					+ s1.substring(this.length * this.divideFactor
//							- this.decimalLocation);
//		}
//		System.out.println(name + "\t - " + s1 + "\t:" + offset + "\t@"
//				+ length);
		try {
			switch (((PrimitiveTypeInfo) this.typeInfo).getPrimitiveCategory()) {
			case LONG:
				return Long.parseLong(s1.trim());
			case SHORT:
				return Short.parseShort(s1.trim());
			case INT:
				return Integer.parseInt(s1.trim());
			case BYTE:
				return Byte.parseByte(s1.trim());
			case FLOAT:
				return Float.parseFloat(s1.trim());
			case DOUBLE:
				return Double.parseDouble(s1.trim());
			case DECIMAL:
				BigDecimal bd = new BigDecimal(s1);
				HiveDecimal dec = HiveDecimal.create(bd);
				JavaHiveDecimalObjectInspector oi = (JavaHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory
						.getPrimitiveJavaObjectInspector((DecimalTypeInfo) this.typeInfo);
				return oi.set(null, dec);
			}
		} catch (Exception e) {
			return null; // if cannot be converted make it null
		}
		return null;

	}
 
Example 12
Source File: HiveJsonStructReader.java    From incubator-hivemall with Apache License 2.0
private Object getObjectOfCorrespondingPrimitiveType(String s, PrimitiveObjectInspector oi)
        throws IOException {
    PrimitiveTypeInfo typeInfo = oi.getTypeInfo();
    if (writeablePrimitives) {
        Converter c = ObjectInspectorConverters.getConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
        return c.convert(s);
    }

    switch (typeInfo.getPrimitiveCategory()) {
        case INT:
            return Integer.valueOf(s);
        case BYTE:
            return Byte.valueOf(s);
        case SHORT:
            return Short.valueOf(s);
        case LONG:
            return Long.valueOf(s);
        case BOOLEAN:
            return (s.equalsIgnoreCase("true"));
        case FLOAT:
            return Float.valueOf(s);
        case DOUBLE:
            return Double.valueOf(s);
        case STRING:
            return s;
        case BINARY:
            try {
                String t = Text.decode(s.getBytes(), 0, s.getBytes().length);
                return t.getBytes();
            } catch (CharacterCodingException e) {
                LOG.warn("Error generating json binary type from object.", e);
                return null;
            }
        case DATE:
            return Date.valueOf(s);
        case TIMESTAMP:
            return Timestamp.valueOf(s);
        case DECIMAL:
            return HiveDecimal.create(s);
        case VARCHAR:
            return new HiveVarchar(s, ((BaseCharTypeInfo) typeInfo).getLength());
        case CHAR:
            return new HiveChar(s, ((BaseCharTypeInfo) typeInfo).getLength());
        default:
            throw new IOException(
                "Could not convert from string to " + typeInfo.getPrimitiveCategory());
    }
}
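
The DECIMAL branch above uses the String overload directly. A brief hedged sketch of calling it in isolation; as with the other overloads, the result is worth null-checking because create can return null for input it cannot parse or represent:

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class StringOverloadSketch {
    public static void main(String[] args) {
        HiveDecimal hd = HiveDecimal.create("42.50");
        if (hd == null) {
            // create(...) may return null for values it cannot parse or represent,
            // so defensive callers handle this case explicitly.
            throw new IllegalArgumentException("not a representable decimal");
        }
        System.out.println(hd);
    }
}
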
 
Example 13
Source File: JsonSerdeUtils.java    From incubator-hivemall with Apache License 2.0
@Nonnull
private static Object getObjectOfCorrespondingPrimitiveType(String s,
        PrimitiveTypeInfo mapKeyType) throws IOException {
    switch (Type.getPrimitiveHType(mapKeyType)) {
        case INT:
            return Integer.valueOf(s);
        case TINYINT:
            return Byte.valueOf(s);
        case SMALLINT:
            return Short.valueOf(s);
        case BIGINT:
            return Long.valueOf(s);
        case BOOLEAN:
            return (s.equalsIgnoreCase("true"));
        case FLOAT:
            return Float.valueOf(s);
        case DOUBLE:
            return Double.valueOf(s);
        case STRING:
            return s;
        case BINARY:
            try {
                String t = Text.decode(s.getBytes(), 0, s.getBytes().length);
                return t.getBytes();
            } catch (CharacterCodingException e) {
                throw new IOException("Error generating json binary type from object.", e);
            }
        case DATE:
            return Date.valueOf(s);
        case TIMESTAMP:
            return Timestamp.valueOf(s);
        case DECIMAL:
            return HiveDecimal.create(s);
        case VARCHAR:
            return new HiveVarchar(s, ((BaseCharTypeInfo) mapKeyType).getLength());
        case CHAR:
            return new HiveChar(s, ((BaseCharTypeInfo) mapKeyType).getLength());
        default:
            throw new IOException(
                "Could not convert from string to map type " + mapKeyType.getTypeName());
    }
}
 
Example 14
Source File: OrcTester.java    From presto with Apache License 2.0
private static Object preprocessWriteValueHive(Type type, Object value)
{
    if (value == null) {
        return null;
    }

    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        int days = ((SqlDate) value).getDays();
        LocalDate localDate = LocalDate.ofEpochDay(days);
        ZonedDateTime zonedDateTime = localDate.atStartOfDay(ZoneId.systemDefault());

        long millis = SECONDS.toMillis(zonedDateTime.toEpochSecond());
        Date date = new Date(0);
        // millis must be set separately to avoid masking
        date.setTime(millis);
        return date;
    }
    if (type.equals(TIMESTAMP)) {
        long millisUtc = ((SqlTimestamp) value).getMillisUtc();
        return new Timestamp(millisUtc);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueHive(elementType, element))
                .collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
 
Example 15
Source File: ORCSearchArgumentGenerator.java    From dremio-oss with Apache License 2.0
private static HiveDecimalWritable getDecimal(RexLiteral literal) {
  return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal.getValue()));
}
 
Example 16
Source File: OrcRowInputFormat.java    From flink with Apache License 2.0
Object castLiteral(Serializable literal) {

			switch (literalType) {
				case LONG:
					if (literal instanceof Byte) {
						return new Long((Byte) literal);
					} else if (literal instanceof Short) {
						return new Long((Short) literal);
					} else if (literal instanceof Integer) {
						return new Long((Integer) literal);
					} else if (literal instanceof Long) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a LONG column requires an integer " +
							"literal, i.e., Byte, Short, Integer, or Long.");
					}
				case FLOAT:
					if (literal instanceof Float) {
						return new Double((Float) literal);
					} else if (literal instanceof Double) {
						return literal;
					} else if (literal instanceof BigDecimal) {
						return ((BigDecimal) literal).doubleValue();
					} else {
						throw new IllegalArgumentException("A predicate on a FLOAT column requires a floating " +
							"literal, i.e., Float or Double.");
					}
				case STRING:
					if (literal instanceof String) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a STRING column requires a String literal.");
					}
				case BOOLEAN:
					if (literal instanceof Boolean) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a BOOLEAN column requires a Boolean literal.");
					}
				case DATE:
					if (literal instanceof Date) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a DATE column requires a java.sql.Date literal.");
					}
				case TIMESTAMP:
					if (literal instanceof Timestamp) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a TIMESTAMP column requires a java.sql.Timestamp literal.");
					}
				case DECIMAL:
					if (literal instanceof BigDecimal) {
						return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
					} else {
						throw new IllegalArgumentException("A predicate on a DECIMAL column requires a BigDecimal literal.");
					}
				default:
					throw new IllegalArgumentException("Unknown literal type " + literalType);
			}
		}
 
Example 17
Source File: OrcRowInputFormat.java    From Flink-CEPplus with Apache License 2.0
Object castLiteral(Serializable literal) {

			switch (literalType) {
				case LONG:
					if (literal instanceof Byte) {
						return new Long((Byte) literal);
					} else if (literal instanceof Short) {
						return new Long((Short) literal);
					} else if (literal instanceof Integer) {
						return new Long((Integer) literal);
					} else if (literal instanceof Long) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a LONG column requires an integer " +
							"literal, i.e., Byte, Short, Integer, or Long.");
					}
				case FLOAT:
					if (literal instanceof Float) {
						return new Double((Float) literal);
					} else if (literal instanceof Double) {
						return literal;
					} else if (literal instanceof BigDecimal) {
						return ((BigDecimal) literal).doubleValue();
					} else {
						throw new IllegalArgumentException("A predicate on a FLOAT column requires a floating " +
							"literal, i.e., Float or Double.");
					}
				case STRING:
					if (literal instanceof String) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a STRING column requires a String literal.");
					}
				case BOOLEAN:
					if (literal instanceof Boolean) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a BOOLEAN column requires a Boolean literal.");
					}
				case DATE:
					if (literal instanceof Date) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a DATE column requires a java.sql.Date literal.");
					}
				case TIMESTAMP:
					if (literal instanceof Timestamp) {
						return literal;
					} else {
						throw new IllegalArgumentException("A predicate on a TIMESTAMP column requires a java.sql.Timestamp literal.");
					}
				case DECIMAL:
					if (literal instanceof BigDecimal) {
						return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
					} else {
						throw new IllegalArgumentException("A predicate on a DECIMAL column requires a BigDecimal literal.");
					}
				default:
					throw new IllegalArgumentException("Unknown literal type " + literalType);
			}
		}
 
Example 18
Source File: RcFileTester.java    From presto with Apache License 2.0
private static Object preprocessWriteValueOld(Type type, Object value)
{
    if (value == null) {
        return null;
    }

    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        int days = ((SqlDate) value).getDays();
        LocalDate localDate = LocalDate.ofEpochDay(days);
        ZonedDateTime zonedDateTime = localDate.atStartOfDay(ZoneId.systemDefault());

        long millis = zonedDateTime.toEpochSecond() * 1000;
        Date date = new Date(0);
        // millis must be set separately to avoid masking
        date.setTime(millis);
        return date;
    }
    if (type.equals(TIMESTAMP)) {
        long millisUtc = (int) ((SqlTimestamp) value).getMillisUtc();
        return new Timestamp(millisUtc);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueOld(elementType, element))
                .collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(keyType, entry.getKey()), preprocessWriteValueOld(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
 
Example 19
Source File: Row.java    From presto with Apache License 2.0
private static Object nativeContainerToOrcValue(Type type, Object nativeValue)
{
    if (nativeValue == null) {
        return null;
    }
    if (type instanceof DecimalType) {
        BigInteger unscaledValue;
        DecimalType decimalType = (DecimalType) type;
        if (decimalType.isShort()) {
            unscaledValue = BigInteger.valueOf((long) nativeValue);
        }
        else {
            unscaledValue = Decimals.decodeUnscaledValue((Slice) nativeValue);
        }
        return HiveDecimal.create(unscaledValue, decimalType.getScale());
    }
    if (type.getJavaType() == boolean.class) {
        return nativeValue;
    }
    if (type.getJavaType() == long.class) {
        return nativeValue;
    }
    if (type.getJavaType() == double.class) {
        return nativeValue;
    }
    if (type.getJavaType() == Slice.class) {
        Slice slice = (Slice) nativeValue;
        return type instanceof VarcharType ? slice.toStringUtf8() : slice.getBytes();
    }
    if (isArrayType(type)) {
        Block arrayBlock = (Block) nativeValue;
        Type elementType = type.getTypeParameters().get(0);
        List<Object> list = new ArrayList<>();
        for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
            list.add(nativeContainerToOrcValue(elementType, getNativeContainerValue(elementType, arrayBlock, i)));
        }
        return list;
    }
    if (isMapType(type)) {
        Block mapBlock = (Block) nativeValue;
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> map = new HashMap<>();
        for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
            Object key = nativeContainerToOrcValue(keyType, getNativeContainerValue(keyType, mapBlock, i));
            Object value = nativeContainerToOrcValue(valueType, getNativeContainerValue(valueType, mapBlock, i + 1));
            map.put(key, value);
        }
        return map;
    }
    throw new PrestoException(GENERIC_INTERNAL_ERROR, "Unimplemented type: " + type);
}