org.apache.hadoop.hive.serde2.io.HiveCharWritable Java Examples

The following examples show how to use org.apache.hadoop.hive.serde2.io.HiveCharWritable. You can vote up the examples you find helpful or vote down those you don't, and follow the links above each example to view the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: CacheablePrimitiveObjectInspectorConverter.java — from the transport project (BSD 2-Clause "Simplified" License)
@Override
public Object convert(Object input) {
  // A null input always converts to a null output.
  if (input == null) {
    return null;
  }
  // Fresh writable holder that the output inspector populates and returns.
  final HiveCharWritable holder = new HiveCharWritable();
  return switch (inputOI.getPrimitiveCategory()) {
    // Booleans become the fixed strings "TRUE"/"FALSE".
    // NOTE(review): the -1 length presumably means "no explicit length" — confirm against HiveChar.
    case BOOLEAN -> outputOI.set(holder,
        ((BooleanObjectInspector) inputOI).get(input)
            ? new HiveChar("TRUE", -1) : new HiveChar("FALSE", -1));
    // Every other primitive category goes through the generic HiveChar conversion.
    default -> outputOI.set(holder, PrimitiveObjectInspectorUtils.getHiveChar(input, inputOI));
  };
}
 
Example #2
Source File: WritableTypeConverter.java — from the tajo project (Apache License 2.0)
/**
 * Converts a Tajo {@code Datum} into the corresponding Hadoop/Hive {@code Writable}.
 * Returns {@code null} for NULL_TYPE datums and throws for unsupported kinds.
 */
public static Writable convertDatum2Writable(Datum value) {
  return switch (value.kind()) {
    case INT1 -> new ByteWritable(value.asByte());
    case INT2 -> new ShortWritable(value.asInt2());
    case INT4 -> new IntWritable(value.asInt4());
    case INT8 -> new LongWritable(value.asInt8());

    case FLOAT4 -> new FloatWritable(value.asFloat4());
    case FLOAT8 -> new DoubleWritable(value.asFloat8());

    // NOTE: value should be DateDatum; DateWritable counts days from the Unix epoch,
    // so the Julian-date offset is subtracted.
    case DATE -> new DateWritable(value.asInt4() - DateTimeConstants.UNIX_EPOCH_JDATE);

    // NOTE: value should be TimestampDatum, stored as Julian time.
    case TIMESTAMP -> {
      TimestampWritable ts = new TimestampWritable();
      ts.setTime(DateTimeUtil.julianTimeToJavaTime(value.asInt8()));
      yield ts;
    }

    case CHAR -> {
      String chars = value.asChars();
      yield new HiveCharWritable(new HiveChar(chars, chars.length()));
    }
    case TEXT -> new Text(value.asChars());
    case VARBINARY -> new BytesWritable(value.asByteArray());

    case NULL_TYPE -> null;

    default -> throw new TajoRuntimeException(
        new NotImplementedException(TypeStringEncoder.encode(value.type())));
  };
}
 
Example #3
Source File: OrcTester.java — from the presto project (Apache License 2.0)
/**
 * Unwraps a value produced by the ORC record reader (a Hadoop/Hive writable or
 * container) into a plain Java value or Presto SQL wrapper for comparison.
 * Values of unrecognized types are returned unchanged.
 */
private static Object decodeRecordReaderValue(Type type, Object actualValue)
{
    if (actualValue instanceof BooleanWritable booleanWritable) {
        actualValue = booleanWritable.get();
    }
    else if (actualValue instanceof ByteWritable byteWritable) {
        actualValue = byteWritable.get();
    }
    else if (actualValue instanceof BytesWritable bytesWritable) {
        actualValue = new SqlVarbinary(bytesWritable.copyBytes());
    }
    else if (actualValue instanceof DateWritable dateWritable) {
        actualValue = new SqlDate(dateWritable.getDays());
    }
    else if (actualValue instanceof DoubleWritable doubleWritable) {
        actualValue = doubleWritable.get();
    }
    else if (actualValue instanceof FloatWritable floatWritable) {
        actualValue = floatWritable.get();
    }
    else if (actualValue instanceof IntWritable intWritable) {
        actualValue = intWritable.get();
    }
    else if (actualValue instanceof HiveCharWritable charWritable) {
        // CHAR values are compared in their space-padded form.
        actualValue = charWritable.getPaddedValue().toString();
    }
    else if (actualValue instanceof LongWritable longWritable) {
        actualValue = longWritable.get();
    }
    else if (actualValue instanceof ShortWritable shortWritable) {
        actualValue = shortWritable.get();
    }
    else if (actualValue instanceof HiveDecimalWritable decimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        // writable messes with the scale so rescale the values to the Presto type
        BigInteger rescaledValue = rescale(decimalWritable.getHiveDecimal().unscaledValue(), decimalWritable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    }
    else if (actualValue instanceof Text text) {
        actualValue = text.toString();
    }
    else if (actualValue instanceof TimestampWritable timestamp) {
        // Combine whole seconds and sub-second nanos into epoch millis.
        actualValue = sqlTimestampOf((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), SESSION);
    }
    else if (actualValue instanceof OrcStruct structObject) {
        // Flatten the struct's fields, then decode them recursively against the row type.
        List<Object> fields = new ArrayList<>();
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(OrcUtil.getFieldValue(structObject, fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    }
    else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    }
    else if (actualValue instanceof Map) {
        actualValue = decodeRecordReaderMap(type, (Map<?, ?>) actualValue);
    }
    return actualValue;
}
 
Example #4
Source File: HiveInspectors.java — from the flink project (Apache License 2.0)
/**
 * Returns the Hive {@link ObjectInspector} for a Java class or its Hadoop
 * writable counterpart (e.g. both {@code String.class} and {@code Text.class}
 * map to the string inspector).
 *
 * @param clazz Java or writable class to map to a Hive type
 * @return the object inspector for the matching Hive {@code TypeInfo}
 * @throws FlinkHiveUDFException if the class has no known Hive type mapping
 */
public static ObjectInspector getObjectInspector(Class<?> clazz) {
	// Fix: the parameter was a raw Class; Class<?> is source-compatible for
	// callers and avoids raw-type warnings.
	TypeInfo typeInfo;

	if (clazz.equals(String.class) || clazz.equals(Text.class)) {
		typeInfo = TypeInfoFactory.stringTypeInfo;
	} else if (clazz.equals(Boolean.class) || clazz.equals(BooleanWritable.class)) {
		typeInfo = TypeInfoFactory.booleanTypeInfo;
	} else if (clazz.equals(Byte.class) || clazz.equals(ByteWritable.class)) {
		typeInfo = TypeInfoFactory.byteTypeInfo;
	} else if (clazz.equals(Short.class) || clazz.equals(ShortWritable.class)) {
		typeInfo = TypeInfoFactory.shortTypeInfo;
	} else if (clazz.equals(Integer.class) || clazz.equals(IntWritable.class)) {
		typeInfo = TypeInfoFactory.intTypeInfo;
	} else if (clazz.equals(Long.class) || clazz.equals(LongWritable.class)) {
		typeInfo = TypeInfoFactory.longTypeInfo;
	} else if (clazz.equals(Float.class) || clazz.equals(FloatWritable.class)) {
		typeInfo = TypeInfoFactory.floatTypeInfo;
	} else if (clazz.equals(Double.class) || clazz.equals(DoubleWritable.class)) {
		typeInfo = TypeInfoFactory.doubleTypeInfo;
	} else if (clazz.equals(Date.class) || clazz.equals(DateWritable.class)) {
		typeInfo = TypeInfoFactory.dateTypeInfo;
	} else if (clazz.equals(Timestamp.class) || clazz.equals(TimestampWritable.class)) {
		typeInfo = TypeInfoFactory.timestampTypeInfo;
	} else if (clazz.equals(byte[].class) || clazz.equals(BytesWritable.class)) {
		typeInfo = TypeInfoFactory.binaryTypeInfo;
	} else if (clazz.equals(HiveChar.class) || clazz.equals(HiveCharWritable.class)) {
		typeInfo = TypeInfoFactory.charTypeInfo;
	} else if (clazz.equals(HiveVarchar.class) || clazz.equals(HiveVarcharWritable.class)) {
		typeInfo = TypeInfoFactory.varcharTypeInfo;
	} else if (clazz.equals(HiveDecimal.class) || clazz.equals(HiveDecimalWritable.class)) {
		typeInfo = TypeInfoFactory.decimalTypeInfo;
	} else {
		throw new FlinkHiveUDFException(
			String.format("Class %s is not supported yet", clazz.getName()));
	}

	// Delegate to the TypeInfo-based overload for the actual inspector lookup.
	return getObjectInspector(typeInfo);
}
 
Example #5
Source File: WritableTypeConverter.java — from the tajo project (Apache License 2.0)
/**
 * Maps a Hadoop/Hive {@code Writable} class (or any subclass of one) to the
 * corresponding Tajo {@code DataType}. A null class yields a null type.
 */
public static DataType convertWritableToTajoType(Class<? extends Writable> writableClass) throws UnsupportedDataTypeException {
  if (writableClass == null) {
    return null;
  }

  // Full supertype closure, so subclasses of a known writable also match.
  final Set<Class<?>> ancestry = ReflectionUtils.getAllSuperTypes(writableClass);

  // Check order matters: HiveCharWritable must be tested before Text.
  if (matches(writableClass, ancestry, ByteWritable.class)) {
    return builder.setType(Type.INT1).build();
  }
  if (matches(writableClass, ancestry, ShortWritable.class)) {
    return builder.setType(Type.INT2).build();
  }
  if (matches(writableClass, ancestry, IntWritable.class)) {
    return builder.setType(Type.INT4).build();
  }
  if (matches(writableClass, ancestry, LongWritable.class)) {
    return builder.setType(Type.INT8).build();
  }
  if (matches(writableClass, ancestry, HiveCharWritable.class)) {
    return builder.setType(Type.CHAR).build();
  }
  if (matches(writableClass, ancestry, Text.class)) {
    return builder.setType(Type.TEXT).build();
  }
  if (matches(writableClass, ancestry, FloatWritable.class)) {
    return builder.setType(Type.FLOAT4).build();
  }
  if (matches(writableClass, ancestry, DoubleWritable.class)) {
    return builder.setType(Type.FLOAT8).build();
  }
  if (matches(writableClass, ancestry, DateWritable.class)) {
    return builder.setType(Type.DATE).build();
  }
  if (matches(writableClass, ancestry, TimestampWritable.class)) {
    return builder.setType(Type.TIMESTAMP).build();
  }
  if (matches(writableClass, ancestry, BytesWritable.class)) {
    return builder.setType(Type.VARBINARY).build();
  }

  throw new UnsupportedDataTypeException(writableClass.getSimpleName());
}

// True when candidate either is, or inherits from, the target writable type.
private static boolean matches(Class<?> candidate, Set<Class<?>> ancestry, Class<?> target) {
  return candidate == target || ancestry.contains(target);
}