org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector Java Examples
The following examples show how to use
org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector.
Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
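Before the project snippets, here is a minimal, self-contained sketch of the two calls most of the examples below revolve around: getPrimitiveJavaObject(Object) to unbox a HiveDecimal, and bigDecimalValue() to get a plain java.math.BigDecimal. The class wrapper, variable names, and sample value are illustrative, not taken from any of the projects.

import java.math.BigDecimal;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class HiveDecimalInspectorSketch {
    public static void main(String[] args) {
        // Hive hands column values around as opaque Objects; the inspector decodes them.
        HiveDecimalObjectInspector oi =
                PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector;
        Object columnValue = new HiveDecimalWritable(HiveDecimal.create("123.45"));

        HiveDecimal decimal = oi.getPrimitiveJavaObject(columnValue);  // HiveDecimal 123.45
        BigDecimal bigDecimal = decimal.bigDecimalValue();             // plain java.math.BigDecimal
        System.out.println(bigDecimal);                                // prints 123.45
    }
}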
Example #1
Source File: OrcFlowFileWriter.java From localization_nifi with Apache License 2.0
@Override
void write(Object obj) throws IOException {
    super.write(obj);
    if (obj != null) {
        HiveDecimal decimal = ((HiveDecimalObjectInspector) inspector).getPrimitiveJavaObject(obj);
        if (decimal == null) {
            return;
        }
        SerializationUtils.writeBigInteger(valueStream, decimal.unscaledValue());
        scaleStream.write(decimal.scale());
        indexStatistics.updateDecimal(decimal);
        if (createBloomFilter) {
            bloomFilter.addString(decimal.toString());
        }
    }
}
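Note that the writer does not store the decimal as text: it splits each value into an unscaled BigInteger (written to valueStream) and an int scale (written to scaleStream). A sketch of that decomposition in isolation, with an illustrative value:

// 123.45 splits into unscaled value 12345 and scale 2
org.apache.hadoop.hive.common.type.HiveDecimal decimal =
        org.apache.hadoop.hive.common.type.HiveDecimal.create("123.45");
java.math.BigInteger unscaled = decimal.unscaledValue();  // 12345 -> valueStream
int scale = decimal.scale();                              // 2     -> scaleStream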
Example #2
Source File: OrcFlowFileWriter.java From nifi with Apache License 2.0
@Override
void write(Object obj) throws IOException {
    super.write(obj);
    if (obj != null) {
        HiveDecimal decimal = ((HiveDecimalObjectInspector) inspector).getPrimitiveJavaObject(obj);
        if (decimal == null) {
            return;
        }
        SerializationUtils.writeBigInteger(valueStream, decimal.unscaledValue());
        scaleStream.write(decimal.scale());
        indexStatistics.updateDecimal(decimal);
        if (createBloomFilter) {
            bloomFilter.addString(decimal.toString());
        }
    }
}
Example #3
Source File: SerDeUtils.java From presto with Apache License 2.0
private static void serializePrimitive(Type type, BlockBuilder builder, Object object, PrimitiveObjectInspector inspector)
{
    requireNonNull(builder, "parent builder is null");

    if (object == null) {
        builder.appendNull();
        return;
    }

    switch (inspector.getPrimitiveCategory()) {
        case BOOLEAN:
            BooleanType.BOOLEAN.writeBoolean(builder, ((BooleanObjectInspector) inspector).get(object));
            return;
        case BYTE:
            TinyintType.TINYINT.writeLong(builder, ((ByteObjectInspector) inspector).get(object));
            return;
        case SHORT:
            SmallintType.SMALLINT.writeLong(builder, ((ShortObjectInspector) inspector).get(object));
            return;
        case INT:
            IntegerType.INTEGER.writeLong(builder, ((IntObjectInspector) inspector).get(object));
            return;
        case LONG:
            BigintType.BIGINT.writeLong(builder, ((LongObjectInspector) inspector).get(object));
            return;
        case FLOAT:
            RealType.REAL.writeLong(builder, floatToRawIntBits(((FloatObjectInspector) inspector).get(object)));
            return;
        case DOUBLE:
            DoubleType.DOUBLE.writeDouble(builder, ((DoubleObjectInspector) inspector).get(object));
            return;
        case STRING:
            type.writeSlice(builder, Slices.utf8Slice(((StringObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
        case VARCHAR:
            type.writeSlice(builder, Slices.utf8Slice(((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(object).getValue()));
            return;
        case CHAR:
            CharType charType = (CharType) type;
            HiveChar hiveChar = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(object);
            type.writeSlice(builder, truncateToLengthAndTrimSpaces(Slices.utf8Slice(hiveChar.getValue()), charType.getLength()));
            return;
        case DATE:
            DateType.DATE.writeLong(builder, formatDateAsLong(object, (DateObjectInspector) inspector));
            return;
        case TIMESTAMP:
            TimestampType.TIMESTAMP.writeLong(builder, formatTimestampAsLong(object, (TimestampObjectInspector) inspector));
            return;
        case BINARY:
            VARBINARY.writeSlice(builder, Slices.wrappedBuffer(((BinaryObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            HiveDecimalWritable hiveDecimal = ((HiveDecimalObjectInspector) inspector).getPrimitiveWritableObject(object);
            if (decimalType.isShort()) {
                decimalType.writeLong(builder, DecimalUtils.getShortDecimalValue(hiveDecimal, decimalType.getScale()));
            }
            else {
                decimalType.writeSlice(builder, DecimalUtils.getLongDecimalValue(hiveDecimal, decimalType.getScale()));
            }
            return;
    }
    throw new RuntimeException("Unknown primitive type: " + inspector.getPrimitiveCategory());
}
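The DECIMAL branch forks because Presto has two physical layouts for decimals: values with precision up to 18 digits are stored as a single unscaled long ("short" decimals), while wider values are stored as a 128-bit slice. A sketch of the short-decimal idea using only java.math; the sample value and hard-coded scale are illustrative, and this is not Presto's DecimalUtils implementation:

// represent 123.45 as a short decimal for a decimal(10, 2) column:
// rescale to the column scale, then take the unscaled value as a long
java.math.BigDecimal value = new java.math.BigDecimal("123.45");
long shortDecimal = value.setScale(2, java.math.RoundingMode.HALF_UP)
        .unscaledValue()
        .longValueExact();  // 12345: the unscaled long representing 123.45 at scale 2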
Example #4
Source File: CacheablePrimitiveObjectInspectorConverter.java From transport with BSD 2-Clause "Simplified" License
public Text convert(Object input) {
    if (input == null) {
        return null;
    }
    Text t = new Text();

    switch (inputOI.getPrimitiveCategory()) {
        case VOID:
            return null;
        case BOOLEAN:
            t.set(((BooleanObjectInspector) inputOI).get(input) ? trueBytes : falseBytes);
            return t;
        case BYTE:
            out.reset();
            LazyInteger.writeUTF8NoException(out, ((ByteObjectInspector) inputOI).get(input));
            t.set(out.getData(), 0, out.getLength());
            return t;
        case SHORT:
            out.reset();
            LazyInteger.writeUTF8NoException(out, ((ShortObjectInspector) inputOI).get(input));
            t.set(out.getData(), 0, out.getLength());
            return t;
        case INT:
            out.reset();
            LazyInteger.writeUTF8NoException(out, ((IntObjectInspector) inputOI).get(input));
            t.set(out.getData(), 0, out.getLength());
            return t;
        case LONG:
            out.reset();
            LazyLong.writeUTF8NoException(out, ((LongObjectInspector) inputOI).get(input));
            t.set(out.getData(), 0, out.getLength());
            return t;
        case FLOAT:
            t.set(String.valueOf(((FloatObjectInspector) inputOI).get(input)));
            return t;
        case DOUBLE:
            t.set(String.valueOf(((DoubleObjectInspector) inputOI).get(input)));
            return t;
        case STRING:
            if (inputOI.preferWritable()) {
                t.set(((StringObjectInspector) inputOI).getPrimitiveWritableObject(input));
            } else {
                t.set(((StringObjectInspector) inputOI).getPrimitiveJavaObject(input));
            }
            return t;
        case CHAR:
            // when converting from char, the value should be stripped of any trailing spaces.
            if (inputOI.preferWritable()) {
                // char text value is already stripped of trailing space
                t.set(((HiveCharObjectInspector) inputOI).getPrimitiveWritableObject(input).getStrippedValue());
            } else {
                t.set(((HiveCharObjectInspector) inputOI).getPrimitiveJavaObject(input).getStrippedValue());
            }
            return t;
        case VARCHAR:
            if (inputOI.preferWritable()) {
                t.set(((HiveVarcharObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
            } else {
                t.set(((HiveVarcharObjectInspector) inputOI).getPrimitiveJavaObject(input).toString());
            }
            return t;
        case DATE:
            t.set(((DateObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
            return t;
        case TIMESTAMP:
            t.set(((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
            return t;
        case BINARY:
            BinaryObjectInspector binaryOI = (BinaryObjectInspector) inputOI;
            if (binaryOI.preferWritable()) {
                BytesWritable bytes = binaryOI.getPrimitiveWritableObject(input);
                t.set(bytes.getBytes(), 0, bytes.getLength());
            } else {
                t.set(binaryOI.getPrimitiveJavaObject(input));
            }
            return t;
        case DECIMAL:
            t.set(((HiveDecimalObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
            return t;
        default:
            throw new RuntimeException("Hive 2 Internal error: type = " + inputOI.getTypeName());
    }
}
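Here the DECIMAL case goes through the writable object rather than the Java object; HiveDecimalWritable.toString() yields the same decimal string either way. The branch in isolation, with an illustrative inspector and input value:

org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector inputOI =
        org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector;
Object input = new org.apache.hadoop.hive.serde2.io.HiveDecimalWritable(
        org.apache.hadoop.hive.common.type.HiveDecimal.create("123.45"));
org.apache.hadoop.io.Text t = new org.apache.hadoop.io.Text();
t.set(inputOI.getPrimitiveWritableObject(input).toString());  // t holds "123.45"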
Example #5
Source File: HiveColumnarSerdeResolver.java From pxf with Apache License 2.0
private void resolvePrimitive(Object o, PrimitiveObjectInspector oi) throws IOException {
    if (!firstColumn) {
        builder.append(delimiter);
    }

    if (o == null) {
        builder.append(nullChar);
    } else {
        switch (oi.getPrimitiveCategory()) {
            case BOOLEAN:
                builder.append(((BooleanObjectInspector) oi).get(o));
                break;
            case SHORT:
                builder.append(((ShortObjectInspector) oi).get(o));
                break;
            case INT:
                builder.append(((IntObjectInspector) oi).get(o));
                break;
            case LONG:
                builder.append(((LongObjectInspector) oi).get(o));
                break;
            case FLOAT:
                builder.append(((FloatObjectInspector) oi).get(o));
                break;
            case DOUBLE:
                builder.append(((DoubleObjectInspector) oi).get(o));
                break;
            case DECIMAL:
                builder.append(((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o).bigDecimalValue());
                break;
            case STRING:
                builder.append(((StringObjectInspector) oi).getPrimitiveJavaObject(o));
                break;
            case BINARY:
                byte[] bytes = ((BinaryObjectInspector) oi).getPrimitiveJavaObject(o);
                Utilities.byteArrayToOctalString(bytes, builder);
                break;
            case TIMESTAMP:
                builder.append(((TimestampObjectInspector) oi).getPrimitiveJavaObject(o));
                break;
            case BYTE: /* TINYINT */
                builder.append(Short.valueOf(((ByteObjectInspector) oi).get(o)));
                break;
            default:
                throw new UnsupportedTypeException(oi.getTypeName() + " conversion is not supported by HiveColumnarSerdeResolver");
        }
    }
    firstColumn = false;
}
Example #6
Source File: HiveDecimalObjectConverter.java From aliyun-maxcompute-data-collectors with Apache License 2.0
@Override
public Object convert(ObjectInspector objectInspector, Object o, TypeInfo odpsTypeInfo) {
    HiveDecimalObjectInspector hiveDecimalObjectInspector = (HiveDecimalObjectInspector) objectInspector;
    return hiveDecimalObjectInspector.getPrimitiveJavaObject(o).bigDecimalValue();
}
Example #7
Source File: HiveFieldConverter.java From dremio-oss with Apache License 2.0
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
    DecimalUtility.writeBigDecimalToArrowBuf(
            ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(hiveFieldValue)
                    .bigDecimalValue()
                    .setScale(holder.scale, RoundingMode.HALF_UP),
            holder.buffer, holder.start);
    ((DecimalVector) outputVV).setSafe(outputIndex, 1, 0, holder.buffer);
}
Example #8
Source File: HiveTestUDFImpls.java From dremio-oss with Apache License 2.0
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0] == null || arguments[0].get() == null) {
        return null;
    }

    Object input = arguments[0].get();
    switch (inputType) {
        case BOOLEAN:
            return ((BooleanObjectInspector) argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
        case BYTE:
            return new Byte(((ByteObjectInspector) argumentOI).get(input));
        case SHORT:
            return new Short(((ShortObjectInspector) argumentOI).get(input));
        case INT:
            return new Integer(((IntObjectInspector) argumentOI).get(input));
        case LONG:
            return new Long(((LongObjectInspector) argumentOI).get(input));
        case FLOAT:
            return new Float(((FloatObjectInspector) argumentOI).get(input));
        case DOUBLE:
            return new Double(((DoubleObjectInspector) argumentOI).get(input));
        case STRING:
            return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector) argumentOI);
        case BINARY:
            return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
        case VARCHAR:
            if (outputType == PrimitiveCategory.CHAR) {
                HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
                return new HiveChar(hiveVarchar.getValue(), HiveChar.MAX_CHAR_LENGTH);
            } else {
                return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
            }
        case CHAR:
            return PrimitiveObjectInspectorUtils.getHiveChar(input, (HiveCharObjectInspector) argumentOI);
        case DATE:
            return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
        case TIMESTAMP:
            return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
        case DECIMAL:
            // return type is a HiveVarchar
            HiveDecimal decimalValue = PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
            return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
    }
    throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
Example #9
Source File: HiveFieldConverter.java From dremio-oss with Apache License 2.0
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
    DecimalUtility.writeBigDecimalToArrowBuf(
            ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(hiveFieldValue)
                    .bigDecimalValue()
                    .setScale(holder.scale, RoundingMode.HALF_UP),
            holder.buffer, holder.start);
    ((DecimalVector) outputVV).setSafe(outputIndex, 1, 0, holder.buffer);
}
Example #10
Source File: ExcelSerde.java From hadoopoffice with Apache License 2.0
/**
 * Initializes the SerDe.
 * In addition to the standard Hive properties, the following option can be defined in the table properties:
 * office.hive.write.defaultSheetName: the sheet name to which data should be written (note: as input, any sheets
 * can be read, or selected sheets according to HadoopOffice configuration values).
 * Any of the HadoopOffice options (hadoopoffice.*), such as encryption, signing, low footprint mode, or linked
 * workbooks, can also be defined in the table properties,
 * see <a href="https://github.com/ZuInnoTe/hadoopoffice/wiki/Hadoop-File-Format">HadoopOffice configuration</a>.
 *
 * @param conf Hadoop configuration
 * @param prop table properties
 * @param partitionProperties ignored; partitions are not supported
 */
@Override
public void initialize(Configuration conf, Properties prop, Properties partitionProperties) throws SerDeException {
    LOG.debug("Initializing Excel Hive Serde");
    LOG.debug("Configuring Hive-only options");
    // configure hadoopoffice specific hive options
    String defaultSheetNameStr = prop.getProperty(ExcelSerde.CONF_DEFAULTSHEETNAME);
    if (defaultSheetNameStr != null) {
        this.defaultSheetName = defaultSheetNameStr;
    }
    // copy hadoopoffice options
    LOG.debug("Configuring HadoopOffice Format");
    Set<Entry<Object, Object>> entries = prop.entrySet();
    for (Entry<Object, Object> entry : entries) {
        if ((entry.getKey() instanceof String) && ((String) entry.getKey()).startsWith(ExcelSerde.HOSUFFIX)) {
            if (("TRUE".equalsIgnoreCase((String) entry.getValue())) || ("FALSE".equalsIgnoreCase(((String) entry.getValue())))) {
                conf.setBoolean((String) entry.getKey(), Boolean.valueOf((String) entry.getValue()));
            } else {
                conf.set((String) entry.getKey(), (String) entry.getValue());
            }
        }
    }
    // create object inspector (always a struct = row)
    LOG.debug("Creating object inspector");
    this.columnNames = Arrays.asList(prop.getProperty(serdeConstants.LIST_COLUMNS).split(","));
    this.columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(prop.getProperty(serdeConstants.LIST_COLUMN_TYPES));
    final List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
    for (TypeInfo currentColumnType : columnTypes) {
        columnOIs.add(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(currentColumnType));
    }
    this.oi = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs);
    // create converter
    LOG.debug("Creating converter");
    HadoopOfficeReadConfiguration hocr = new HadoopOfficeReadConfiguration(conf);
    this.readConverter = new ExcelConverterSimpleSpreadSheetCellDAO(hocr.getSimpleDateFormat(), hocr.getSimpleDecimalFormat(), hocr.getSimpleDateTimeFormat());
    HadoopOfficeWriteConfiguration howc = new HadoopOfficeWriteConfiguration(conf, "");
    this.writeConverter = new ExcelConverterSimpleSpreadSheetCellDAO(howc.getSimpleDateFormat(), howc.getSimpleDecimalFormat(), howc.getSimpleDateTimeFormat());
    // configure writing of header
    this.writeHeader = howc.getWriteHeader();
    GenericDataType[] columnsGD = new GenericDataType[columnNames.size()];
    for (int i = 0; i < columnOIs.size(); i++) {
        ObjectInspector currentOI = columnOIs.get(i);
        if (currentOI instanceof BooleanObjectInspector) {
            columnsGD[i] = new GenericBooleanDataType();
        } else if (currentOI instanceof DateObjectInspector) {
            columnsGD[i] = new GenericDateDataType();
        } else if (currentOI instanceof TimestampObjectInspector) {
            columnsGD[i] = new GenericTimestampDataType();
        } else if (currentOI instanceof ByteObjectInspector) {
            columnsGD[i] = new GenericByteDataType();
        } else if (currentOI instanceof ShortObjectInspector) {
            columnsGD[i] = new GenericShortDataType();
        } else if (currentOI instanceof IntObjectInspector) {
            columnsGD[i] = new GenericIntegerDataType();
        } else if (currentOI instanceof LongObjectInspector) {
            columnsGD[i] = new GenericLongDataType();
        } else if (currentOI instanceof DoubleObjectInspector) {
            columnsGD[i] = new GenericDoubleDataType();
        } else if (currentOI instanceof FloatObjectInspector) {
            columnsGD[i] = new GenericFloatDataType();
        } else if (currentOI instanceof HiveDecimalObjectInspector) {
            HiveDecimalObjectInspector currentOIHiveDecimalOI = (HiveDecimalObjectInspector) currentOI;
            columnsGD[i] = new GenericBigDecimalDataType(currentOIHiveDecimalOI.precision(), currentOIHiveDecimalOI.scale());
        } else if (currentOI instanceof StringObjectInspector) {
            columnsGD[i] = new GenericStringDataType();
        } else {
            LOG.warn("Could not detect desired datatype for column " + i + ". Type " + currentOI.getTypeName() + ". Using String");
            columnsGD[i] = new GenericStringDataType();
        }
    }
    this.readConverter.setSchemaRow(columnsGD);
    this.writeConverter.setSchemaRow(columnsGD);
    // create nullrow
    this.nullRow = new Object[this.columnNames.size()];
    // set writerow
    this.currentWriteRow = 0;
    // set outputrow
    this.outputRow = new Object[this.columnNames.size()];
    LOG.debug("Finished Initialization");
}
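Example #10 is the only snippet here that reads precision() and scale() off the inspector; those values are carried by the decimal type info the inspector was created for. A short sketch of that relationship; the decimal(10,2) type and variable names are illustrative:

// precision and scale travel with the type info the inspector is built from
org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo typeInfo =
        org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo(10, 2);
org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector oi =
        (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector)
                org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
                        .getPrimitiveJavaObjectInspector(typeInfo);
int precision = oi.precision();  // 10
int scale = oi.scale();          // 2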