Java Code Examples for org.apache.flink.table.functions.hive.conversion.HiveInspectors#toFlinkObject()
The following examples show how to use org.apache.flink.table.functions.hive.conversion.HiveInspectors#toFlinkObject(). The source file, originating project, and license are noted above each example.
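Before the examples, here is a minimal, self-contained sketch of what toFlinkObject() does: given a Hive ObjectInspector describing the value's layout, it unwraps a Hive-side value (typically a Hadoop Writable) into a plain Java object that Flink can use. This sketch assumes the older two-argument overload used in Examples 1, 2, 6, 7, and 11 (newer Flink versions add a HiveShim argument, as in the other examples); the class name, inspector choice, and input value are illustrative only.

import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

// Hypothetical demo class; not part of Flink.
public class ToFlinkObjectSketch {
    public static void main(String[] args) {
        // The ObjectInspector describes how Hive represents the value; here, a Writable-backed string.
        ObjectInspector inspector = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        // toFlinkObject unwraps the Hive Writable (a Text) into a plain java.lang.String.
        Object flinkValue = HiveInspectors.toFlinkObject(inspector, new Text("hello"));
        System.out.println(flinkValue); // prints: hello
    }
}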
Example 1
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    checkArgument(args.length == conversions.length);

    if (!allIdentityConverter) {
        for (int i = 0; i < args.length; i++) {
            args[i] = conversions[i].toHiveObject(args[i]);
        }
    }

    try {
        Object result = FunctionRegistry.invoke(method, function, conversionHelper.convertIfNecessary(args));
        return HiveInspectors.toFlinkObject(returnInspector, result);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example 2
Source File: HiveTableInputFormat.java From flink with Apache License 2.0
@Override
public Row nextRecord(Row ignore) throws IOException {
    if (reachedEnd()) {
        return null;
    }
    Row row = new Row(rowArity);
    try {
        // Use HiveDeserializer to deserialize an object out of a Writable blob
        Object hiveRowStruct = deserializer.deserialize(value);
        int index = 0;
        for (; index < structFields.size(); index++) {
            StructField structField = structFields.get(index);
            Object object = HiveInspectors.toFlinkObject(
                structField.getFieldObjectInspector(),
                structObjectInspector.getStructFieldData(hiveRowStruct, structField));
            row.setField(index, object);
        }
        for (String partition : partitionColNames) {
            row.setField(index++, hiveTablePartition.getPartitionSpec().get(partition));
        }
    } catch (Exception e) {
        logger.error("Error happens when converting hive data type to flink data type.");
        throw new FlinkHiveException(e);
    }
    this.fetched = false;
    return row;
}
Example 3
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    for (int i = 0; i < args.length; i++) {
        ((DeferredObjectAdapter) deferredObjects[i]).set(args[i]);
    }
    try {
        Object result = returnInspector instanceof ConstantObjectInspector
            ? ((ConstantObjectInspector) returnInspector).getWritableConstantValue()
            : function.evaluate(deferredObjects);
        return HiveInspectors.toFlinkObject(returnInspector, result, hiveShim);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example 4
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    checkArgument(args.length == conversions.length);

    if (!allIdentityConverter) {
        for (int i = 0; i < args.length; i++) {
            args[i] = conversions[i].toHiveObject(args[i]);
        }
    }

    try {
        Object result = FunctionRegistry.invoke(method, function, conversionHelper.convertIfNecessary(args));
        return HiveInspectors.toFlinkObject(returnInspector, result, hiveShim);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example 5
Source File: HiveMapredSplitReader.java From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public RowData nextRecord(RowData reuse) throws IOException {
    if (reachedEnd()) {
        return null;
    }
    try {
        // Use HiveDeserializer to deserialize an object out of a Writable blob
        Object hiveRowStruct = deserializer.deserialize(value);
        for (int i = 0; i < selectedFields.length; i++) {
            // set non-partition columns
            if (selectedFields[i] < structFields.size()) {
                StructField structField = structFields.get(selectedFields[i]);
                Object object = HiveInspectors.toFlinkObject(
                    structField.getFieldObjectInspector(),
                    structObjectInspector.getStructFieldData(hiveRowStruct, structField),
                    hiveShim);
                row.setField(i, converters[i].toInternal(object));
            }
        }
    } catch (Exception e) {
        LOG.error("Error happens when converting hive data type to flink data type.");
        throw new FlinkHiveException(e);
    }
    this.fetched = false;
    return row;
}
Example 6
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
@Override
public Object getValue(GenericUDAFEvaluator.AggregationBuffer accumulator) {
    try {
        return HiveInspectors.toFlinkObject(finalResultObjectInspector, finalEvaluator.terminate(accumulator));
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to get final result on %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example 7
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    for (int i = 0; i < args.length; i++) {
        ((DeferredObjectAdapter) deferredObjects[i]).set(args[i]);
    }
    try {
        return HiveInspectors.toFlinkObject(returnInspector, function.evaluate(deferredObjects));
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example 8
Source File: HiveBatchSource.java From Alink with Apache License 2.0
private Object restorePartitionValueFromFromType(String valStr, DataType type) {
    LogicalTypeRoot typeRoot = type.getLogicalType().getTypeRoot();
    // Note: this is not a complete list of the partition key types that Hive supports; more may need to be added later.
    switch (typeRoot) {
        case CHAR:
        case VARCHAR:
            return valStr;
        case BOOLEAN:
            return Boolean.parseBoolean(valStr);
        case TINYINT:
            return Integer.valueOf(valStr).byteValue();
        case SMALLINT:
            return Short.valueOf(valStr);
        case INTEGER:
            return Integer.valueOf(valStr);
        case BIGINT:
            return Long.valueOf(valStr);
        case FLOAT:
            return Float.valueOf(valStr);
        case DOUBLE:
            return Double.valueOf(valStr);
        case DATE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                hiveShim.toHiveDate(Date.valueOf(valStr)),
                hiveShim);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                hiveShim.toHiveTimestamp(Timestamp.valueOf(valStr)),
                hiveShim);
        default:
            break;
    }
    throw new FlinkHiveException(
        new IllegalArgumentException(String.format("Can not convert %s to type %s for partition value", valStr, type)));
}
Example 9
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
@Override
public Object getValue(GenericUDAFEvaluator.AggregationBuffer accumulator) {
    try {
        return HiveInspectors.toFlinkObject(finalResultObjectInspector, finalEvaluator.terminate(accumulator), hiveShim);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to get final result on %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example 10
Source File: HiveTableSource.java From flink with Apache License 2.0
private static Object restorePartitionValueFromFromType(HiveShim shim, String valStr, DataType type) {
    LogicalTypeRoot typeRoot = type.getLogicalType().getTypeRoot();
    // Note: this is not a complete list of the partition key types that Hive supports; more may need to be added later.
    switch (typeRoot) {
        case CHAR:
        case VARCHAR:
            return valStr;
        case BOOLEAN:
            return Boolean.parseBoolean(valStr);
        case TINYINT:
            return Integer.valueOf(valStr).byteValue();
        case SMALLINT:
            return Short.valueOf(valStr);
        case INTEGER:
            return Integer.valueOf(valStr);
        case BIGINT:
            return Long.valueOf(valStr);
        case FLOAT:
            return Float.valueOf(valStr);
        case DOUBLE:
            return Double.valueOf(valStr);
        case DATE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                shim.toHiveDate(Date.valueOf(valStr)),
                shim);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                shim.toHiveTimestamp(Timestamp.valueOf(valStr)),
                shim);
        default:
            break;
    }
    throw new FlinkHiveException(
        new IllegalArgumentException(String.format("Can not convert %s to type %s for partition value", valStr, type)));
}
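As a usage note for Examples 8 and 10, the sketch below isolates the DATE branch: the partition value string is parsed to a java.sql.Date, converted to Hive's date representation through the shim, then converted back to a Flink object via the inspector derived from the Flink DataType. It assumes the three-argument overload and the HiveShimLoader API from newer Flink versions; the class name and date value are illustrative.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;
import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.flink.table.types.DataType;

// Hypothetical demo class; not part of Flink.
public class RestoreDatePartitionSketch {
    public static void main(String[] args) {
        // Load a shim matching the Hive version found on the classpath.
        HiveShim shim = HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());
        DataType dateType = DataTypes.DATE();
        // String -> java.sql.Date -> Hive date representation -> Flink object.
        Object flinkDate = HiveInspectors.toFlinkObject(
            HiveInspectors.getObjectInspector(dateType),
            shim.toHiveDate(java.sql.Date.valueOf("2020-01-01")),
            shim);
        System.out.println(flinkDate);
    }
}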
Example 11
Source File: HiveGenericUDTFTest.java From flink with Apache License 2.0
@Override
public void collect(Object o) {
    Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, o);
    result.add(row);
}
Example 12
Source File: HiveGenericUDTFTest.java From flink with Apache License 2.0
@Override
public void collect(Object o) {
    Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, o, hiveShim);
    result.add(row);
}