org.apache.flink.table.functions.hive.conversion.HiveInspectors Java Examples
The following examples show how to use org.apache.flink.table.functions.hive.conversion.HiveInspectors, the utility class in Flink's Hive connector that converts between Flink data types/objects and Hive ObjectInspectors. The examples are taken from open source projects; the project and source file are noted with each example.
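Before diving into the examples, here is a minimal, self-contained sketch of the typical round trip through HiveInspectors: obtain an ObjectInspector for a Flink DataType, build a HiveObjectConversion that turns a Flink value into the Hive object the inspector expects, and convert a Hive result back with toFlinkObject. This is an illustrative sketch only: the HiveShimLoader lookup and the STRING column type are assumptions, and the signatures shown match the newer, shim-aware examples below (examples taken from older Flink versions omit the HiveShim argument).

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;
import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.flink.table.functions.hive.conversion.HiveObjectConversion;
import org.apache.flink.table.types.DataType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

public class HiveInspectorsRoundTrip {

    public static void main(String[] args) {
        // Assumption: resolve a HiveShim from the Hive version on the classpath.
        // In the examples below the shim is usually supplied by the surrounding catalog or function.
        HiveShim hiveShim = HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());

        // Placeholder column type used only for illustration.
        DataType flinkType = DataTypes.STRING();

        // 1. ObjectInspector describing how Hive reads values of this Flink type.
        ObjectInspector inspector = HiveInspectors.getObjectInspector(flinkType);

        // 2. Conversion that turns a Flink value into the Hive object the inspector expects.
        HiveObjectConversion toHive = HiveInspectors.getConversion(inspector, flinkType.getLogicalType(), hiveShim);
        Object hiveObject = toHive.toHiveObject("hello");

        // 3. Convert a Hive object (e.g. a UDF result) back into its Flink representation.
        Object flinkObject = HiveInspectors.toFlinkObject(inspector, hiveObject, hiveShim);
        System.out.println(flinkObject);
    }
}

The same three calls (getObjectInspector, getConversion, and toFlinkObject) form the core of every example that follows.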
Example #1
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
    try {
        List<TypeInfo> argTypeInfo = new ArrayList<>();
        for (DataType argType : argTypes) {
            argTypeInfo.add(HiveTypeUtil.toHiveTypeInfo(argType));
        }
        Class returnType = hiveFunctionWrapper.createFunction()
            .getResolver().getEvalMethod(argTypeInfo).getReturnType();
        return HiveInspectors.toFlinkType(
            HiveInspectors.getObjectInspector(returnType));
    } catch (UDFArgumentException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #2
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public void openInternal() {
    LOG.info("Open HiveGenericUDF as {}", hiveFunctionWrapper.getClassName());
    function = hiveFunctionWrapper.createFunction();
    ObjectInspector[] argInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
    try {
        returnInspector = function.initializeAndFoldConstants(argInspectors);
    } catch (UDFArgumentException e) {
        throw new FlinkHiveUDFException(e);
    }
    deferredObjects = new GenericUDF.DeferredObject[argTypes.length];
    for (int i = 0; i < deferredObjects.length; i++) {
        deferredObjects[i] = new DeferredObjectAdapter(
            argInspectors[i],
            argTypes[i].getLogicalType(),
            hiveShim
        );
    }
}
Example #3
Source File: HiveGenericUDTF.java From flink with Apache License 2.0
@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();
    function.setCollector(input -> {
        Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input, hiveShim);
        HiveGenericUDTF.this.collect(row);
    });
    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
    returnInspector = function.initialize(argumentInspectors);
    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);
    conversions = new HiveObjectConversion[argumentInspectors.length];
    for (int i = 0; i < argumentInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType(), hiveShim);
    }
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
}
Example #4
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    for (int i = 0; i < args.length; i++) {
        ((DeferredObjectAdapter) deferredObjects[i]).set(args[i]);
    }
    try {
        Object result = returnInspector instanceof ConstantObjectInspector
            ? ((ConstantObjectInspector) returnInspector).getWritableConstantValue()
            : function.evaluate(deferredObjects);
        return HiveInspectors.toFlinkObject(returnInspector, result, hiveShim);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #5
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
private void init() throws HiveException {
    ObjectInspector[] inputInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
    // Flink UDAF only supports Hive UDAF's PARTIAL_1 and FINAL mode
    // PARTIAL1: from original data to partial aggregation data:
    //   iterate() and terminatePartial() will be called.
    this.partialEvaluator = createEvaluator(inputInspectors);
    this.partialResultObjectInspector = partialEvaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputInspectors);
    // FINAL: from partial aggregation to full aggregation:
    //   merge() and terminate() will be called.
    this.finalEvaluator = createEvaluator(inputInspectors);
    this.finalResultObjectInspector = finalEvaluator.init(
        GenericUDAFEvaluator.Mode.FINAL,
        new ObjectInspector[]{ partialResultObjectInspector });
    conversions = new HiveObjectConversion[inputInspectors.length];
    for (int i = 0; i < inputInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(inputInspectors[i], argTypes[i].getLogicalType(), hiveShim);
    }
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
    initialized = true;
}
Example #6
Source File: HiveTableUtil.java From flink with Apache License 2.0
@Override
public String visit(ValueLiteralExpression valueLiteral) {
    DataType dataType = valueLiteral.getOutputDataType();
    Object value = valueLiteral.getValueAs(Object.class).orElse(null);
    if (value == null) {
        return "null";
    }
    value = HiveInspectors.getConversion(HiveInspectors.getObjectInspector(dataType), dataType.getLogicalType(), hiveShim)
        .toHiveObject(value);
    String res = value.toString();
    LogicalTypeRoot typeRoot = dataType.getLogicalType().getTypeRoot();
    switch (typeRoot) {
        case CHAR:
        case VARCHAR:
            res = "'" + res.replace("'", "''") + "'";
            break;
        case DATE:
        case TIMESTAMP_WITHOUT_TIME_ZONE:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            // Hive does not support partition filter push down with these types.
            return null;
        default:
            break;
    }
    return res;
}
Example #7
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
    LOG.info("Getting result type of HiveGenericUDF from {}", hiveFunctionWrapper.getClassName());
    try {
        ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
        ObjectInspector resultObjectInspector =
            hiveFunctionWrapper.createFunction().initializeAndFoldConstants(argumentInspectors);
        return HiveTypeUtil.toFlinkType(
            TypeInfoUtils.getTypeInfoFromObjectInspector(resultObjectInspector));
    } catch (UDFArgumentException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #8
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    checkArgument(args.length == conversions.length);
    if (!allIdentityConverter) {
        for (int i = 0; i < args.length; i++) {
            args[i] = conversions[i].toHiveObject(args[i]);
        }
    }
    try {
        Object result = FunctionRegistry.invoke(method, function, conversionHelper.convertIfNecessary(args));
        return HiveInspectors.toFlinkObject(returnInspector, result, hiveShim);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #9
Source File: HiveTableInputFormat.java From flink with Apache License 2.0
@Override
public Row nextRecord(Row ignore) throws IOException {
    if (reachedEnd()) {
        return null;
    }
    Row row = new Row(rowArity);
    try {
        // Use HiveDeserializer to deserialize an object out of a Writable blob
        Object hiveRowStruct = deserializer.deserialize(value);
        int index = 0;
        for (; index < structFields.size(); index++) {
            StructField structField = structFields.get(index);
            Object object = HiveInspectors.toFlinkObject(structField.getFieldObjectInspector(),
                structObjectInspector.getStructFieldData(hiveRowStruct, structField));
            row.setField(index, object);
        }
        for (String partition : partitionColNames) {
            row.setField(index++, hiveTablePartition.getPartitionSpec().get(partition));
        }
    } catch (Exception e) {
        logger.error("Error happens when converting hive data type to flink data type.");
        throw new FlinkHiveException(e);
    }
    this.fetched = false;
    return row;
}
Example #10
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    checkArgument(args.length == conversions.length);
    if (!allIdentityConverter) {
        for (int i = 0; i < args.length; i++) {
            args[i] = conversions[i].toHiveObject(args[i]);
        }
    }
    try {
        Object result = FunctionRegistry.invoke(method, function, conversionHelper.convertIfNecessary(args));
        return HiveInspectors.toFlinkObject(returnInspector, result);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #11
Source File: HiveRowDataPartitionComputer.java From flink with Apache License 2.0
public HiveRowDataPartitionComputer(
        HiveShim hiveShim,
        String defaultPartValue,
        String[] columnNames,
        DataType[] columnTypes,
        String[] partitionColumns) {
    super(defaultPartValue, columnNames, columnTypes, partitionColumns);
    this.partitionConverters = Arrays.stream(partitionTypes)
        .map(TypeConversions::fromLogicalToDataType)
        .map(DataFormatConverters::getConverterForDataType)
        .toArray(DataFormatConverters.DataFormatConverter[]::new);
    this.hiveObjectConversions = new HiveObjectConversion[partitionIndexes.length];
    for (int i = 0; i < hiveObjectConversions.length; i++) {
        DataType partColType = columnTypes[partitionIndexes[i]];
        ObjectInspector objectInspector = HiveInspectors.getObjectInspector(partColType);
        hiveObjectConversions[i] = HiveInspectors.getConversion(objectInspector, partColType.getLogicalType(), hiveShim);
    }
}
Example #12
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
    LOG.info("Getting result type of HiveGenericUDF from {}", hiveFunctionWrapper.getClassName());
    try {
        ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
        ObjectInspector resultObjectInspector =
            hiveFunctionWrapper.createFunction().initializeAndFoldConstants(argumentInspectors);
        return HiveTypeUtil.toFlinkType(
            TypeInfoUtils.getTypeInfoFromObjectInspector(resultObjectInspector));
    } catch (UDFArgumentException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #13
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public void openInternal() {
    LOG.info("Open HiveGenericUDF as {}", hiveFunctionWrapper.getClassName());
    function = hiveFunctionWrapper.createFunction();
    try {
        returnInspector = function.initializeAndFoldConstants(
            HiveInspectors.toInspectors(constantArguments, argTypes));
    } catch (UDFArgumentException e) {
        throw new FlinkHiveUDFException(e);
    }
    deferredObjects = new GenericUDF.DeferredObject[argTypes.length];
    for (int i = 0; i < deferredObjects.length; i++) {
        deferredObjects[i] = new DeferredObjectAdapter(
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
                HiveTypeUtil.toHiveTypeInfo(argTypes[i])),
            argTypes[i].getLogicalType()
        );
    }
}
Example #14
Source File: HiveMapredSplitReader.java From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public RowData nextRecord(RowData reuse) throws IOException {
    if (reachedEnd()) {
        return null;
    }
    try {
        // Use HiveDeserializer to deserialize an object out of a Writable blob
        Object hiveRowStruct = deserializer.deserialize(value);
        for (int i = 0; i < selectedFields.length; i++) {
            // set non-partition columns
            if (selectedFields[i] < structFields.size()) {
                StructField structField = structFields.get(selectedFields[i]);
                Object object = HiveInspectors.toFlinkObject(structField.getFieldObjectInspector(),
                    structObjectInspector.getStructFieldData(hiveRowStruct, structField), hiveShim);
                row.setField(i, converters[i].toInternal(object));
            }
        }
    } catch (Exception e) {
        LOG.error("Error happens when converting hive data type to flink data type.");
        throw new FlinkHiveException(e);
    }
    this.fetched = false;
    return row;
}
Example #15
Source File: HiveGenericUDTF.java From flink with Apache License 2.0
@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();
    function.setCollector(input -> {
        Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input);
        HiveGenericUDTF.this.collect(row);
    });
    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
    returnInspector = function.initialize(argumentInspectors);
    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);
    conversions = new HiveObjectConversion[argumentInspectors.length];
    for (int i = 0; i < argumentInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType());
    }
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
}
Example #16
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
private void init() throws HiveException {
    ObjectInspector[] inputInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
    // Flink UDAF only supports Hive UDAF's PARTIAL_1 and FINAL mode
    // PARTIAL1: from original data to partial aggregation data:
    //   iterate() and terminatePartial() will be called.
    this.partialEvaluator = createEvaluator(inputInspectors);
    this.partialResultObjectInspector = partialEvaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputInspectors);
    // FINAL: from partial aggregation to full aggregation:
    //   merge() and terminate() will be called.
    this.finalEvaluator = createEvaluator(inputInspectors);
    this.finalResultObjectInspector = finalEvaluator.init(
        GenericUDAFEvaluator.Mode.FINAL,
        new ObjectInspector[]{ partialResultObjectInspector });
    conversions = new HiveObjectConversion[inputInspectors.length];
    for (int i = 0; i < inputInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(inputInspectors[i], argTypes[i].getLogicalType());
    }
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
    initialized = true;
}
Example #17
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
@Override
public Object getValue(GenericUDAFEvaluator.AggregationBuffer accumulator) {
    try {
        return HiveInspectors.toFlinkObject(finalResultObjectInspector, finalEvaluator.terminate(accumulator));
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to get final result on %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example #18
Source File: HiveRowPartitionComputer.java From flink with Apache License 2.0
HiveRowPartitionComputer(HiveShim hiveShim, String defaultPartValue, String[] columnNames,
        DataType[] columnTypes, String[] partitionColumns) {
    super(defaultPartValue, columnNames, partitionColumns);
    partColConversions = new HiveObjectConversion[partitionIndexes.length];
    for (int i = 0; i < partColConversions.length; i++) {
        DataType partColType = columnTypes[partitionIndexes[i]];
        ObjectInspector objectInspector = HiveInspectors.getObjectInspector(partColType);
        partColConversions[i] = HiveInspectors.getConversion(objectInspector, partColType.getLogicalType(), hiveShim);
    }
}
Example #19
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public void openInternal() {
    LOG.info("Opening HiveSimpleUDF as '{}'", hiveFunctionWrapper.getClassName());
    function = hiveFunctionWrapper.createFunction();
    List<TypeInfo> typeInfos = new ArrayList<>();
    for (DataType arg : argTypes) {
        typeInfos.add(HiveTypeUtil.toHiveTypeInfo(arg, false));
    }
    try {
        method = function.getResolver().getEvalMethod(typeInfos);
        returnInspector = ObjectInspectorFactory.getReflectionObjectInspector(method.getGenericReturnType(),
            ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
        ObjectInspector[] argInspectors = new ObjectInspector[typeInfos.size()];
        for (int i = 0; i < argTypes.length; i++) {
            argInspectors[i] = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfos.get(i));
        }
        conversionHelper = new GenericUDFUtils.ConversionHelper(method, argInspectors);
        conversions = new HiveObjectConversion[argInspectors.length];
        for (int i = 0; i < argInspectors.length; i++) {
            conversions[i] = HiveInspectors.getConversion(argInspectors[i], argTypes[i].getLogicalType(), hiveShim);
        }
        allIdentityConverter = Arrays.stream(conversions)
            .allMatch(conv -> conv instanceof IdentityConversion);
    } catch (Exception e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to open HiveSimpleUDF from %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example #20
Source File: HiveWriterFactory.java From flink with Apache License 2.0
private void checkInitialize() throws Exception {
    if (initialized) {
        return;
    }
    JobConf jobConf = confWrapper.conf();
    Object serdeLib = Class.forName(serDeInfo.getSerializationLib()).newInstance();
    Preconditions.checkArgument(serdeLib instanceof Serializer && serdeLib instanceof Deserializer,
        "Expect a SerDe lib implementing both Serializer and Deserializer, but actually got "
            + serdeLib.getClass().getName());
    this.recordSerDe = (Serializer) serdeLib;
    ReflectionUtils.setConf(recordSerDe, jobConf);
    // TODO: support partition properties, for now assume they're same as table properties
    SerDeUtils.initializeSerDe((Deserializer) recordSerDe, jobConf, tableProperties, null);
    this.formatFields = allColumns.length - partitionColumns.length;
    this.hiveConversions = new HiveObjectConversion[formatFields];
    this.converters = new DataFormatConverter[formatFields];
    List<ObjectInspector> objectInspectors = new ArrayList<>(hiveConversions.length);
    for (int i = 0; i < formatFields; i++) {
        DataType type = allTypes[i];
        ObjectInspector objectInspector = HiveInspectors.getObjectInspector(type);
        objectInspectors.add(objectInspector);
        hiveConversions[i] = HiveInspectors.getConversion(objectInspector, type.getLogicalType(), hiveShim);
        converters[i] = DataFormatConverters.getConverterForDataType(type);
    }
    this.formatInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList(allColumns).subList(0, formatFields), objectInspectors);
    this.initialized = true;
}
Example #21
Source File: HiveTableSource.java From flink with Apache License 2.0
private static Object restorePartitionValueFromFromType(HiveShim shim, String valStr, DataType type) {
    LogicalTypeRoot typeRoot = type.getLogicalType().getTypeRoot();
    // Note: this is not a complete list of the partition key types that Hive supports; more may need to be added later.
    switch (typeRoot) {
        case CHAR:
        case VARCHAR:
            return valStr;
        case BOOLEAN:
            return Boolean.parseBoolean(valStr);
        case TINYINT:
            return Integer.valueOf(valStr).byteValue();
        case SMALLINT:
            return Short.valueOf(valStr);
        case INTEGER:
            return Integer.valueOf(valStr);
        case BIGINT:
            return Long.valueOf(valStr);
        case FLOAT:
            return Float.valueOf(valStr);
        case DOUBLE:
            return Double.valueOf(valStr);
        case DATE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                shim.toHiveDate(Date.valueOf(valStr)),
                shim);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                shim.toHiveTimestamp(Timestamp.valueOf(valStr)),
                shim);
        default:
            break;
    }
    throw new FlinkHiveException(
        new IllegalArgumentException(String.format("Can not convert %s to type %s for partition value", valStr, type)));
}
Example #22
Source File: HiveGenericUDTF.java From flink with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
    LOG.info("Getting result type of HiveGenericUDTF with {}", hiveFunctionWrapper.getClassName());
    try {
        ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
        return HiveTypeUtil.toFlinkType(
            hiveFunctionWrapper.createFunction().initialize(argumentInspectors));
    } catch (UDFArgumentException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #23
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
@Override
public Object getValue(GenericUDAFEvaluator.AggregationBuffer accumulator) {
    try {
        return HiveInspectors.toFlinkObject(finalResultObjectInspector, finalEvaluator.terminate(accumulator), hiveShim);
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to get final result on %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example #24
Source File: HiveBatchSource.java From Alink with Apache License 2.0
private Object restorePartitionValueFromFromType(String valStr, DataType type) {
    LogicalTypeRoot typeRoot = type.getLogicalType().getTypeRoot();
    // Note: this is not a complete list of the partition key types that Hive supports; more may need to be added later.
    switch (typeRoot) {
        case CHAR:
        case VARCHAR:
            return valStr;
        case BOOLEAN:
            return Boolean.parseBoolean(valStr);
        case TINYINT:
            return Integer.valueOf(valStr).byteValue();
        case SMALLINT:
            return Short.valueOf(valStr);
        case INTEGER:
            return Integer.valueOf(valStr);
        case BIGINT:
            return Long.valueOf(valStr);
        case FLOAT:
            return Float.valueOf(valStr);
        case DOUBLE:
            return Double.valueOf(valStr);
        case DATE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                hiveShim.toHiveDate(Date.valueOf(valStr)),
                hiveShim);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return HiveInspectors.toFlinkObject(
                HiveInspectors.getObjectInspector(type),
                hiveShim.toHiveTimestamp(Timestamp.valueOf(valStr)),
                hiveShim);
        default:
            break;
    }
    throw new FlinkHiveException(
        new IllegalArgumentException(String.format("Can not convert %s to type %s for partition value", valStr, type)));
}
Example #25
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public void openInternal() {
    LOG.info("Opening HiveSimpleUDF as '{}'", hiveFunctionWrapper.getClassName());
    function = hiveFunctionWrapper.createFunction();
    List<TypeInfo> typeInfos = new ArrayList<>();
    for (DataType arg : argTypes) {
        typeInfos.add(HiveTypeUtil.toHiveTypeInfo(arg));
    }
    try {
        method = function.getResolver().getEvalMethod(typeInfos);
        returnInspector = ObjectInspectorFactory.getReflectionObjectInspector(method.getGenericReturnType(),
            ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
        ObjectInspector[] argInspectors = new ObjectInspector[typeInfos.size()];
        for (int i = 0; i < argTypes.length; i++) {
            argInspectors[i] = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfos.get(i));
        }
        conversionHelper = new GenericUDFUtils.ConversionHelper(method, argInspectors);
        conversions = new HiveObjectConversion[argInspectors.length];
        for (int i = 0; i < argInspectors.length; i++) {
            conversions[i] = HiveInspectors.getConversion(argInspectors[i], argTypes[i].getLogicalType());
        }
        allIdentityConverter = Arrays.stream(conversions)
            .allMatch(conv -> conv instanceof IdentityConversion);
    } catch (Exception e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to open HiveSimpleUDF from %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example #26
Source File: HiveGenericUDF.java From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
    for (int i = 0; i < args.length; i++) {
        ((DeferredObjectAdapter) deferredObjects[i]).set(args[i]);
    }
    try {
        return HiveInspectors.toFlinkObject(returnInspector, function.evaluate(deferredObjects));
    } catch (HiveException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #27
Source File: HiveGenericUDTF.java From flink with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
    LOG.info("Getting result type of HiveGenericUDTF with {}", hiveFunctionWrapper.getClassName());
    try {
        ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
        return HiveTypeUtil.toFlinkType(
            hiveFunctionWrapper.createFunction().initialize(argumentInspectors));
    } catch (UDFArgumentException e) {
        throw new FlinkHiveUDFException(e);
    }
}
Example #28
Source File: DeferredObjectAdapter.java From flink with Apache License 2.0
public DeferredObjectAdapter(ObjectInspector inspector, LogicalType logicalType, HiveShim hiveShim) {
    conversion = HiveInspectors.getConversion(inspector, logicalType, hiveShim);
}
Example #29
Source File: HiveGenericUDTFTest.java From flink with Apache License 2.0
@Override
public void collect(Object o) {
    Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, o);
    result.add(row);
}
Example #30
Source File: HiveGenericUDTFTest.java From flink with Apache License 2.0
private static HiveGenericUDTF init(Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) throws Exception {
    HiveFunctionWrapper<GenericUDTF> wrapper = new HiveFunctionWrapper(hiveUdfClass.getName());
    HiveGenericUDTF udf = new HiveGenericUDTF(wrapper);
    udf.setArgumentTypesAndConstants(constantArgs, argTypes);
    udf.getHiveResultType(constantArgs, argTypes);
    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArgs, argTypes);
    ObjectInspector returnInspector = wrapper.createFunction().initialize(argumentInspectors);
    udf.open(null);
    collector = new TestCollector(returnInspector);
    udf.setCollector(collector);
    return udf;
}