org.apache.flink.table.types.utils.TypeConversions Java Examples

The following examples show how to use org.apache.flink.table.types.utils.TypeConversions. All of them are taken from the Apache Flink project; the originating source file is noted above each example.
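Before the examples, here is a minimal, self-contained sketch of the conversions that most of the snippets below rely on: legacy TypeInformation to DataType, DataType back to legacy TypeInformation, and LogicalType to DataType. The class name TypeConversionsSketch is made up for illustration; the TypeConversions calls themselves are the static methods used throughout the examples.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.utils.TypeConversions;

public class TypeConversionsSketch {

	public static void main(String[] args) {
		// Legacy TypeInformation -> DataType (bridging old connectors/UDFs into the new type system).
		DataType fromLegacy = TypeConversions.fromLegacyInfoToDataType(Types.STRING);

		// DataType -> legacy TypeInformation (for APIs that still expect TypeInformation).
		TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(DataTypes.BIGINT());

		// LogicalType -> DataType (wraps the SQL-level logical type with a default conversion class).
		LogicalType logicalType = DataTypes.INT().getLogicalType();
		DataType fromLogical = TypeConversions.fromLogicalToDataType(logicalType);

		System.out.println(fromLegacy + " / " + legacyInfo + " / " + fromLogical);
	}
}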
Example #1
Source File: TestCsvFileSystemFormatFactory.java    From flink with Apache License 2.0
private static void writeCsvToStream(
		DataType[] types,
		RowData rowData,
		OutputStream stream) throws IOException {
	LogicalType[] fieldTypes = Arrays.stream(types)
			.map(DataType::getLogicalType)
			.toArray(LogicalType[]::new);
	DataFormatConverters.DataFormatConverter converter = DataFormatConverters.getConverterForDataType(
			TypeConversions.fromLogicalToDataType(RowType.of(fieldTypes)));

	Row row = (Row) converter.toExternal(rowData);
	StringBuilder builder = new StringBuilder();
	Object o;
	for (int i = 0; i < row.getArity(); i++) {
		if (i > 0) {
			builder.append(DEFAULT_FIELD_DELIMITER);
		}
		if ((o = row.getField(i)) != null) {
			builder.append(o);
		}
	}
	String str = builder.toString();
	stream.write(str.getBytes(StandardCharsets.UTF_8));
	stream.write(DEFAULT_LINE_DELIMITER.getBytes(StandardCharsets.UTF_8));
}
 
Example #2
Source File: TimestampExtractorUtils.java    From flink with Apache License 2.0
private static ResolvedFieldReference mapToResolvedField(
		Function<String, String> nameRemapping,
		TableSchema schema,
		String arg) {
	String remappedName = nameRemapping.apply(arg);

	int idx = IntStream.range(0, schema.getFieldCount())
		.filter(i -> schema.getFieldName(i).get().equals(remappedName))
		.findFirst()
		.orElseThrow(() -> new ValidationException(String.format("Field %s does not exist", remappedName)));

	TypeInformation<?> dataType = TypeConversions.fromDataTypeToLegacyInfo(schema.getTableColumn(idx)
		.get()
		.getType());
	return new ResolvedFieldReference(remappedName, dataType, idx);
}
 
Example #3
Source File: TimestampExtractorUtils.java    From flink with Apache License 2.0
/**
 * Retrieves all field accesses needed for the given {@link TimestampExtractor}.
 *
 * @param timestampExtractor Extractor for which to construct array of field accesses.
 * @param physicalInputType Physical input type that the timestamp extractor accesses.
 * @param nameRemapping Additional remapping of a logical to a physical field name.
 *                      TimestampExtractor works with logical names, but accesses physical
 *                      fields.
 * @return Array of physical field references.
 */
public static ResolvedFieldReference[] getAccessedFields(
		TimestampExtractor timestampExtractor,
		DataType physicalInputType,
		Function<String, String> nameRemapping) {

	final Function<String, ResolvedFieldReference> fieldMapping;
	if (LogicalTypeChecks.isCompositeType(physicalInputType.getLogicalType())) {
		TableSchema schema = DataTypeUtils.expandCompositeTypeToSchema(physicalInputType);
		fieldMapping = (arg) -> mapToResolvedField(nameRemapping, schema, arg);
	} else {
		fieldMapping = (arg) -> new ResolvedFieldReference(
			arg,
			TypeConversions.fromDataTypeToLegacyInfo(physicalInputType),
			0);
	}
	return getAccessedFields(timestampExtractor, fieldMapping);
}
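
For context, a hedged usage sketch of getAccessedFields follows. It is not taken from Flink's sources; the class name GetAccessedFieldsSketch and the field names are made up for illustration, and it assumes Flink's ExistingField rowtime extractor and the internal TimestampExtractorUtils class are available on the classpath.

import java.util.function.Function;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.expressions.ResolvedFieldReference;
import org.apache.flink.table.sources.TimestampExtractorUtils;
import org.apache.flink.table.sources.tsextractors.ExistingField;
import org.apache.flink.table.types.DataType;

public class GetAccessedFieldsSketch {

	public static void main(String[] args) {
		// Physical input type containing the timestamp field "ts".
		DataType physicalInputType = DataTypes.ROW(
				DataTypes.FIELD("ts", DataTypes.TIMESTAMP(3)),
				DataTypes.FIELD("v", DataTypes.INT()));

		// ExistingField extracts the rowtime from an existing field; Function.identity()
		// is used because logical and physical field names coincide in this sketch.
		ResolvedFieldReference[] refs = TimestampExtractorUtils.getAccessedFields(
				new ExistingField("ts"),
				physicalInputType,
				Function.identity());

		System.out.println(refs.length);
	}
}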
 
Example #4
Source File: TypeMappingUtilsTest.java    From flink with Apache License 2.0
@Test
public void testFieldMappingLegacyDecimalTypeNotMatchingPrecision() {
	thrown.expect(ValidationException.class);
	thrown.expectMessage("Type DECIMAL(38, 10) of table field 'f0' does not match with the physical type" +
		" LEGACY('DECIMAL', 'DECIMAL') of the 'f0' field of the TableSource return type.");
	thrown.expectCause(allOf(
		instanceOf(ValidationException.class),
		hasMessage(equalTo("Legacy decimal type can only be mapped to DECIMAL(38, 18)."))));

	int[] indices = TypeMappingUtils.computePhysicalIndices(
		TableSchema.builder()
			.field("f0", DECIMAL(38, 10))
			.build().getTableColumns(),
		ROW(FIELD("f0", TypeConversions.fromLegacyInfoToDataType(Types.BIG_DEC))),
		Function.identity()
	);

	assertThat(indices, equalTo(new int[] {0}));
}
 
Example #5
Source File: OperationTreeBuilder.java    From flink with Apache License 2.0
private void validateAlias(
	List<String> aliases,
	ResolvedExpression resolvedExpression,
	Boolean isRowbasedAggregate) {

	int length = TypeConversions
		.fromDataTypeToLegacyInfo(resolvedExpression.getOutputDataType()).getArity();
	int callArity = isRowbasedAggregate ? length : 1;
	int aliasesSize = aliases.size();

	if ((0 < aliasesSize) && (aliasesSize != callArity)) {
		throw new ValidationException(String.format(
			"List of column aliases must have same degree as table; " +
				"the returned table of function '%s' has " +
				"%d columns, whereas alias list has %d columns",
			resolvedExpression,
			callArity,
			aliasesSize));
	}
}
 
Example #6
Source File: TypeMappingUtilsTest.java    From flink with Apache License 2.0
@Test
public void testFieldMappingLegacyCompositeTypeWithRenaming() {
	int[] indices = TypeMappingUtils.computePhysicalIndices(
		TableSchema.builder()
			.field("a", DataTypes.BIGINT())
			.field("b", DataTypes.STRING())
			.build().getTableColumns(),
		TypeConversions.fromLegacyInfoToDataType(Types.TUPLE(Types.STRING, Types.LONG)),
		str -> {
			switch (str) {
				case "a":
					return "f1";
				case "b":
					return "f0";
				default:
					throw new AssertionError();
			}
		}
	);

	assertThat(indices, equalTo(new int[]{1, 0}));
}
 
Example #7
Source File: TypeMappingUtilsTest.java    From flink with Apache License 2.0
@Test
public void testCheckPhysicalLogicalTypeCompatible() {
	TableSchema tableSchema = TableSchema.builder()
							.field("a", DataTypes.VARCHAR(2))
							.field("b", DataTypes.DECIMAL(20, 2))
							.build();
	TableSink tableSink = new TestTableSink(tableSchema);
	LegacyTypeInformationType legacyDataType = (LegacyTypeInformationType) tableSink.getConsumedDataType()
													.getLogicalType();
	TypeInformation legacyTypeInfo = ((TupleTypeInfo) legacyDataType.getTypeInformation()).getTypeAt(1);
	DataType physicalType = TypeConversions.fromLegacyInfoToDataType(legacyTypeInfo);
	TableSchema physicSchema = DataTypeUtils.expandCompositeTypeToSchema(physicalType);
	DataType[] logicalDataTypes = tableSchema.getFieldDataTypes();
	DataType[] physicalDataTypes = physicSchema.getFieldDataTypes();
	for (int i = 0; i < logicalDataTypes.length; i++) {
		TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
				physicalDataTypes[i].getLogicalType(),
				logicalDataTypes[i].getLogicalType(),
				"physicalField",
				"logicalField",
				false);
	}
}
 
Example #8
Source File: ValuesOperationFactory.java    From flink with Apache License 2.0
private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
	List<LogicalType> typesAtIPosition = extractLogicalTypesAtPosition(resolvedRows, i);

	LogicalType logicalType = LogicalTypeMerging.findCommonType(typesAtIPosition)
		.orElseThrow(() -> {
			Set<DataType> columnTypes = resolvedRows.stream()
				.map(row -> row.get(i).getOutputDataType())
				.collect(Collectors.toCollection(LinkedHashSet::new));

			return new ValidationException(String.format(
				"Types in fromValues(...) must have a common super type. Could not find a common type" +
					" for all rows at column %d.\n" +
					"Could not find a common super type for types: %s",
				i,
				columnTypes));
		});

	return TypeConversions.fromLogicalToDataType(logicalType);
}
 
Example #9
Source File: PlannerQueryOperation.java    From flink with Apache License 2.0
public PlannerQueryOperation(RelNode calciteTree) {
	this.calciteTree = calciteTree;

	RelDataType rowType = calciteTree.getRowType();
	String[] fieldNames = rowType.getFieldNames().toArray(new String[0]);
	DataType[] fieldTypes = rowType.getFieldList()
		.stream()
		.map(field -> {
			final DataType fieldType = TypeConversions
				.fromLegacyInfoToDataType(FlinkTypeFactory.toTypeInfo(field.getType()));
			final boolean nullable = field.getType().isNullable();
			if (nullable != fieldType.getLogicalType().isNullable()
				&& !FlinkTypeFactory.isTimeIndicatorType(field.getType())) {
				return nullable ? fieldType.nullable() : fieldType.notNull();
			} else {
				return fieldType;
			}
		})
		.toArray(DataType[]::new);

	this.tableSchema = TableSchema.builder().fields(fieldNames, fieldTypes).build();
}
 
Example #10
Source File: OperatorBindingCallContext.java    From flink with Apache License 2.0
public OperatorBindingCallContext(
		DataTypeFactory dataTypeFactory,
		FunctionDefinition definition,
		SqlOperatorBinding binding) {
	super(
		dataTypeFactory,
		definition,
		binding.getOperator().getNameAsId().toString());

	this.binding = binding;
	this.argumentDataTypes = new AbstractList<DataType>() {
		@Override
		public DataType get(int pos) {
			final LogicalType logicalType = FlinkTypeFactory.toLogicalType(binding.getOperandType(pos));
			return TypeConversions.fromLogicalToDataType(logicalType);
		}

		@Override
		public int size() {
			return binding.getOperandCount();
		}
	};
}
 
Example #11
Source File: HiveRowDataPartitionComputer.java    From flink with Apache License 2.0
public HiveRowDataPartitionComputer(
		HiveShim hiveShim,
		String defaultPartValue,
		String[] columnNames,
		DataType[] columnTypes,
		String[] partitionColumns) {
	super(defaultPartValue, columnNames, columnTypes, partitionColumns);
	this.partitionConverters = Arrays.stream(partitionTypes)
			.map(TypeConversions::fromLogicalToDataType)
			.map(DataFormatConverters::getConverterForDataType)
			.toArray(DataFormatConverters.DataFormatConverter[]::new);
	this.hiveObjectConversions = new HiveObjectConversion[partitionIndexes.length];
	for (int i = 0; i < hiveObjectConversions.length; i++) {
		DataType partColType = columnTypes[partitionIndexes[i]];
		ObjectInspector objectInspector = HiveInspectors.getObjectInspector(partColType);
		hiveObjectConversions[i] = HiveInspectors.getConversion(objectInspector, partColType.getLogicalType(), hiveShim);
	}
}
 
Example #12
Source File: AbstractRowPythonScalarFunctionOperator.java    From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public void open() throws Exception {
	super.open();
	this.cRowWrapper = new StreamRecordCRowWrappingCollector(output);

	CRowTypeInfo forwardedInputTypeInfo = new CRowTypeInfo(new RowTypeInfo(
		Arrays.stream(forwardedFields)
			.mapToObj(i -> inputType.getFields().get(i))
			.map(RowType.RowField::getType)
			.map(TypeConversions::fromLogicalToDataType)
			.map(TypeConversions::fromDataTypeToLegacyInfo)
			.toArray(TypeInformation[]::new)));
	forwardedInputSerializer = forwardedInputTypeInfo.createSerializer(getExecutionConfig());
}
 
Example #13
Source File: TableSourceValidation.java    From flink with Apache License 2.0
private static void validateTimestampExtractorArguments(
		List<RowtimeAttributeDescriptor> descriptors,
		TableSource<?> tableSource) {
	if (descriptors.size() == 1) {
		RowtimeAttributeDescriptor descriptor = descriptors.get(0);
		// look up extractor input fields in return type
		String[] extractorInputFields = descriptor.getTimestampExtractor().getArgumentFields();
		TypeInformation[] physicalTypes = Arrays.stream(extractorInputFields)
			.map(fieldName -> resolveField(fieldName, tableSource))
			.map(resolvedField -> TypeConversions.fromDataTypeToLegacyInfo(resolvedField.getType()))
			.toArray(TypeInformation[]::new);
		// validate timestamp extractor
		descriptor.getTimestampExtractor().validateArgumentFields(physicalTypes);
	}
}
 
Example #14
Source File: CommonInputTypeStrategy.java    From flink with Apache License 2.0
@Override
public Optional<List<DataType>> inferInputTypes(
		CallContext callContext,
		boolean throwOnFailure) {
	List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
	List<LogicalType> argumentTypes = argumentDataTypes
		.stream()
		.map(DataType::getLogicalType)
		.collect(Collectors.toList());

	if (argumentTypes.stream().anyMatch(CommonInputTypeStrategy::isLegacyType)) {
		return Optional.of(argumentDataTypes);
	}

	Optional<LogicalType> commonType = LogicalTypeMerging.findCommonType(argumentTypes);

	if (!commonType.isPresent()) {
		if (throwOnFailure) {
			throw callContext.newValidationError(
				"Could not find a common type for arguments: %s",
				argumentDataTypes);
		}
		return Optional.empty();
	}

	return commonType.map(type -> Collections.nCopies(
		argumentTypes.size(),
		TypeConversions.fromLogicalToDataType(type)));
}
 
Example #15
Source File: MapInputTypeStrategy.java    From flink with Apache License 2.0
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
	List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
	if (argumentDataTypes.size() == 0) {
		return Optional.empty();
	}

	List<LogicalType> keyTypes = new ArrayList<>();
	List<LogicalType> valueTypes = new ArrayList<>();

	for (int i = 0; i < argumentDataTypes.size(); i++) {
		LogicalType logicalType = argumentDataTypes.get(i).getLogicalType();
		if (i % 2 == 0) {
			keyTypes.add(logicalType);
		} else {
			valueTypes.add(logicalType);
		}
	}
	Optional<LogicalType> commonKeyType = LogicalTypeMerging.findCommonType(keyTypes);
	Optional<LogicalType> commonValueType = LogicalTypeMerging.findCommonType(valueTypes);

	if (!commonKeyType.isPresent() || !commonValueType.isPresent()) {
		return Optional.empty();
	}

	DataType keyType = TypeConversions.fromLogicalToDataType(commonKeyType.get());
	DataType valueType = TypeConversions.fromLogicalToDataType(commonValueType.get());
	return Optional.of(IntStream.range(0, argumentDataTypes.size())
		.mapToObj(idx -> {
			if (idx % 2 == 0) {
				return keyType;
			} else {
				return valueType;
			}
		})
		.collect(Collectors.toList()));
}
 
Example #16
Source File: FunctionCatalogOperatorTable.java    From flink with Apache License 2.0
private Optional<SqlFunction> convertTableFunction(FunctionIdentifier identifier, TableFunctionDefinition functionDefinition) {
	SqlFunction tableFunction = UserDefinedFunctionUtils.createTableSqlFunction(
		identifier,
		identifier.toString(),
		functionDefinition.getTableFunction(),
		TypeConversions.fromLegacyInfoToDataType(functionDefinition.getResultType()),
		typeFactory
	);
	return Optional.of(tableFunction);
}
 
Example #17
Source File: PythonTableFunctionOperator.java    From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public void open() throws Exception {
	super.open();
	this.cRowWrapper = new StreamRecordCRowWrappingCollector(output);
	CRowTypeInfo forwardedInputTypeInfo = new CRowTypeInfo(
		(RowTypeInfo) TypeConversions.fromDataTypeToLegacyInfo(
			TypeConversions.fromLogicalToDataType(inputType)));
	forwardedInputSerializer = forwardedInputTypeInfo.createSerializer(getExecutionConfig());
	udtfOutputTypeSerializer = PythonTypeUtils.toFlinkTypeSerializer(userDefinedFunctionOutputType);
}
 
Example #18
Source File: PythonTableFunction.java    From flink with Apache License 2.0
@Override
public TypeInference getTypeInference(DataTypeFactory typeFactory) {
	final List<DataType> argumentDataTypes = Stream.of(inputTypes)
		.map(TypeConversions::fromLegacyInfoToDataType)
		.collect(Collectors.toList());
	return TypeInference.newBuilder()
		.typedArguments(argumentDataTypes)
		.outputTypeStrategy(TypeStrategies.explicit(TypeConversions.fromLegacyInfoToDataType(resultType)))
		.build();
}
 
Example #19
Source File: PythonTableFunctionFlatMap.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);

	RowTypeInfo forwardedInputTypeInfo = (RowTypeInfo) TypeConversions.fromDataTypeToLegacyInfo(
		TypeConversions.fromLogicalToDataType(inputType));
	forwardedInputSerializer = forwardedInputTypeInfo.createSerializer(getRuntimeContext().getExecutionConfig());
}
 
Example #20
Source File: TypeMappingUtilsTest.java    From flink with Apache License 2.0
@Test
public void testFieldMappingLegacyDecimalType() {
	int[] indices = TypeMappingUtils.computePhysicalIndices(
		TableSchema.builder()
			.field("f0", DECIMAL(38, 18))
			.build().getTableColumns(),
		ROW(FIELD("f0", TypeConversions.fromLegacyInfoToDataType(Types.BIG_DEC))),
		Function.identity()
	);

	assertThat(indices, equalTo(new int[] {0}));
}
 
Example #21
Source File: TypeMappingUtilsTest.java    From flink with Apache License 2.0
@Test
public void testFieldMappingLegacyCompositeType() {
	int[] indices = TypeMappingUtils.computePhysicalIndices(
		TableSchema.builder()
			.field("f1", DataTypes.BIGINT())
			.field("f0", DataTypes.STRING())
			.build().getTableColumns(),
		TypeConversions.fromLegacyInfoToDataType(Types.TUPLE(Types.STRING, Types.LONG)),
		Function.identity()
	);

	assertThat(indices, equalTo(new int[] {1, 0}));
}
 
Example #22
Source File: LogicalTypeChecksTest.java    From flink with Apache License 2.0
@Test
public void testIsCompositeTypeLegacyCompositeType() {
	DataType dataType = TypeConversions.fromLegacyInfoToDataType(new RowTypeInfo(Types.STRING, Types.INT));
	boolean isCompositeType = LogicalTypeChecks.isCompositeType(dataType.getLogicalType());

	assertThat(isCompositeType, is(true));
}
 
Example #23
Source File: LogicalTypeChecksTest.java    From flink with Apache License 2.0
@Test
public void testIsCompositeTypeLegacySimpleType() {
	DataType dataType = TypeConversions.fromLegacyInfoToDataType(Types.STRING);
	boolean isCompositeType = LogicalTypeChecks.isCompositeType(dataType.getLogicalType());

	assertThat(isCompositeType, is(false));
}
 
Example #24
Source File: HiveFunctionUtils.java    From flink with Apache License 2.0
static Serializable invokeSetArgs(
		Serializable function, Object[] constantArguments, LogicalType[] argTypes) {
	try {
		// See hive HiveFunction
		Method method = getSetArgsMethod(function);
		method.invoke(function, constantArguments, TypeConversions.fromLogicalToDataType(argTypes));
		return function;
	} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
		throw new RuntimeException(e);
	}
}
 
Example #25
Source File: FunctionLookupMock.java    From flink with Apache License 2.0
@Override
public PlannerTypeInferenceUtil getPlannerTypeInferenceUtil() {
	return (unresolvedCall, resolvedArgs) -> {
		FunctionDefinition functionDefinition = unresolvedCall.getFunctionDefinition();
		List<DataType> argumentTypes = resolvedArgs.stream()
			.map(ResolvedExpression::getOutputDataType)
			.collect(Collectors.toList());
		if (functionDefinition.equals(BuiltInFunctionDefinitions.EQUALS)) {
			return new TypeInferenceUtil.Result(
				argumentTypes,
				null,
				DataTypes.BOOLEAN()
			);
		} else if (functionDefinition.equals(BuiltInFunctionDefinitions.IS_NULL)) {
			return new TypeInferenceUtil.Result(
				argumentTypes,
				null,
				DataTypes.BOOLEAN()
			);
		} else if (functionDefinition instanceof ScalarFunctionDefinition) {
			return new TypeInferenceUtil.Result(
				argumentTypes,
				null,
				// We do not support a full legacy type inference here. We support only a static result
				// type
				TypeConversions.fromLegacyInfoToDataType(((ScalarFunctionDefinition) functionDefinition)
					.getScalarFunction()
					.getResultType(null)));
		}

		throw new IllegalArgumentException(
			"Unsupported builtin function in the test: " + unresolvedCall);
	};
}
 
Example #26
Source File: CsvTableSink.java    From flink with Apache License 2.0
@Override
public TableSink<Row> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
	if (this.fieldNames != null || this.fieldTypes != null) {
		throw new IllegalStateException(
			"CsvTableSink has already been configured field names and field types.");
	}
	DataType[] dataTypes = Arrays.stream(fieldTypes)
		.map(TypeConversions::fromLegacyInfoToDataType)
		.toArray(DataType[]::new);
	return new CsvTableSink(path, fieldDelim, numFiles, writeMode, fieldNames, dataTypes);
}
 
Example #27
Source File: StreamTableEnvironmentImpl.java    From flink with Apache License 2.0
@Override
public <T> DataStream<T> toAppendStream(Table table, TypeInformation<T> typeInfo) {
	OutputConversionModifyOperation modifyOperation = new OutputConversionModifyOperation(
		table.getQueryOperation(),
		TypeConversions.fromLegacyInfoToDataType(typeInfo),
		OutputConversionModifyOperation.UpdateMode.APPEND);
	return toDataStream(table, modifyOperation);
}
 
Example #28
Source File: CsvTableSourceTest.java    From flink with Apache License 2.0
@Override
protected TableSource<?> createTableSource(TableSchema requestedSchema) {
	CsvTableSource.Builder builder = CsvTableSource.builder()
		.path("ignored")
		.fieldDelimiter("|");

	requestedSchema.getTableColumns().forEach(
		column -> builder.field(column.getName(), TypeConversions.fromDataTypeToLegacyInfo(column.getType()))
	);

	return builder.build();
}
 
Example #29
Source File: FunctionCatalogOperatorTable.java    From flink with Apache License 2.0
private Optional<SqlFunction> convertAggregateFunction(
		FunctionIdentifier identifier,
		AggregateFunctionDefinition functionDefinition) {
	SqlFunction aggregateFunction = UserDefinedFunctionUtils.createAggregateSqlFunction(
		identifier,
		identifier.toString(),
		functionDefinition.getAggregateFunction(),
		TypeConversions.fromLegacyInfoToDataType(functionDefinition.getResultTypeInfo()),
		TypeConversions.fromLegacyInfoToDataType(functionDefinition.getAccumulatorTypeInfo()),
		typeFactory
	);
	return Optional.of(aggregateFunction);
}