Java Code Examples for org.apache.flink.table.types.utils.TypeConversions

The following examples show how to use org.apache.flink.table.types.utils.TypeConversions. These examples are extracted from open source projects; the originating project and source file are noted above each example.
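Before the examples, here is a minimal, self-contained sketch of the three conversion directions they all rely on: LogicalType to DataType, DataType to legacy TypeInformation, and legacy TypeInformation back to DataType. The class and variable names are illustrative only and do not come from any of the projects below.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.utils.TypeConversions;

// Illustrative sketch; class and variable names are not from the examples below.
public class TypeConversionsSketch {
	public static void main(String[] args) {
		// LogicalType -> DataType: attaches the default conversion class.
		LogicalType logicalType = new IntType();
		DataType dataType = TypeConversions.fromLogicalToDataType(logicalType);

		// DataType -> TypeInformation: for APIs that still expect the legacy type system.
		TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(dataType);

		// TypeInformation -> DataType: wraps legacy type info in the new type system.
		DataType fromLegacy = TypeConversions.fromLegacyInfoToDataType(Types.STRING);

		System.out.println(dataType + " / " + legacyInfo + " / " + fromLegacy);
	}
}

The examples that follow exercise these same entry points in context, often chaining two of them to bridge between the legacy and new type systems.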
Example 1
Source Project: flink   Source File: OperationTreeBuilder.java    License: Apache License 2.0
private void validateAlias(
	List<String> aliases,
	ResolvedExpression resolvedExpression,
	Boolean isRowbasedAggregate) {

	int length = TypeConversions
		.fromDataTypeToLegacyInfo(resolvedExpression.getOutputDataType()).getArity();
	int callArity = isRowbasedAggregate ? length : 1;
	int aliasesSize = aliases.size();

	if ((0 < aliasesSize) && (aliasesSize != callArity)) {
		throw new ValidationException(String.format(
			"List of column aliases must have same degree as table; " +
				"the returned table of function '%s' has " +
				"%d columns, whereas alias list has %d columns",
			resolvedExpression,
			callArity,
			aliasesSize));
	}
}
 
Example 2
Source Project: flink   Source File: TestCsvFileSystemFormatFactory.java    License: Apache License 2.0
private static void writeCsvToStream(
		DataType[] types,
		RowData rowData,
		OutputStream stream) throws IOException {
	LogicalType[] fieldTypes = Arrays.stream(types)
			.map(DataType::getLogicalType)
			.toArray(LogicalType[]::new);
	DataFormatConverters.DataFormatConverter converter = DataFormatConverters.getConverterForDataType(
			TypeConversions.fromLogicalToDataType(RowType.of(fieldTypes)));

	Row row = (Row) converter.toExternal(rowData);
	StringBuilder builder = new StringBuilder();
	Object o;
	for (int i = 0; i < row.getArity(); i++) {
		if (i > 0) {
			builder.append(DEFAULT_FIELD_DELIMITER);
		}
		if ((o = row.getField(i)) != null) {
			builder.append(o);
		}
	}
	String str = builder.toString();
	stream.write(str.getBytes(StandardCharsets.UTF_8));
	stream.write(DEFAULT_LINE_DELIMITER.getBytes(StandardCharsets.UTF_8));
}
 
Example 3
Source Project: flink   Source File: HiveRowDataPartitionComputer.java    License: Apache License 2.0
public HiveRowDataPartitionComputer(
		HiveShim hiveShim,
		String defaultPartValue,
		String[] columnNames,
		DataType[] columnTypes,
		String[] partitionColumns) {
	super(defaultPartValue, columnNames, columnTypes, partitionColumns);
	this.partitionConverters = Arrays.stream(partitionTypes)
			.map(TypeConversions::fromLogicalToDataType)
			.map(DataFormatConverters::getConverterForDataType)
			.toArray(DataFormatConverters.DataFormatConverter[]::new);
	this.hiveObjectConversions = new HiveObjectConversion[partitionIndexes.length];
	for (int i = 0; i < hiveObjectConversions.length; i++) {
		DataType partColType = columnTypes[partitionIndexes[i]];
		ObjectInspector objectInspector = HiveInspectors.getObjectInspector(partColType);
		hiveObjectConversions[i] = HiveInspectors.getConversion(objectInspector, partColType.getLogicalType(), hiveShim);
	}
}
 
Example 4
Source Project: flink   Source File: OperatorBindingCallContext.java    License: Apache License 2.0
public OperatorBindingCallContext(
		DataTypeFactory dataTypeFactory,
		FunctionDefinition definition,
		SqlOperatorBinding binding) {
	super(
		dataTypeFactory,
		definition,
		binding.getOperator().getNameAsId().toString());

	this.binding = binding;
	this.argumentDataTypes = new AbstractList<DataType>() {
		@Override
		public DataType get(int pos) {
			final LogicalType logicalType = FlinkTypeFactory.toLogicalType(binding.getOperandType(pos));
			return TypeConversions.fromLogicalToDataType(logicalType);
		}

		@Override
		public int size() {
			return binding.getOperandCount();
		}
	};
}
 
Example 5
Source Project: flink   Source File: PlannerQueryOperation.java    License: Apache License 2.0
public PlannerQueryOperation(RelNode calciteTree) {
	this.calciteTree = calciteTree;

	RelDataType rowType = calciteTree.getRowType();
	String[] fieldNames = rowType.getFieldNames().toArray(new String[0]);
	DataType[] fieldTypes = rowType.getFieldList()
		.stream()
		.map(field -> {
			final DataType fieldType = TypeConversions
				.fromLegacyInfoToDataType(FlinkTypeFactory.toTypeInfo(field.getType()));
			final boolean nullable = field.getType().isNullable();
			if (nullable != fieldType.getLogicalType().isNullable()
				&& !FlinkTypeFactory.isTimeIndicatorType(field.getType())) {
				return nullable ? fieldType.nullable() : fieldType.notNull();
			} else {
				return fieldType;
			}
		})
		.toArray(DataType[]::new);

	this.tableSchema = TableSchema.builder().fields(fieldNames, fieldTypes).build();
}
 
Example 6
Source Project: flink   Source File: TimestampExtractorUtils.java    License: Apache License 2.0
/**
 * Retrieves all field accesses needed for the given {@link TimestampExtractor}.
 *
 * @param timestampExtractor Extractor for which to construct array of field accesses.
 * @param physicalInputType Physical input type that the timestamp extractor accesses.
 * @param nameRemapping Additional remapping of a logical to a physical field name.
 *                      TimestampExtractor works with logical names, but accesses physical
 *                      fields
 * @return Array of physical field references.
 */
public static ResolvedFieldReference[] getAccessedFields(
		TimestampExtractor timestampExtractor,
		DataType physicalInputType,
		Function<String, String> nameRemapping) {

	final Function<String, ResolvedFieldReference> fieldMapping;
	if (LogicalTypeChecks.isCompositeType(physicalInputType.getLogicalType())) {
		TableSchema schema = DataTypeUtils.expandCompositeTypeToSchema(physicalInputType);
		fieldMapping = (arg) -> mapToResolvedField(nameRemapping, schema, arg);
	} else {
		fieldMapping = (arg) -> new ResolvedFieldReference(
			arg,
			TypeConversions.fromDataTypeToLegacyInfo(physicalInputType),
			0);
	}
	return getAccessedFields(timestampExtractor, fieldMapping);
}
 
Example 7
Source Project: flink   Source File: TimestampExtractorUtils.java    License: Apache License 2.0
private static ResolvedFieldReference mapToResolvedField(
		Function<String, String> nameRemapping,
		TableSchema schema,
		String arg) {
	String remappedName = nameRemapping.apply(arg);

	int idx = IntStream.range(0, schema.getFieldCount())
		.filter(i -> schema.getFieldName(i).get().equals(remappedName))
		.findFirst()
		.orElseThrow(() -> new ValidationException(String.format("Field %s does not exist", remappedName)));

	TypeInformation<?> dataType = TypeConversions.fromDataTypeToLegacyInfo(schema.getTableColumn(idx)
		.get()
		.getType());
	return new ResolvedFieldReference(remappedName, dataType, idx);
}
 
Example 8
Source Project: flink   Source File: TypeMappingUtilsTest.java    License: Apache License 2.0
@Test
public void testFieldMappingLegacyDecimalTypeNotMatchingPrecision() {
	thrown.expect(ValidationException.class);
	thrown.expectMessage("Type DECIMAL(38, 10) of table field 'f0' does not match with the physical type" +
		" LEGACY('DECIMAL', 'DECIMAL') of the 'f0' field of the TableSource return type.");
	thrown.expectCause(allOf(
		instanceOf(ValidationException.class),
		hasMessage(equalTo("Legacy decimal type can only be mapped to DECIMAL(38, 18)."))));

	int[] indices = TypeMappingUtils.computePhysicalIndices(
		TableSchema.builder()
			.field("f0", DECIMAL(38, 10))
			.build().getTableColumns(),
		ROW(FIELD("f0", TypeConversions.fromLegacyInfoToDataType(Types.BIG_DEC))),
		Function.identity()
	);

	assertThat(indices, equalTo(new int[] {0}));
}
 
Example 9
Source Project: flink   Source File: TypeMappingUtilsTest.java    License: Apache License 2.0
@Test
public void testFieldMappingLegacyCompositeTypeWithRenaming() {
	int[] indices = TypeMappingUtils.computePhysicalIndices(
		TableSchema.builder()
			.field("a", DataTypes.BIGINT())
			.field("b", DataTypes.STRING())
			.build().getTableColumns(),
		TypeConversions.fromLegacyInfoToDataType(Types.TUPLE(Types.STRING, Types.LONG)),
		str -> {
			switch (str) {
				case "a":
					return "f1";
				case "b":
					return "f0";
				default:
					throw new AssertionError();
			}
		}
	);

	assertThat(indices, equalTo(new int[]{1, 0}));
}
 
Example 10
Source Project: flink   Source File: TypeMappingUtilsTest.java    License: Apache License 2.0
@Test
public void testCheckPhysicalLogicalTypeCompatible() {
	TableSchema tableSchema = TableSchema.builder()
							.field("a", DataTypes.VARCHAR(2))
							.field("b", DataTypes.DECIMAL(20, 2))
							.build();
	TableSink tableSink = new TestTableSink(tableSchema);
	LegacyTypeInformationType legacyDataType = (LegacyTypeInformationType) tableSink.getConsumedDataType()
													.getLogicalType();
	TypeInformation legacyTypeInfo = ((TupleTypeInfo) legacyDataType.getTypeInformation()).getTypeAt(1);
	DataType physicalType = TypeConversions.fromLegacyInfoToDataType(legacyTypeInfo);
	TableSchema physicSchema = DataTypeUtils.expandCompositeTypeToSchema(physicalType);
	DataType[] logicalDataTypes = tableSchema.getFieldDataTypes();
	DataType[] physicalDataTypes = physicSchema.getFieldDataTypes();
	for (int i = 0; i < logicalDataTypes.length; i++) {
		TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
				physicalDataTypes[i].getLogicalType(),
				logicalDataTypes[i].getLogicalType(),
				"physicalField",
				"logicalField",
				false);
	}
}
 
Example 11
Source Project: flink   Source File: ValuesOperationFactory.java    License: Apache License 2.0
private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
	List<LogicalType> typesAtIPosition = extractLogicalTypesAtPosition(resolvedRows, i);

	LogicalType logicalType = LogicalTypeMerging.findCommonType(typesAtIPosition)
		.orElseThrow(() -> {
			Set<DataType> columnTypes = resolvedRows.stream()
				.map(row -> row.get(i).getOutputDataType())
				.collect(Collectors.toCollection(LinkedHashSet::new));

			return new ValidationException(String.format(
				"Types in fromValues(...) must have a common super type. Could not find a common type" +
					" for all rows at column %d.\n" +
					"Could not find a common super type for types: %s",
				i,
				columnTypes));
		});

	return TypeConversions.fromLogicalToDataType(logicalType);
}
 
Example 12
Source Project: flink   Source File: PythonTableFunctionOperator.java    License: Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public void open() throws Exception {
	super.open();
	this.cRowWrapper = new StreamRecordCRowWrappingCollector(output);
	CRowTypeInfo forwardedInputTypeInfo = new CRowTypeInfo(
		(RowTypeInfo) TypeConversions.fromDataTypeToLegacyInfo(
			TypeConversions.fromLogicalToDataType(inputType)));
	forwardedInputSerializer = forwardedInputTypeInfo.createSerializer(getExecutionConfig());
	udtfOutputTypeSerializer = PythonTypeUtils.toFlinkTypeSerializer(userDefinedFunctionOutputType);
}
 
Example 13
Source Project: flink   Source File: CallBindingCallContext.java    License: Apache License 2.0
private static @Nullable DataType convertOutputType(SqlCallBinding binding, @Nullable RelDataType returnType) {
	if (returnType == null || returnType.equals(binding.getValidator().getUnknownType())) {
		return null;
	} else {
		final LogicalType logicalType = FlinkTypeFactory.toLogicalType(returnType);
		return TypeConversions.fromLogicalToDataType(logicalType);
	}
}
 
Example 14
Source Project: flink   Source File: RowArrowSourceFunctionTest.java    License: Apache License 2.0
@BeforeClass
public static void init() {
	fieldTypes.add(new VarCharType());
	List<RowType.RowField> rowFields = new ArrayList<>();
	for (int i = 0; i < fieldTypes.size(); i++) {
		rowFields.add(new RowType.RowField("f" + i, fieldTypes.get(i)));
	}
	rowType = new RowType(rowFields);
	dataType = TypeConversions.fromLogicalToDataType(rowType);
	allocator = ArrowUtils.getRootAllocator().newChildAllocator("stdout", 0, Long.MAX_VALUE);
}
 
Example 15
Source Project: flink   Source File: TestRowDataCsvInputFormat.java    License: Apache License 2.0
public TestRowDataCsvInputFormat(
		Path[] paths,
		TableSchema schema,
		List<String> partitionKeys,
		String defaultPartValue,
		int[] selectFields,
		long limit) {
	this.partitionKeys = partitionKeys;
	this.defaultPartValue = defaultPartValue;
	this.selectFields = selectFields;
	this.limit = limit;
	RowTypeInfo rowType = (RowTypeInfo) schema.toRowType();
	this.fieldTypes = Arrays.asList(rowType.getFieldTypes());
	this.fieldNames = Arrays.asList(rowType.getFieldNames());

	List<String> csvFieldNames = fieldNames.stream()
			.filter(name -> !partitionKeys.contains(name)).collect(Collectors.toList());

	List<String> selectFieldNames = Arrays.stream(selectFields)
			.mapToObj(fieldNames::get)
			.collect(Collectors.toList());
	List<String> csvSelectFieldNames = selectFieldNames.stream()
			.filter(name -> !partitionKeys.contains(name)).collect(Collectors.toList());
	List<TypeInformation> csvSelectTypes = csvSelectFieldNames.stream()
			.map(name -> fieldTypes.get(fieldNames.indexOf(name))).collect(Collectors.toList());
	this.csvSelectConverters = csvSelectTypes.stream()
			.map(TypeConversions::fromLegacyInfoToDataType)
			.map(DataFormatConverters::getConverterForDataType)
			.collect(Collectors.toList());
	int[] csvSelectFields = csvSelectFieldNames.stream().mapToInt(csvFieldNames::indexOf).toArray();
	this.inputFormat = new RowCsvInputFormat(
			null, csvSelectTypes.toArray(new TypeInformation[0]), csvSelectFields);
	this.inputFormat.setFilePaths(paths);

	this.csvFieldMapping = csvSelectFieldNames.stream().mapToInt(selectFieldNames::indexOf).toArray();
	this.emitted = 0;
}
 
Example 16
Source Project: flink   Source File: TableSourceValidation.java    License: Apache License 2.0
private static void validateTimestampExtractorArguments(
		List<RowtimeAttributeDescriptor> descriptors,
		TableSource<?> tableSource) {
	if (descriptors.size() == 1) {
		RowtimeAttributeDescriptor descriptor = descriptors.get(0);
		// look up extractor input fields in return type
		String[] extractorInputFields = descriptor.getTimestampExtractor().getArgumentFields();
		TypeInformation[] physicalTypes = Arrays.stream(extractorInputFields)
			.map(fieldName -> resolveField(fieldName, tableSource))
			.map(resolvedField -> TypeConversions.fromDataTypeToLegacyInfo(resolvedField.getType()))
			.toArray(TypeInformation[]::new);
		// validate timestamp extractor
		descriptor.getTimestampExtractor().validateArgumentFields(physicalTypes);
	}
}
 
Example 17
Source Project: flink   Source File: ArrayFieldReader.java    License: Apache License 2.0
private Class<?> getElementClass(LogicalType elementType) {
	DataType dataType = TypeConversions.fromLogicalToDataType(elementType);
	if (elementType instanceof TimestampType) {
		// the default conversion class is java.time.LocalDateTime
		dataType = dataType.bridgedTo(Timestamp.class);
	} else if (elementType instanceof DateType) {
		// the default conversion class is java.time.LocalDate
		dataType = dataType.bridgedTo(Date.class);
	} else if (elementType instanceof TimeType) {
		// the default conversion class is java.time.LocalTime
		dataType = dataType.bridgedTo(Time.class);
	}
	return dataType.getConversionClass();
}
 
Example 18
Source Project: flink   Source File: AggregateOperationFactory.java    License: Apache License 2.0
/**
 * Extract result types for the aggregate or the table aggregate expression. For a table aggregate,
 * it may return multi result types when the composite return type is flattened.
 */
private Stream<DataType> extractAggregateResultTypes(ResolvedExpression expression) {
	if (ApiExpressionUtils.isFunctionOfKind(expression, TABLE_AGGREGATE)) {
		TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType());
		return Stream.of(FieldInfoUtils.getFieldTypes(legacyInfo))
			.map(TypeConversions::fromLegacyInfoToDataType);
	} else {
		return Stream.of(expression.getOutputDataType());
	}
}
 
Example 19
Source Project: flink   Source File: StreamTableEnvironmentImpl.java    License: Apache License 2.0
@Override
public <T> DataStream<T> toAppendStream(Table table, TypeInformation<T> typeInfo) {
	OutputConversionModifyOperation modifyOperation = new OutputConversionModifyOperation(
		table.getQueryOperation(),
		TypeConversions.fromLegacyInfoToDataType(typeInfo),
		OutputConversionModifyOperation.UpdateMode.APPEND);
	return toDataStream(table, modifyOperation);
}
 
Example 20
Source Project: flink   Source File: FunctionCatalogOperatorTable.java    License: Apache License 2.0
private Optional<SqlFunction> convertAggregateFunction(
		String name,
		AggregateFunctionDefinition functionDefinition) {
	SqlFunction aggregateFunction = UserDefinedFunctionUtils.createAggregateSqlFunction(
		name,
		name,
		functionDefinition.getAggregateFunction(),
		TypeConversions.fromLegacyInfoToDataType(functionDefinition.getResultTypeInfo()),
		TypeConversions.fromLegacyInfoToDataType(functionDefinition.getAccumulatorTypeInfo()),
		typeFactory
	);
	return Optional.of(aggregateFunction);
}
 
Example 21
Source Project: flink   Source File: FunctionCatalogOperatorTable.java    License: Apache License 2.0
private Optional<SqlFunction> convertTableFunction(String name, TableFunctionDefinition functionDefinition) {
	SqlFunction tableFunction = UserDefinedFunctionUtils.createTableSqlFunction(
		name,
		name,
		functionDefinition.getTableFunction(),
		TypeConversions.fromLegacyInfoToDataType(functionDefinition.getResultType()),
		typeFactory
	);
	return Optional.of(tableFunction);
}
 
Example 22
Source Project: flink   Source File: HiveFunctionUtils.java    License: Apache License 2.0
static Serializable invokeSetArgs(
		Serializable function, Object[] constantArguments, LogicalType[] argTypes) {
	try {
		// See hive HiveFunction
		Method method = getSetArgsMethod(function);
		method.invoke(function, constantArguments, TypeConversions.fromLogicalToDataType(argTypes));
		return function;
	} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
		throw new RuntimeException(e);
	}
}
 
Example 23
Source Project: flink-connectors   Source File: FlinkPravegaTableITCase.java    License: Apache License 2.0
@Test
public void testTableSourceUsingDescriptor() throws Exception {
    StreamExecutionEnvironment execEnvWrite = StreamExecutionEnvironment.getExecutionEnvironment();
    execEnvWrite.setParallelism(1);

    Stream stream = Stream.of(SETUP_UTILS.getScope(), "testJsonTableSource1");
    SETUP_UTILS.createTestStream(stream.getStreamName(), 1);

    // read data from the stream using Table reader
    TableSchema tableSchema = TableSchema.builder()
            .field("user", DataTypes.STRING())
            .field("uri", DataTypes.STRING())
            .field("accessTime", DataTypes.TIMESTAMP(3).bridgedTo(Timestamp.class))
            .build();
    TypeInformation<Row> typeInfo = (RowTypeInfo) TypeConversions.fromDataTypeToLegacyInfo(tableSchema.toRowDataType());

    PravegaConfig pravegaConfig = SETUP_UTILS.getPravegaConfig();

    // Write some data to the stream
    DataStreamSource<Row> dataStream = execEnvWrite
            .addSource(new TableEventSource(EVENT_COUNT_PER_SOURCE));

    FlinkPravegaWriter<Row> pravegaSink = FlinkPravegaWriter.<Row>builder()
            .withPravegaConfig(pravegaConfig)
            .forStream(stream)
            .withSerializationSchema(new JsonRowSerializationSchema.Builder(typeInfo).build())
            .withEventRouter((Row event) -> "fixedkey")
            .build();

    dataStream.addSink(pravegaSink);
    Assert.assertNotNull(execEnvWrite.getExecutionPlan());
    execEnvWrite.execute("PopulateRowData");

    testTableSourceStreamingDescriptor(stream, pravegaConfig);
    testTableSourceBatchDescriptor(stream, pravegaConfig);
}
 
Example 24
Source Project: flink   Source File: AbstractJdbcRowConverter.java    License: Apache License 2.0
protected JdbcSerializationConverter wrapIntoNullableExternalConverter(JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
	final int sqlType = JdbcTypeUtil.typeInformationToSqlType(TypeConversions.fromDataTypeToLegacyInfo(
		TypeConversions.fromLogicalToDataType(type)));
	return (val, index, statement)  -> {
		if (val == null || val.isNullAt(index) || LogicalTypeRoot.NULL.equals(type.getTypeRoot())) {
			statement.setNull(index + 1, sqlType);
		} else {
			jdbcSerializationConverter.serialize(val, index, statement);
		}
	};
}
 
Example 25
Source Project: flink   Source File: HBaseTableSchema.java    License: Apache License 2.0
/**
 * Returns the types of all registered column qualifiers of a specific column family.
 *
 * @param family The name of the column family for which the column qualifier types are returned.
 * @return The types of all registered column qualifiers of a specific column family.
 */
public TypeInformation<?>[] getQualifierTypes(String family) {
	DataType[] dataTypes = getQualifierDataTypes(family);
	return Arrays.stream(dataTypes)
		.map(TypeConversions::fromDataTypeToLegacyInfo)
		.toArray(TypeInformation[]::new);
}
 
Example 26
Source Project: flink   Source File: FunctionCatalogOperatorTable.java    License: Apache License 2.0
private Optional<SqlFunction> convertTableFunction(FunctionIdentifier identifier, TableFunctionDefinition functionDefinition) {
	SqlFunction tableFunction = UserDefinedFunctionUtils.createTableSqlFunction(
		identifier,
		identifier.toString(),
		functionDefinition.getTableFunction(),
		TypeConversions.fromLegacyInfoToDataType(functionDefinition.getResultType()),
		typeFactory
	);
	return Optional.of(tableFunction);
}
 
Example 27
@Override
@SuppressWarnings("unchecked")
public void open() throws Exception {
	super.open();
	this.cRowWrapper = new StreamRecordCRowWrappingCollector(output);

	CRowTypeInfo forwardedInputTypeInfo = new CRowTypeInfo(new RowTypeInfo(
		Arrays.stream(forwardedFields)
			.mapToObj(i -> inputType.getFields().get(i))
			.map(RowType.RowField::getType)
			.map(TypeConversions::fromLogicalToDataType)
			.map(TypeConversions::fromDataTypeToLegacyInfo)
			.toArray(TypeInformation[]::new)));
	forwardedInputSerializer = forwardedInputTypeInfo.createSerializer(getExecutionConfig());
}
 
Example 28
Source Project: flink   Source File: CommonInputTypeStrategy.java    License: Apache License 2.0
@Override
public Optional<List<DataType>> inferInputTypes(
		CallContext callContext,
		boolean throwOnFailure) {
	List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
	List<LogicalType> argumentTypes = argumentDataTypes
		.stream()
		.map(DataType::getLogicalType)
		.collect(Collectors.toList());

	if (argumentTypes.stream().anyMatch(CommonInputTypeStrategy::isLegacyType)) {
		return Optional.of(argumentDataTypes);
	}

	Optional<LogicalType> commonType = LogicalTypeMerging.findCommonType(argumentTypes);

	if (!commonType.isPresent()) {
		if (throwOnFailure) {
			throw callContext.newValidationError(
				"Could not find a common type for arguments: %s",
				argumentDataTypes);
		}
		return Optional.empty();
	}

	return commonType.map(type -> Collections.nCopies(
		argumentTypes.size(),
		TypeConversions.fromLogicalToDataType(type)));
}
 
Example 29
Source Project: flink   Source File: MapInputTypeStrategy.java    License: Apache License 2.0
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
	List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
	if (argumentDataTypes.size() == 0) {
		return Optional.empty();
	}

	List<LogicalType> keyTypes = new ArrayList<>();
	List<LogicalType> valueTypes = new ArrayList<>();

	for (int i = 0; i < argumentDataTypes.size(); i++) {
		LogicalType logicalType = argumentDataTypes.get(i).getLogicalType();
		if (i % 2 == 0) {
			keyTypes.add(logicalType);
		} else {
			valueTypes.add(logicalType);
		}
	}
	Optional<LogicalType> commonKeyType = LogicalTypeMerging.findCommonType(keyTypes);
	Optional<LogicalType> commonValueType = LogicalTypeMerging.findCommonType(valueTypes);

	if (!commonKeyType.isPresent() || !commonValueType.isPresent()) {
		return Optional.empty();
	}

	DataType keyType = TypeConversions.fromLogicalToDataType(commonKeyType.get());
	DataType valueType = TypeConversions.fromLogicalToDataType(commonValueType.get());
	return Optional.of(IntStream.range(0, argumentDataTypes.size())
		.mapToObj(idx -> {
			if (idx % 2 == 0) {
				return keyType;
			} else {
				return valueType;
			}
		})
		.collect(Collectors.toList()));
}