Java Code Examples for org.apache.flink.table.types.utils.TypeConversions#fromDataTypeToLegacyInfo()

The following examples show how to use org.apache.flink.table.types.utils.TypeConversions#fromDataTypeToLegacyInfo(). You can vote up the examples you like or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: TimestampExtractorUtils.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Computes the physical field references that the given {@link TimestampExtractor}
 * needs in order to produce a timestamp.
 *
 * @param timestampExtractor extractor whose field accesses are resolved
 * @param physicalInputType physical input type read by the extractor
 * @param nameRemapping maps a logical field name to its physical counterpart;
 *                      the extractor operates on logical names but reads
 *                      physical fields
 * @return references to the physical fields the extractor accesses
 */
public static ResolvedFieldReference[] getAccessedFields(
		TimestampExtractor timestampExtractor,
		DataType physicalInputType,
		Function<String, String> nameRemapping) {

	final Function<String, ResolvedFieldReference> toFieldReference;
	if (LogicalTypeChecks.isCompositeType(physicalInputType.getLogicalType())) {
		// Composite input: resolve each accessed name against the expanded schema.
		TableSchema expandedSchema = DataTypeUtils.expandCompositeTypeToSchema(physicalInputType);
		toFieldReference = name -> mapToResolvedField(nameRemapping, expandedSchema, name);
	} else {
		// Atomic input: every access refers to the single field at index 0.
		toFieldReference = name -> new ResolvedFieldReference(
			name,
			TypeConversions.fromDataTypeToLegacyInfo(physicalInputType),
			0);
	}
	return getAccessedFields(timestampExtractor, toFieldReference);
}
 
Example 2
Source File: TimestampExtractorUtils.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Resolves a logical field name to a {@link ResolvedFieldReference} against the
 * given physical schema, applying the logical-to-physical name remapping first.
 *
 * @throws ValidationException if the remapped field does not exist in the schema
 */
private static ResolvedFieldReference mapToResolvedField(
		Function<String, String> nameRemapping,
		TableSchema schema,
		String arg) {
	final String physicalName = nameRemapping.apply(arg);

	// Locate the position of the physical field; fail if no such column exists.
	int fieldIndex = -1;
	for (int i = 0; i < schema.getFieldCount(); i++) {
		if (schema.getFieldName(i).get().equals(physicalName)) {
			fieldIndex = i;
			break;
		}
	}
	if (fieldIndex < 0) {
		throw new ValidationException(String.format("Field %s does not exist", physicalName));
	}

	// Bridge the column's DataType to the legacy TypeInformation stack.
	final TypeInformation<?> legacyTypeInfo = TypeConversions.fromDataTypeToLegacyInfo(
		schema.getTableColumn(fieldIndex).get().getType());
	return new ResolvedFieldReference(physicalName, legacyTypeInfo, fieldIndex);
}
 
Example 3
Source File: AggregateOperationFactory.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Extract result types for the aggregate or the table aggregate expression. For a table aggregate,
 * it may return multi result types when the composite return type is flattened.
 */
private Stream<DataType> extractAggregateResultTypes(ResolvedExpression expression) {
	if (!ApiExpressionUtils.isFunctionOfKind(expression, TABLE_AGGREGATE)) {
		// Plain aggregate: a single, unflattened result type.
		return Stream.of(expression.getOutputDataType());
	}
	// Table aggregate: flatten the composite output type into one DataType per field.
	TypeInformation<?> legacyInfo =
		TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType());
	return Stream.of(FieldInfoUtils.getFieldTypes(legacyInfo))
		.map(TypeConversions::fromLegacyInfoToDataType);
}
 
Example 4
Source File: FlinkPravegaTableITCase.java    From flink-connectors with Apache License 2.0 5 votes vote down vote up
@Test
public void testTableSourceUsingDescriptor() throws Exception {
    final StreamExecutionEnvironment writeEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    writeEnv.setParallelism(1);

    final Stream stream = Stream.of(SETUP_UTILS.getScope(), "testJsonTableSource1");
    SETUP_UTILS.createTestStream(stream.getStreamName(), 1);

    // Schema of the rows written to (and later read back from) the Pravega stream.
    final TableSchema schema = TableSchema.builder()
            .field("user", DataTypes.STRING())
            .field("uri", DataTypes.STRING())
            .field("accessTime", DataTypes.TIMESTAMP(3).bridgedTo(Timestamp.class))
            .build();
    final TypeInformation<Row> rowTypeInfo =
            (RowTypeInfo) TypeConversions.fromDataTypeToLegacyInfo(schema.toRowDataType());

    final PravegaConfig config = SETUP_UTILS.getPravegaConfig();

    // Produce some JSON-serialized rows into the stream.
    final DataStreamSource<Row> source = writeEnv
            .addSource(new TableEventSource(EVENT_COUNT_PER_SOURCE));

    final FlinkPravegaWriter<Row> sink = FlinkPravegaWriter.<Row>builder()
            .withPravegaConfig(config)
            .forStream(stream)
            .withSerializationSchema(new JsonRowSerializationSchema.Builder(rowTypeInfo).build())
            .withEventRouter((Row event) -> "fixedkey")
            .build();

    source.addSink(sink);
    Assert.assertNotNull(writeEnv.getExecutionPlan());
    writeEnv.execute("PopulateRowData");

    // Read the data back via the table source, in both streaming and batch mode.
    testTableSourceStreamingDescriptor(stream, config);
    testTableSourceBatchDescriptor(stream, config);
}
 
Example 5
Source File: FlinkPravegaTableSinkTest.java    From flink-connectors with Apache License 2.0 5 votes vote down vote up
@Test
public void testEmitDataStream() {
    final FlinkPravegaWriter<Row> writer = mock(FlinkPravegaWriter.class);
    final FlinkPravegaOutputFormat<Row> outputFormat = mock(FlinkPravegaOutputFormat.class);
    final FlinkPravegaTableSink tableSink =
            new FlinkPravegaTableSink(schema -> writer, schema -> outputFormat, TUPLE1);

    // Build a mock data stream whose row type matches the sink's schema.
    final TypeInformation<Row> rowType =
            (TypeInformation<Row>) TypeConversions.fromDataTypeToLegacyInfo(TUPLE1.toRowDataType());
    final DataStreamMock stream = new DataStreamMock(new StreamExecutionEnvironmentMock(), rowType);

    tableSink.emitDataStream(stream);

    // The table sink must attach a FlinkPravegaWriter-based sink function.
    assertTrue(FlinkPravegaWriter.class.isAssignableFrom(stream.sinkFunction.getClass()));
}
 
Example 6
Source File: AggregateOperationFactory.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Extract result types for the aggregate or the table aggregate expression. For a table aggregate,
 * it may return multi result types when the composite return type is flattened.
 */
private Stream<DataType> extractAggregateResultTypes(ResolvedExpression expression) {
	if (ApiExpressionUtils.isFunctionOfKind(expression, TABLE_AGGREGATE)) {
		// Table aggregate: flatten the composite output type into one DataType per field,
		// going through the legacy TypeInformation stack to extract the field types.
		TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType());
		return Stream.of(FieldInfoUtils.getFieldTypes(legacyInfo))
			.map(TypeConversions::fromLegacyInfoToDataType);
	} else {
		// Plain aggregate: a single result type, returned as-is.
		return Stream.of(expression.getOutputDataType());
	}
}
 
Example 7
Source File: PythonTableFunctionOperator.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
@SuppressWarnings("unchecked")
public void open() throws Exception {
	super.open();
	this.cRowWrapper = new StreamRecordCRowWrappingCollector(output);

	// Serializer for forwarded input rows, derived from the logical input type.
	RowTypeInfo inputRowTypeInfo = (RowTypeInfo) TypeConversions.fromDataTypeToLegacyInfo(
		TypeConversions.fromLogicalToDataType(inputType));
	CRowTypeInfo forwardedInputTypeInfo = new CRowTypeInfo(inputRowTypeInfo);
	forwardedInputSerializer = forwardedInputTypeInfo.createSerializer(getExecutionConfig());

	// Serializer matching the user-defined table function's output type.
	udtfOutputTypeSerializer = PythonTypeUtils.toFlinkTypeSerializer(userDefinedFunctionOutputType);
}
 
Example 8
Source File: PythonTableFunctionFlatMap.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);

	// Serializer for forwarded input rows, derived from the logical input type.
	final RowTypeInfo inputRowTypeInfo = (RowTypeInfo) TypeConversions.fromDataTypeToLegacyInfo(
		TypeConversions.fromLogicalToDataType(inputType));
	forwardedInputSerializer = inputRowTypeInfo.createSerializer(
		getRuntimeContext().getExecutionConfig());
}
 
Example 9
Source File: TypeInfoDataTypeConverter.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Converts a {@link DataType} to a {@link TypeInformation}, preferring the planner's
 * internal type-info classes when the conversion class matches an internal data
 * structure (e.g. {@code Decimal}, {@code BinaryString}, {@code BaseRow}) and
 * falling back to {@link TypeConversions#fromDataTypeToLegacyInfo} otherwise.
 *
 * @param dataType the data type to convert
 * @return type information equivalent to the given data type
 */
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
	Class<?> clazz = dataType.getConversionClass();
	if (clazz.isPrimitive()) {
		// Primitive conversion classes have a fixed mapping; short-circuit via the lookup map.
		final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
		if (foundTypeInfo != null) {
			return foundTypeInfo;
		}
	}
	LogicalType logicalType = fromDataTypeToLogicalType(dataType);
	switch (logicalType.getTypeRoot()) {
		case DECIMAL:
			DecimalType decimalType = (DecimalType) logicalType;
			// Internal Decimal class gets the planner type info; external users get BigDecimal.
			return clazz == Decimal.class ?
					new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()) :
					new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
		case CHAR:
		case VARCHAR: // ignore precision
			return clazz == BinaryString.class ?
					BinaryStringTypeInfo.INSTANCE :
					BasicTypeInfo.STRING_TYPE_INFO;
		case BINARY:
		case VARBINARY: // ignore precision
			return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
		case INTERVAL_YEAR_MONTH:
			return TimeIntervalTypeInfo.INTERVAL_MONTHS;
		case INTERVAL_DAY_TIME:
			return TimeIntervalTypeInfo.INTERVAL_MILLIS;
		case ARRAY:
			// Object element types convert recursively; primitive arrays use the legacy path.
			if (dataType instanceof CollectionDataType &&
					!isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
				return ObjectArrayTypeInfo.getInfoFor(
						fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
			} else {
				return TypeConversions.fromDataTypeToLegacyInfo(dataType);
			}
		case MAP:
			// Convert key and value types recursively.
			KeyValueDataType mapType = (KeyValueDataType) dataType;
			return new MapTypeInfo(
					fromDataTypeToTypeInfo(mapType.getKeyDataType()),
					fromDataTypeToTypeInfo(mapType.getValueDataType()));
		case MULTISET:
			return MultisetTypeInfo.getInfoFor(
					fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
		case ROW:
			if (BaseRow.class.isAssignableFrom(dataType.getConversionClass())) {
				// Internal row representation.
				return BaseRowTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
			} else if (Row.class == dataType.getConversionClass()) {
				// External Row: convert each field type recursively, preserving field names.
				FieldsDataType rowType = (FieldsDataType) dataType;
				RowType logicalRowType = (RowType) logicalType;
				return new RowTypeInfo(
						logicalRowType.getFieldNames().stream()
								.map(name -> rowType.getFieldDataTypes().get(name))
								.map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
								.toArray(TypeInformation[]::new),
						logicalRowType.getFieldNames().toArray(new String[0]));
			} else {
				return TypeConversions.fromDataTypeToLegacyInfo(dataType);
			}
		default:
			return TypeConversions.fromDataTypeToLegacyInfo(dataType);
	}
}
 
Example 10
Source File: FlinkPravegaTableSource.java    From flink-connectors with Apache License 2.0 4 votes vote down vote up
@SuppressWarnings("unchecked")
private TypeInformation<Row> getProducedTypeInformation() {
    // Bridge the produced DataType to the legacy TypeInformation stack.
    final TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(getProducedDataType());
    return (TypeInformation<Row>) legacyInfo;
}
 
Example 11
Source File: TypeInfoDataTypeConverter.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Converts a {@link DataType} to a {@link TypeInformation}, preferring the planner's
 * internal type-info classes when the conversion class matches an internal data
 * structure (e.g. {@code DecimalData}, {@code StringData}, {@code RowData}) and
 * falling back to {@link TypeConversions#fromDataTypeToLegacyInfo} otherwise.
 *
 * @param dataType the data type to convert
 * @return type information equivalent to the given data type
 */
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
	Class<?> clazz = dataType.getConversionClass();
	if (clazz.isPrimitive()) {
		// Primitive conversion classes have a fixed mapping; short-circuit via the lookup map.
		final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
		if (foundTypeInfo != null) {
			return foundTypeInfo;
		}
	}
	LogicalType logicalType = fromDataTypeToLogicalType(dataType);
	switch (logicalType.getTypeRoot()) {
		case TIMESTAMP_WITHOUT_TIME_ZONE:
			TimestampType timestampType = (TimestampType) logicalType;
			int precision = timestampType.getPrecision();
			// Rowtime/proctime timestamps (non-REGULAR kinds) keep the legacy conversion;
			// regular timestamps pick a type info based on conversion class and precision.
			if (timestampType.getKind() == TimestampKind.REGULAR) {
				return clazz == TimestampData.class ?
					new TimestampDataTypeInfo(precision) :
					(clazz == LocalDateTime.class ?
						((3 == precision) ?
							Types.LOCAL_DATE_TIME : new LegacyLocalDateTimeTypeInfo(precision)) :
						((3 == precision) ?
							Types.SQL_TIMESTAMP : new LegacyTimestampTypeInfo(precision)));
			} else {
				return TypeConversions.fromDataTypeToLegacyInfo(dataType);
			}
		case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
			LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
			int precisionLzTs = lzTs.getPrecision();
			// Same precision-sensitive selection for local-zoned timestamps.
			return clazz == TimestampData.class ?
				new TimestampDataTypeInfo(precisionLzTs) :
				(clazz == Instant.class ?
					((3 == precisionLzTs) ? Types.INSTANT : new LegacyInstantTypeInfo(precisionLzTs)) :
					TypeConversions.fromDataTypeToLegacyInfo(dataType));

		case DECIMAL:
			DecimalType decimalType = (DecimalType) logicalType;
			// Internal DecimalData gets the planner type info; external users get BigDecimal.
			return clazz == DecimalData.class ?
					new DecimalDataTypeInfo(decimalType.getPrecision(), decimalType.getScale()) :
					new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
		case CHAR:
		case VARCHAR: // ignore precision
			return clazz == StringData.class ?
					StringDataTypeInfo.INSTANCE :
					BasicTypeInfo.STRING_TYPE_INFO;
		case BINARY:
		case VARBINARY: // ignore precision
			return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
		case INTERVAL_YEAR_MONTH:
			return TimeIntervalTypeInfo.INTERVAL_MONTHS;
		case INTERVAL_DAY_TIME:
			return TimeIntervalTypeInfo.INTERVAL_MILLIS;
		case ARRAY:
			// Object element types convert recursively; primitive arrays use the legacy path.
			if (dataType instanceof CollectionDataType &&
					!isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
				return ObjectArrayTypeInfo.getInfoFor(
						fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
			} else {
				return TypeConversions.fromDataTypeToLegacyInfo(dataType);
			}
		case MAP:
			// Convert key and value types recursively.
			KeyValueDataType mapType = (KeyValueDataType) dataType;
			return new MapTypeInfo(
					fromDataTypeToTypeInfo(mapType.getKeyDataType()),
					fromDataTypeToTypeInfo(mapType.getValueDataType()));
		case MULTISET:
			return MultisetTypeInfo.getInfoFor(
					fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
		case ROW:
			if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
				// Internal row representation.
				return RowDataTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
			} else if (Row.class == dataType.getConversionClass()) {
				// External Row: convert each child type recursively, preserving field names.
				RowType logicalRowType = (RowType) logicalType;
				return new RowTypeInfo(
					dataType.getChildren()
						.stream()
						.map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
						.toArray(TypeInformation[]::new),
					logicalRowType.getFieldNames().toArray(new String[0]));
			} else {
				return TypeConversions.fromDataTypeToLegacyInfo(dataType);
			}
		case RAW:
			if (logicalType instanceof RawType) {
				final RawType<?> rawType = (RawType<?>) logicalType;
				return createWrapperTypeInfo(rawType);
			}
			return TypeConversions.fromDataTypeToLegacyInfo(dataType);
		default:
			return TypeConversions.fromDataTypeToLegacyInfo(dataType);
	}
}
 
Example 12
Source File: RowArrowSourceFunction.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the type information of the rows produced by this source function.
 *
 * <p>The unchecked cast is suppressed because the legacy conversion of
 * {@code dataType} is expected to yield a {@code TypeInformation<Row>} —
 * NOTE(review): presumably {@code dataType} is a row data type; confirm at
 * the declaration site.
 */
@Override
@SuppressWarnings("unchecked")
public TypeInformation<Row> getProducedType() {
	return (TypeInformation<Row>) TypeConversions.fromDataTypeToLegacyInfo(dataType);
}