Java Code Examples for org.apache.flink.table.types.DataType#getLogicalType()

The following examples show how to use org.apache.flink.table.types.DataType#getLogicalType(). All examples are taken from the Apache Flink project (Apache License 2.0); the source file is noted above each example.
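Before diving in, a quick orientation: a DataType pairs a logical type (the SQL-level type, including nullability and precision) with a conversion class describing its JVM representation, and getLogicalType() returns only the former. A minimal sketch of that distinction (the printed values reflect Flink's defaults; later sketches on this page assume the same imports):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;

DataType dataType = DataTypes.TIMESTAMP(3);
// The JVM-side representation of the type:
System.out.println(dataType.getConversionClass());  // class java.time.LocalDateTime
// The pure SQL-level type, without conversion information:
LogicalType logicalType = dataType.getLogicalType();
System.out.println(logicalType.asSummaryString());  // TIMESTAMP(3)
System.out.println(logicalType.isNullable());       // true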
Example 1
Source File: CsvRowDataSerDeSchemaTest.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
private Row testDeserialization(
		boolean allowParsingErrors,
		boolean allowComments,
		String string) throws Exception {
	DataType dataType = ROW(
		FIELD("f0", STRING()),
		FIELD("f1", INT()),
		FIELD("f2", STRING()));
	RowType rowType = (RowType) dataType.getLogicalType();
	CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
		new CsvRowDataDeserializationSchema.Builder(rowType, new RowDataTypeInfo(rowType))
			.setIgnoreParseErrors(allowParsingErrors)
			.setAllowComments(allowComments);
	RowData deserializedRow = deserialize(deserSchemaBuilder, string);
	return (Row) DataFormatConverters.getConverterForDataType(dataType)
		.toExternal(deserializedRow);
}
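A hedged sketch of calling this helper (the CSV line is illustrative):

// Parses one CSV line into the ROW<f0 STRING, f1 INT, f2 STRING> schema declared above,
// allowing comment lines but failing on parse errors.
Row row = testDeserialization(false, true, "hello,42,world");
// After DataFormatConverters maps the internal RowData back to the external format,
// row.getField(1) is the Integer 42.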
 
Example 2
Source File: DataTypeUtils.java    From flink with Apache License 2.0
@Override
public DataType visit(KeyValueDataType keyValueDataType) {
	DataType newKeyType = keyValueDataType.getKeyDataType().accept(this);
	DataType newValueType = keyValueDataType.getValueDataType().accept(this);
	LogicalType logicalType = keyValueDataType.getLogicalType();
	LogicalType newLogicalType;
	if (logicalType instanceof MapType) {
		newLogicalType = new MapType(
			logicalType.isNullable(),
			newKeyType.getLogicalType(),
			newValueType.getLogicalType());
	} else {
		throw new UnsupportedOperationException("Unsupported logical type : " + logicalType);
	}
	return transformation.transform(new KeyValueDataType(newLogicalType, newKeyType, newValueType));
}
 
Example 3
Source File: DataTypeExtractor.java    From flink with Apache License 2.0
/**
 * Use closest class for data type if possible. Even though a hint might have provided some data
 * type, in many cases, the conversion class can be enriched with the extraction type itself.
 */
private DataType closestBridging(DataType dataType, @Nullable Class<?> clazz) {
	// no context class or conversion class is already more specific than context class
	if (clazz == null || clazz.isAssignableFrom(dataType.getConversionClass())) {
		return dataType;
	}
	final LogicalType logicalType = dataType.getLogicalType();
	final boolean supportsConversion = logicalType.supportsInputConversion(clazz) ||
		logicalType.supportsOutputConversion(clazz);
	if (supportsConversion) {
		return dataType.bridgedTo(clazz);
	}
	return dataType;
}
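To see the bridging step in isolation, a sketch using Flink's default conversion classes:

DataType ts = DataTypes.TIMESTAMP(3);  // default conversion: java.time.LocalDateTime
LogicalType logical = ts.getLogicalType();
// TIMESTAMP also accepts java.sql.Timestamp, so the data type can be enriched:
System.out.println(logical.supportsOutputConversion(java.sql.Timestamp.class));  // true
DataType bridged = ts.bridgedTo(java.sql.Timestamp.class);
System.out.println(bridged.getConversionClass());  // class java.sql.Timestamp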
 
Example 4
Source File: StructuredObjectConverter.java    From flink with Apache License 2.0
/**
 * Creates a {@link DataStructureConverter} for the given structured type.
 *
 * <p>Note: We do not validate whether the data type and the structured type implementation
 * match. This must have been done earlier in the {@link DataTypeFactory}.
 */
@SuppressWarnings("RedundantCast")
private static StructuredObjectConverter<?> createOrError(DataType dataType) {
	final List<DataType> fields = dataType.getChildren();

	final DataStructureConverter<Object, Object>[] fieldConverters = fields.stream()
		.map(dt -> (DataStructureConverter<Object, Object>) DataStructureConverters.getConverter(dt))
		.toArray(DataStructureConverter[]::new);

	final RowData.FieldGetter[] fieldGetters = IntStream
		.range(0, fields.size())
		.mapToObj(pos -> RowData.createFieldGetter(fields.get(pos).getLogicalType(), pos))
		.toArray(RowData.FieldGetter[]::new);

	final Class<?>[] fieldClasses = fields.stream()
		.map(DataType::getConversionClass)
		.toArray(Class[]::new);

	final StructuredType structuredType = (StructuredType) dataType.getLogicalType();

	final Class<?> implementationClass = structuredType.getImplementationClass()
		.orElseThrow(IllegalStateException::new);

	final String converterName = implementationClass.getName().replace('.', '$') + "$Converter";
	final String converterCode = generateCode(
			converterName,
			implementationClass,
			getFieldNames(structuredType).toArray(new String[0]),
			fieldClasses);

	return new StructuredObjectConverter<>(
		fieldConverters,
		fieldGetters,
		converterName,
		converterCode
	);
}
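The field getters built above can also be constructed in isolation; a small sketch (type and position are illustrative):

// A getter bound to the INT logical type at field position 0.
RowData.FieldGetter getter = RowData.createFieldGetter(DataTypes.INT().getLogicalType(), 0);
// For some RowData row, getter.getFieldOrNull(row) returns the Integer at position 0,
// or null if that field is null.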
 
Example 5
Source File: ValueLiteralExpression.java    From flink with Apache License 2.0
private static void validateValueDataType(Object value, DataType dataType) {
	final LogicalType logicalType = dataType.getLogicalType();
	if (value == null) {
		if (!logicalType.isNullable()) {
			throw new ValidationException(
				String.format(
					"Data type '%s' does not support null values.",
					dataType));
		}
		return;
	}
	final Class<?> candidate = value.getClass();
	// ensure value and data type match
	if (!dataType.getConversionClass().isAssignableFrom(candidate)) {
		throw new ValidationException(
			String.format(
				"Data type '%s' with conversion class '%s' does not support a value literal of class '%s'.",
				dataType,
				dataType.getConversionClass().getName(),
				value.getClass().getName()));
	}
	// check for proper input as this cannot be checked in data type
	if (!logicalType.supportsInputConversion(candidate)) {
		throw new ValidationException(
			String.format(
				"Data type '%s' does not support a conversion from class '%s'.",
				dataType,
				candidate.getName()));
	}
}
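A sketch of the individual checks this method relies on, using a NOT NULL string type:

DataType stringType = DataTypes.STRING().notNull();
LogicalType logical = stringType.getLogicalType();
System.out.println(logical.isNullable());                           // false -> null literals rejected
System.out.println(logical.supportsInputConversion(String.class));  // true  -> "foo" is a valid literal
System.out.println(logical.supportsInputConversion(Integer.class)); // false -> 42 is rejected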
 
Example 6
Source File: AndArgumentTypeStrategy.java    From flink with Apache License 2.0
@Override
public Optional<DataType> inferArgumentType(CallContext callContext, int argumentPos, boolean throwOnFailure) {
	final DataType actualDataType = callContext.getArgumentDataTypes().get(argumentPos);
	final LogicalType actualType = actualDataType.getLogicalType();

	Optional<DataType> closestDataType = Optional.empty();
	for (ArgumentTypeStrategy strategy : argumentStrategies) {
		final Optional<DataType> inferredDataType = strategy.inferArgumentType(
			callContext,
			argumentPos,
			throwOnFailure);
		// argument type does not match at all
		if (!inferredDataType.isPresent()) {
			return Optional.empty();
		}
		final LogicalType inferredType = inferredDataType.get().getLogicalType();
		// a more specific, casted argument type is available
		if (!supportsAvoidingCast(actualType, inferredType) && !closestDataType.isPresent()) {
			closestDataType = inferredDataType;
		}
	}

	if (closestDataType.isPresent()) {
		return closestDataType;
	}

	return Optional.of(actualDataType);
}
 
Example 7
Source File: TableSourceValidation.java    From flink with Apache License 2.0
private static void validateLogicalToPhysicalMapping(
		TableSource<?> tableSource,
		TableSchema schema,
		List<RowtimeAttributeDescriptor> rowtimeAttributes,
		Optional<String> proctimeAttribute) {
	// validate that schema fields can be resolved to a return type field of correct type
	int mappedFieldCnt = 0;
	for (int i = 0; i < schema.getFieldCount(); i++) {
		DataType fieldType = schema.getFieldDataType(i).get();
		LogicalType logicalFieldType = fieldType.getLogicalType();
		String fieldName = schema.getFieldName(i).get();

		if (proctimeAttribute.map(p -> p.equals(fieldName)).orElse(false)) {
			if (!(hasFamily(logicalFieldType, LogicalTypeFamily.TIMESTAMP))) {
				throw new ValidationException(String.format("Processing time field '%s' has invalid type %s. " +
					"Processing time attributes must be of type SQL_TIMESTAMP.", fieldName, logicalFieldType));
			}
		} else if (rowtimeAttributes.stream().anyMatch(p -> p.getAttributeName().equals(fieldName))) {
			if (!(hasFamily(logicalFieldType, LogicalTypeFamily.TIMESTAMP))) {
				throw new ValidationException(String.format("Rowtime time field '%s' has invalid type %s. " +
					"Rowtime time attributes must be of type SQL_TIMESTAMP.", fieldName, logicalFieldType));
			}
		} else {
			validateLogicalTypeEqualsPhysical(fieldName, fieldType, tableSource);
			mappedFieldCnt += 1;
		}
	}

	// ensure that only one field is mapped to an atomic type
	DataType producedDataType = tableSource.getProducedDataType();
	if (!isCompositeType(producedDataType) && mappedFieldCnt > 1) {
		throw new ValidationException(
			String.format(
				"More than one table field matched to atomic input type %s.",
				producedDataType));
	}
}
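The hasFamily check used above comes from LogicalTypeChecks and can be tried in isolation:

import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasFamily;

// TIMESTAMP(3) belongs to the TIMESTAMP family, INT does not.
hasFamily(DataTypes.TIMESTAMP(3).getLogicalType(), LogicalTypeFamily.TIMESTAMP); // true
hasFamily(DataTypes.INT().getLogicalType(), LogicalTypeFamily.TIMESTAMP);        // false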
 
Example 8
Source File: DataTypeConversionClassTransformation.java    From flink with Apache License 2.0
@Override
public DataType transform(DataType dataType) {
	LogicalType logicalType = dataType.getLogicalType();
	Class<?> conversionClass = conversions.get(logicalType.getTypeRoot());
	if (conversionClass != null) {
		return dataType.bridgedTo(conversionClass);
	} else {
		return dataType;
	}
}
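A sketch of one concrete transformation this class enables (the content of the conversions map is hypothetical):

// Suppose conversions maps LogicalTypeRoot.DATE -> java.sql.Date.
DataType input = DataTypes.DATE();                         // default conversion: java.time.LocalDate
System.out.println(input.getLogicalType().getTypeRoot());  // DATE
DataType output = input.bridgedTo(java.sql.Date.class);    // logical type unchanged, new conversion class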
 
Example 9
Source File: DecimalBigDecimalConverter.java    From flink with Apache License 2.0
static DecimalBigDecimalConverter create(DataType dataType) {
	final DecimalType decimalType = (DecimalType) dataType.getLogicalType();
	return new DecimalBigDecimalConverter(decimalType.getPrecision(), decimalType.getScale());
}
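The cast succeeds because a DECIMAL data type always carries a DecimalType as its logical type; a sketch:

DataType dataType = DataTypes.DECIMAL(10, 2);
DecimalType decimalType = (DecimalType) dataType.getLogicalType();
System.out.println(decimalType.getPrecision());  // 10
System.out.println(decimalType.getScale());      // 2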
 
Example 10
Source File: JsonRowDataSerDeSchemaTest.java    From flink with Apache License 2.0
@Test
public void testDeserializationMissingNode() throws Exception {
	ObjectMapper objectMapper = new ObjectMapper();

	// Root
	ObjectNode root = objectMapper.createObjectNode();
	root.put("id", 123123123);
	byte[] serializedJson = objectMapper.writeValueAsBytes(root);

	DataType dataType = ROW(FIELD("name", STRING()));
	RowType schema = (RowType) dataType.getLogicalType();

	// pass on missing field
	JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
		schema, new RowDataTypeInfo(schema), false, false, TimestampFormat.ISO_8601);

	Row expected = new Row(1);
	Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
	assertEquals(expected, actual);

	// fail on missing field
	deserializationSchema = new JsonRowDataDeserializationSchema(
		schema, new RowDataTypeInfo(schema), true, false, TimestampFormat.ISO_8601);

	thrown.expect(IOException.class);
	thrown.expectMessage("Failed to deserialize JSON '{\"id\":123123123}'");
	deserializationSchema.deserialize(serializedJson);

	// ignore on parse error
	deserializationSchema = new JsonRowDataDeserializationSchema(
		schema, new RowDataTypeInfo(schema), false, true, TimestampFormat.ISO_8601);
	actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
	assertEquals(expected, actual);

	thrown.expect(IllegalArgumentException.class);
	thrown.expectMessage("JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled");
	// failOnMissingField and ignoreParseErrors both enabled
	//noinspection ConstantConditions
	new JsonRowDataDeserializationSchema(
		schema, new RowDataTypeInfo(schema), true, true, TimestampFormat.ISO_8601);
}
 
Example 11
Source File: JsonRowDataSerDeSchemaTest.java    From flink with Apache License 2.0
@Test
public void testSerDe() throws Exception {
	byte tinyint = 'c';
	short smallint = 128;
	int intValue = 45536;
	float floatValue = 33.333F;
	long bigint = 1238123899121L;
	String name = "asdlkjasjkdla998y1122";
	byte[] bytes = new byte[1024];
	ThreadLocalRandom.current().nextBytes(bytes);
	BigDecimal decimal = new BigDecimal("123.456789");
	Double[] doubles = new Double[]{1.1, 2.2, 3.3};
	LocalDate date = LocalDate.parse("1990-10-14");
	LocalTime time = LocalTime.parse("12:12:43");
	Timestamp timestamp3 = Timestamp.valueOf("1990-10-14 12:12:43.123");
	Timestamp timestamp9 = Timestamp.valueOf("1990-10-14 12:12:43.123456789");

	Map<String, Long> map = new HashMap<>();
	map.put("flink", 123L);

	Map<String, Map<String, Integer>> nestedMap = new HashMap<>();
	Map<String, Integer> innerMap = new HashMap<>();
	innerMap.put("key", 234);
	nestedMap.put("inner_map", innerMap);

	ObjectMapper objectMapper = new ObjectMapper();
	ArrayNode doubleNode = objectMapper.createArrayNode().add(1.1D).add(2.2D).add(3.3D);

	// Root
	ObjectNode root = objectMapper.createObjectNode();
	root.put("bool", true);
	root.put("tinyint", tinyint);
	root.put("smallint", smallint);
	root.put("int", intValue);
	root.put("bigint", bigint);
	root.put("float", floatValue);
	root.put("name", name);
	root.put("bytes", bytes);
	root.put("decimal", decimal);
	root.set("doubles", doubleNode);
	root.put("date", "1990-10-14");
	root.put("time", "12:12:43");
	root.put("timestamp3", "1990-10-14T12:12:43.123");
	root.put("timestamp9", "1990-10-14T12:12:43.123456789");
	root.putObject("map").put("flink", 123);
	root.putObject("map2map").putObject("inner_map").put("key", 234);

	byte[] serializedJson = objectMapper.writeValueAsBytes(root);

	DataType dataType = ROW(
		FIELD("bool", BOOLEAN()),
		FIELD("tinyint", TINYINT()),
		FIELD("smallint", SMALLINT()),
		FIELD("int", INT()),
		FIELD("bigint", BIGINT()),
		FIELD("float", FLOAT()),
		FIELD("name", STRING()),
		FIELD("bytes", BYTES()),
		FIELD("decimal", DECIMAL(9, 6)),
		FIELD("doubles", ARRAY(DOUBLE())),
		FIELD("date", DATE()),
		FIELD("time", TIME(0)),
		FIELD("timestamp3", TIMESTAMP(3)),
		FIELD("timestamp9", TIMESTAMP(9)),
		FIELD("map", MAP(STRING(), BIGINT())),
		FIELD("map2map", MAP(STRING(), MAP(STRING(), INT()))));
	RowType schema = (RowType) dataType.getLogicalType();
	RowDataTypeInfo resultTypeInfo = new RowDataTypeInfo(schema);

	JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
		schema, resultTypeInfo, false, false, TimestampFormat.ISO_8601);

	Row expected = new Row(16);
	expected.setField(0, true);
	expected.setField(1, tinyint);
	expected.setField(2, smallint);
	expected.setField(3, intValue);
	expected.setField(4, bigint);
	expected.setField(5, floatValue);
	expected.setField(6, name);
	expected.setField(7, bytes);
	expected.setField(8, decimal);
	expected.setField(9, doubles);
	expected.setField(10, date);
	expected.setField(11, time);
	expected.setField(12, timestamp3.toLocalDateTime());
	expected.setField(13, timestamp9.toLocalDateTime());
	expected.setField(14, map);
	expected.setField(15, nestedMap);

	RowData rowData = deserializationSchema.deserialize(serializedJson);
	Row actual = convertToExternal(rowData, dataType);
	assertEquals(expected, actual);

	// test serialization
	JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(schema, TimestampFormat.ISO_8601);

	byte[] actualBytes = serializationSchema.serialize(rowData);
	assertEquals(new String(serializedJson), new String(actualBytes));
}
 
Example 12
Source File: TypeConversions.java    From flink with Apache License 2.0
public static LogicalType fromDataToLogicalType(DataType dataType) {
	return dataType.getLogicalType();
}
 
Example 13
Source File: LegacyTypeInfoDataTypeConverter.java    From flink with Apache License 2.0
private static boolean canConvertToLegacyTypeInfo(DataType dataType) {
	return dataType.getLogicalType() instanceof LegacyTypeInformationType;
}
 
Example 14
Source File: LegacyTypeInfoDataTypeConverter.java    From flink with Apache License 2.0
private static boolean canConvertToTimestampTypeInfoLenient(DataType dataType) {
	LogicalType logicalType = dataType.getLogicalType();
	return hasRoot(logicalType, LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE) &&
		dataType.getConversionClass() != LocalDateTime.class &&
		LogicalTypeChecks.getPrecision(logicalType) <= 3;
}
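The precision condition can be observed directly (a sketch):

DataType ts9 = DataTypes.TIMESTAMP(9);
System.out.println(LogicalTypeChecks.getPrecision(ts9.getLogicalType()));  // 9 -> too precise for the lenient conversion
// A TIMESTAMP(3) bridged to java.sql.Timestamp would satisfy all three conditions.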
 
Example 15
Source File: DataTypes.java    From flink with Apache License 2.0
/**
 * Data type of an associative array that maps keys (including {@code NULL}) to values (including
 * {@code NULL}). A map cannot contain duplicate keys; each key can map to at most one value.
 *
 * <p>There is no restriction of key types; it is the responsibility of the user to ensure uniqueness.
 * The map type is an extension to the SQL standard.
 *
 * @see MapType
 */
public static DataType MAP(DataType keyDataType, DataType valueDataType) {
	Preconditions.checkNotNull(keyDataType, "Key data type must not be null.");
	Preconditions.checkNotNull(valueDataType, "Value data type must not be null.");
	return new KeyValueDataType(
		new MapType(keyDataType.getLogicalType(), valueDataType.getLogicalType()),
		keyDataType,
		valueDataType);
}
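A usage sketch showing the logical type such a call produces:

DataType map = DataTypes.MAP(DataTypes.STRING(), DataTypes.INT());
MapType mapType = (MapType) map.getLogicalType();
System.out.println(mapType.getKeyType().asSummaryString());    // VARCHAR(2147483647)
System.out.println(mapType.getValueType().asSummaryString());  // INT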
 
Example 16
Source File: DataTypes.java    From flink with Apache License 2.0
/**
 * Data type of an array of elements with same subtype.
 *
 * <p>Compared to the SQL standard, the maximum cardinality of an array cannot be specified but
 * is fixed at {@link Integer#MAX_VALUE}. Also, any valid type is supported as a subtype.
 *
 * @see ArrayType
 */
public static DataType ARRAY(DataType elementDataType) {
	Preconditions.checkNotNull(elementDataType, "Element data type must not be null.");
	return new CollectionDataType(new ArrayType(elementDataType.getLogicalType()), elementDataType);
}
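A usage sketch:

DataType array = DataTypes.ARRAY(DataTypes.DOUBLE());
ArrayType arrayType = (ArrayType) array.getLogicalType();
System.out.println(arrayType.getElementType().asSummaryString());  // DOUBLE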
 
Example 17
Source File: HiveTypeUtil.java    From flink with Apache License 2.0
/**
 * Convert Flink DataType to Hive TypeInfo. For types with a precision parameter, e.g. timestamp, the supported
 * precisions in Hive and Flink can be different. Therefore the conversion will fail for those types if the precision
 * is not supported by Hive and checkPrecision is true.
 *
 * @param dataType a Flink DataType
 * @param checkPrecision whether to fail the conversion if the precision of the DataType is not supported by Hive
 * @return the corresponding Hive data type
 */
public static TypeInfo toHiveTypeInfo(DataType dataType, boolean checkPrecision) {
	checkNotNull(dataType, "type cannot be null");
	LogicalType logicalType = dataType.getLogicalType();
	return logicalType.accept(new TypeInfoLogicalTypeVisitor(dataType, checkPrecision));
}
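A hedged usage sketch (requires the flink-connector-hive module; the chosen type is illustrative):

// Converts Flink's STRING type to the corresponding Hive TypeInfo.
TypeInfo hiveString = HiveTypeUtil.toHiveTypeInfo(DataTypes.STRING(), true);
// With checkPrecision = true, a precision Hive cannot represent makes the
// conversion throw instead of silently degrading the type.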
 
Example 18
Source File: DataTypes.java    From flink with Apache License 2.0
/**
 * Data type of a multiset (=bag). Unlike a set, it allows for multiple instances for each of its
 * elements with a common subtype. Each unique value (including {@code NULL}) is mapped to some
 * multiplicity.
 *
 * <p>There is no restriction of element types; it is the responsibility of the user to ensure
 * uniqueness.
 *
 * @see MultisetType
 */
public static DataType MULTISET(DataType elementDataType) {
	Preconditions.checkNotNull(elementDataType, "Element data type must not be null.");
	return new CollectionDataType(new MultisetType(elementDataType.getLogicalType()), elementDataType);
}
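A usage sketch:

DataType multiset = DataTypes.MULTISET(DataTypes.STRING());
MultisetType multisetType = (MultisetType) multiset.getLogicalType();
System.out.println(multisetType.getElementType().asSummaryString());  // VARCHAR(2147483647), i.e. STRING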