org.apache.flink.table.types.logical.LogicalType Java Examples

The following examples show how to use org.apache.flink.table.types.logical.LogicalType. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example #1
Source File: LogicalTypeDuplicator.java    From flink with Apache License 2.0 6 votes vote down vote up
@Override
public LogicalType visit(RowType rowType) {
	// Duplicate every field, recursively visiting the field type; the optional
	// field description is carried over when present.
	final List<RowField> duplicatedFields = rowType.getFields().stream()
		.map(field -> field.getDescription()
			.map(description -> new RowField(
				field.getName(),
				field.getType().accept(this),
				description))
			.orElseGet(() -> new RowField(
				field.getName(),
				field.getType().accept(this))))
		.collect(Collectors.toList());

	// Preserve the nullability of the original row type.
	return new RowType(rowType.isNullable(), duplicatedFields);
}
 
Example #2
Source File: CsvRowSchemaConverter.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a {@link LogicalType} into a {@link CsvSchema.ColumnType} based on Jackson's
 * categories (STRING, NUMBER, BOOLEAN, ARRAY).
 *
 * @throws IllegalArgumentException if the type has no Jackson category
 */
private static CsvSchema.ColumnType convertType(String fieldName, LogicalType type) {
	final LogicalTypeRoot root = type.getTypeRoot();
	if (STRING_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.STRING;
	}
	if (NUMBER_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.NUMBER;
	}
	if (BOOLEAN_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.BOOLEAN;
	}
	if (root == LogicalTypeRoot.ARRAY) {
		validateNestedField(fieldName, ((ArrayType) type).getElementType());
		return CsvSchema.ColumnType.ARRAY;
	}
	if (root == LogicalTypeRoot.ROW) {
		// Jackson has no dedicated ROW category; rows map to ARRAY, but every
		// nested field must still be of a supported flat type.
		for (LogicalType fieldType : ((RowType) type).getChildren()) {
			validateNestedField(fieldName, fieldType);
		}
		return CsvSchema.ColumnType.ARRAY;
	}
	throw new IllegalArgumentException(
		"Unsupported type '" + type.asSummaryString() + "' for field '" + fieldName + "'.");
}
 
Example #3
Source File: CsvRowDataSerializationSchema.java    From flink with Apache License 2.0 6 votes vote down vote up
private SerializationRuntimeConverter createRowConverter(RowType type) {
	// Pre-compute per-field names and converters once; the returned lambda
	// only iterates over the cached arrays.
	final int arity = type.getFieldCount();
	final String[] names = type.getFieldNames().toArray(new String[0]);
	final RowFieldConverter[] converters = type.getFields().stream()
		.map(RowType.RowField::getType)
		.map(this::createNullableRowFieldConverter)
		.toArray(RowFieldConverter[]::new);
	return (csvMapper, container, row) -> {
		// top level reuses the object node container
		final ObjectNode objectNode = (ObjectNode) container;
		for (int pos = 0; pos < arity; pos++) {
			objectNode.set(names[pos], converters[pos].convert(csvMapper, container, row, pos));
		}
		return objectNode;
	};
}
 
Example #4
Source File: LogicalTypeParser.java    From flink with Apache License 2.0 6 votes vote down vote up
private LogicalType parseYearMonthIntervalType() {
	// Parses "INTERVAL YEAR[(p)] [TO MONTH]" or "INTERVAL MONTH".
	int yearPrecision = YearMonthIntervalType.DEFAULT_PRECISION;
	switch (tokenAsKeyword()) {
		case YEAR:
			// optional "(p)" after YEAR; falls back to the default precision
			yearPrecision = parseOptionalPrecision(yearPrecision);
			if (!hasNextToken(Keyword.TO)) {
				return new YearMonthIntervalType(
					YearMonthResolution.YEAR,
					yearPrecision);
			}
			// "YEAR(p) TO MONTH"
			nextToken(Keyword.TO);
			nextToken(Keyword.MONTH);
			return new YearMonthIntervalType(
				YearMonthResolution.YEAR_TO_MONTH,
				yearPrecision);
		case MONTH:
			// MONTH has no own precision; the year precision stays at its default
			return new YearMonthIntervalType(
				YearMonthResolution.MONTH,
				yearPrecision);
		default:
			throw parsingError("Invalid year-month interval resolution.");
	}
}
 
Example #5
Source File: ArrayObjectArrayConverter.java    From flink with Apache License 2.0 6 votes vote down vote up
@SuppressWarnings("unchecked")
private static <E> GenericToJavaArrayConverter<E> createGenericToJavaArrayConverter(LogicalType elementType) {
	// Creates a converter that boxes the primitive array extracted from the
	// internal array representation into the matching object array.
	switch (elementType.getTypeRoot()) {
		case BOOLEAN:
			return internal -> (E[]) ArrayUtils.toObject(internal.toBooleanArray());
		case TINYINT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toByteArray());
		case SMALLINT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toShortArray());
		case INTEGER:
			return internal -> (E[]) ArrayUtils.toObject(internal.toIntArray());
		case BIGINT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toLongArray());
		case FLOAT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toFloatArray());
		case DOUBLE:
			return internal -> (E[]) ArrayUtils.toObject(internal.toDoubleArray());
		case DISTINCT_TYPE:
			// unwrap the distinct type and convert based on its source type
			return createGenericToJavaArrayConverter(((DistinctType) elementType).getSourceType());
		default:
			// Non-primitive element types must never reach this converter; fail
			// with a message naming the offending type instead of a bare
			// IllegalStateException, which is undiagnosable in logs.
			return internal -> {
				throw new IllegalStateException(
					"Unsupported element type for primitive array conversion: " + elementType);
			};
	}
}
 
Example #6
Source File: CastInputTypeStrategy.java    From flink with Apache License 2.0 6 votes vote down vote up
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
	// the second argument must be a literal carrying the target type
	final boolean hasTypeLiteral = callContext.isArgumentLiteral(1)
		&& callContext.getArgumentValue(1, DataType.class).isPresent();
	if (!hasTypeLiteral) {
		return Optional.empty();
	}

	final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
	final LogicalType sourceType = argumentDataTypes.get(0).getLogicalType();
	final LogicalType targetType = argumentDataTypes.get(1).getLogicalType();

	// A hack to support legacy types. To be removed when we drop the legacy types.
	if (sourceType instanceof LegacyTypeInformationType) {
		return Optional.of(argumentDataTypes);
	}

	if (supportsExplicitCast(sourceType, targetType)) {
		return Optional.of(argumentDataTypes);
	}
	if (throwOnFailure) {
		throw callContext.newValidationError(
			"Unsupported cast from '%s' to '%s'.",
			sourceType,
			targetType);
	}
	return Optional.empty();
}
 
Example #7
Source File: LogicalTypesTest.java    From flink with Apache License 2.0 6 votes vote down vote up
@Test
public void testArrayType() {
	// ARRAY<TIMESTAMP(6)>: conversion from/to timestamp arrays and (input only)
	// java.util List/ArrayList; see testAll for what each argument verifies.
	testAll(
		new ArrayType(new TimestampType()),
		"ARRAY<TIMESTAMP(6)>",
		"ARRAY<TIMESTAMP(6)>",
		new Class[]{java.sql.Timestamp[].class, java.time.LocalDateTime[].class, List.class, ArrayList.class},
		new Class[]{java.sql.Timestamp[].class, java.time.LocalDateTime[].class, List.class},
		new LogicalType[]{new TimestampType()},
		new ArrayType(new SmallIntType())
	);

	// nested arrays: only two-dimensional array classes are convertible
	testAll(
		new ArrayType(new ArrayType(new TimestampType())),
		"ARRAY<ARRAY<TIMESTAMP(6)>>",
		"ARRAY<ARRAY<TIMESTAMP(6)>>",
		new Class[]{java.sql.Timestamp[][].class, java.time.LocalDateTime[][].class},
		new Class[]{java.sql.Timestamp[][].class, java.time.LocalDateTime[][].class},
		new LogicalType[]{new ArrayType(new TimestampType())},
		new ArrayType(new ArrayType(new SmallIntType()))
	);

	// a one-dimensional array class must NOT be accepted for a nested array type
	final LogicalType nestedArray = new ArrayType(new ArrayType(new TimestampType()));
	assertFalse(nestedArray.supportsInputConversion(java.sql.Timestamp[].class));
	assertFalse(nestedArray.supportsOutputConversion(java.sql.Timestamp[].class));
}
 
Example #8
Source File: OperatorBindingCallContext.java    From flink with Apache License 2.0 6 votes vote down vote up
public OperatorBindingCallContext(
		DataTypeFactory dataTypeFactory,
		FunctionDefinition definition,
		SqlOperatorBinding binding) {
	// the call's name is derived from the bound Calcite operator
	super(
		dataTypeFactory,
		definition,
		binding.getOperator().getNameAsId().toString());

	this.binding = binding;
	// Expose the binding's operand types as a lazy, read-only list of Flink
	// DataTypes; the Calcite-to-Flink conversion happens on every access.
	this.argumentDataTypes = new AbstractList<DataType>() {
		@Override
		public DataType get(int pos) {
			final LogicalType logicalType = FlinkTypeFactory.toLogicalType(binding.getOperandType(pos));
			return TypeConversions.fromLogicalToDataType(logicalType);
		}

		@Override
		public int size() {
			return binding.getOperandCount();
		}
	};
}
 
Example #9
Source File: LogicalTypeGeneralization.java    From flink with Apache License 2.0 6 votes vote down vote up
private static @Nullable LogicalType findCommonCastableType(List<LogicalType> normalizedTypes) {
	// Searches for a type into which every other (non-NULL) type can be
	// implicitly cast; returns null if no such type exists.
	LogicalType candidate = normalizedTypes.get(0);

	for (LogicalType type : normalizedTypes) {
		// NULL does not affect the result of this loop
		if (type.getTypeRoot() == NULL) {
			continue;
		}
		if (supportsImplicitCast(candidate, type)) {
			// the current type is more general; it becomes the new candidate
			candidate = type;
		} else if (!supportsImplicitCast(type, candidate)) {
			// neither direction casts implicitly -> no common castable type
			return null;
		}
	}

	return candidate;
}
 
Example #10
Source File: AvroRowDataDeserializationSchema.java    From flink with Apache License 2.0 6 votes vote down vote up
private static DeserializationRuntimeConverter createMapConverter(LogicalType type) {
	// Avro map keys are always strings; values follow the map's value type.
	final DeserializationRuntimeConverter convertKey = createConverter(
			DataTypes.STRING().getLogicalType());
	final DeserializationRuntimeConverter convertValue = createConverter(
			extractValueTypeToAvroMap(type));

	return avroObject -> {
		final Map<Object, Object> converted = new HashMap<>();
		for (Map.Entry<?, ?> entry : ((Map<?, ?>) avroObject).entrySet()) {
			converted.put(
				convertKey.convert(entry.getKey()),
				convertValue.convert(entry.getValue()));
		}
		return new GenericMapData(converted);
	};
}
 
Example #11
Source File: TypeCheckUtils.java    From flink with Apache License 2.0 6 votes vote down vote up
// Returns whether values of the given type are represented as object references
// at runtime. The type roots listed below are backed by primitive (or
// primitive-like compact) representations and are therefore NOT references;
// every other root (strings, binary, decimals, rows, arrays, maps, ...)
// defaults to true.
public static boolean isReference(LogicalType type) {
	switch (type.getTypeRoot()) {
		case BOOLEAN:
		case TINYINT:
		case SMALLINT:
		case INTEGER:
		case BIGINT:
		case FLOAT:
		case DOUBLE:
		case DATE:
		case TIME_WITHOUT_TIME_ZONE:
		case TIMESTAMP_WITHOUT_TIME_ZONE:
		case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
		case INTERVAL_YEAR_MONTH:
		case INTERVAL_DAY_TIME:
			return false;
		default:
			return true;
	}
}
 
Example #12
Source File: TypeInferenceOperandInference.java    From flink with Apache License 2.0 6 votes vote down vote up
private void inferOperandTypesOrError(FlinkTypeFactory typeFactory, CallContext callContext, RelDataType[] operandTypes) {
	// Typed arguments have highest priority; otherwise fall back to the input
	// type strategy, which may be unable to infer anything (-> null).
	final List<DataType> expectedDataTypes = typeInference.getTypedArguments()
		.orElseGet(() ->
			typeInference.getInputTypeStrategy()
				.inferInputTypes(callContext, false)
				.orElse(null));

	// early out for invalid input
	if (expectedDataTypes == null || expectedDataTypes.size() != operandTypes.length) {
		return;
	}

	// overwrite the Calcite operand types in place with the inferred ones
	for (int i = 0; i < operandTypes.length; i++) {
		operandTypes[i] = typeFactory.createFieldTypeFromLogicalType(
			expectedDataTypes.get(i).getLogicalType());
	}
}
 
Example #13
Source File: WindowOperatorBuilder.java    From flink with Apache License 2.0 5 votes vote down vote up
// Configures this builder for a (non-table) aggregate function and switches to
// the aggregate-specific builder; type bookkeeping is delegated to the common
// aggregate(...) overload.
public AggregateWindowOperatorBuilder aggregate(
	NamespaceAggsHandleFunction<?> aggregateFunction,
	RecordEqualiser equaliser,
	LogicalType[] accumulatorTypes,
	LogicalType[] aggResultTypes,
	LogicalType[] windowPropertyTypes) {

	aggregate(accumulatorTypes, aggResultTypes, windowPropertyTypes);
	return new AggregateWindowOperatorBuilder(aggregateFunction, equaliser, this);
}
 
Example #14
Source File: HiveScalarSqlFunction.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public ScalarFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
	// Work on a deep copy so the original function instance stays untouched.
	final ScalarFunction copy;
	try {
		copy = InstantiationUtil.clone(function);
	} catch (IOException | ClassNotFoundException e) {
		// cloning goes through Java serialization; a failure is unrecoverable here
		throw new RuntimeException(e);
	}
	return (ScalarFunction) invokeSetArgs(copy, constantArguments, argTypes);
}
 
Example #15
Source File: WindowOperatorBuilder.java    From flink with Apache License 2.0 5 votes vote down vote up
// Configures this builder for a table aggregate function and switches to the
// table-aggregate-specific builder; type bookkeeping is delegated to the common
// aggregate(...) overload.
public TableAggregateWindowOperatorBuilder aggregate(
	GeneratedNamespaceTableAggsHandleFunction<?> generatedTableAggregateFunction,
	LogicalType[] accumulatorTypes,
	LogicalType[] aggResultTypes,
	LogicalType[] windowPropertyTypes) {

	aggregate(accumulatorTypes, aggResultTypes, windowPropertyTypes);
	return new TableAggregateWindowOperatorBuilder(generatedTableAggregateFunction, this);
}
 
Example #16
Source File: ValueLiteralExpression.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Validates that the given value is a legal literal for the given data type.
 *
 * @throws ValidationException if the value is null for a non-nullable type, if its class
 *     does not match the type's conversion class, or if the logical type does not support
 *     an input conversion from the value's class
 */
private static void validateValueDataType(Object value, DataType dataType) {
	final LogicalType logicalType = dataType.getLogicalType();

	// null is only valid for nullable types
	if (value == null) {
		if (!logicalType.isNullable()) {
			throw new ValidationException(
				String.format(
					"Data type '%s' does not support null values.",
					dataType));
		}
		return;
	}

	final Class<?> valueClass = value.getClass();

	// ensure value and data type match
	if (!dataType.getConversionClass().isAssignableFrom(valueClass)) {
		throw new ValidationException(
			String.format(
				"Data type '%s' with conversion class '%s' does not support a value literal of class '%s'.",
				dataType,
				dataType.getConversionClass().getName(),
				valueClass.getName()));
	}

	// check for proper input as this cannot be checked in data type
	if (!logicalType.supportsInputConversion(valueClass)) {
		throw new ValidationException(
			String.format(
				"Data type '%s' does not support a conversion from class '%s'.",
				dataType,
				valueClass.getName()));
	}
}
 
Example #17
Source File: LogicalTypesTest.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testVarCharType() {
	// VARCHAR(33): converts from/to String and byte[], has no child types, and
	// must not equal a VARCHAR of different length; see testAll for the exact
	// checks each argument drives.
	testAll(
		new VarCharType(33),
		"VARCHAR(33)",
		"VARCHAR(33)",
		new Class[]{String.class, byte[].class},
		new Class[]{String.class, byte[].class},
		new LogicalType[]{},
		new VarCharType(12)
	);
}
 
Example #18
Source File: JdbcRowDataOutputFormat.java    From flink with Apache License 2.0 5 votes vote down vote up
// Private constructor; wires an upsert row executor into the parent output
// format and keeps the DML options and field types for later use.
private JdbcRowDataOutputFormat(
		JdbcConnectionProvider connectionProvider,
		JdbcDmlOptions dmlOptions,
		JdbcExecutionOptions batchOptions,
		TypeInformation<RowData> rowDataTypeInfo,
		LogicalType[] logicalTypes) {
	super(
		connectionProvider,
		batchOptions,
		// executor is created lazily per runtime context
		ctx -> createUpsertRowExecutor(dmlOptions, ctx, rowDataTypeInfo, logicalTypes),
		RecordExtractor.identity());
	this.dmlOptions = dmlOptions;
	this.logicalTypes = logicalTypes;
}
 
Example #19
Source File: BinaryRow.java    From flink with Apache License 2.0 5 votes vote down vote up
// Renders the row as "[header,v0,v1,...]" where null fields print as "null";
// one logical type per field is required.
public static String toOriginString(BaseRow row, LogicalType[] types) {
	checkArgument(types.length == row.getArity());
	final StringBuilder sb = new StringBuilder("[");
	sb.append(row.getHeader());
	for (int pos = 0; pos < row.getArity(); pos++) {
		sb.append(',');
		sb.append(row.isNullAt(pos) ? "null" : TypeGetterSetters.get(row, pos, types[pos]));
	}
	return sb.append(']').toString();
}
 
Example #20
Source File: LogicalTypeParser.java    From flink with Apache License 2.0 5 votes vote down vote up
private LogicalType parseRowType() {
	// ROW(...) is the SQL standard notation, ROW<...> the subtype notation;
	// the opening token decides which delimiter pair to consume.
	final TokenType beginToken;
	final TokenType endToken;
	if (hasNextToken(TokenType.BEGIN_PARAMETER)) {
		beginToken = TokenType.BEGIN_PARAMETER;
		endToken = TokenType.END_PARAMETER;
	} else {
		beginToken = TokenType.BEGIN_SUBTYPE;
		endToken = TokenType.END_SUBTYPE;
	}
	nextToken(beginToken);
	final List<RowType.RowField> fields = parseRowFields(endToken);
	nextToken(endToken);
	return new RowType(fields);
}
 
Example #21
Source File: CsvRowDataSerializationSchema.java    From flink with Apache License 2.0 5 votes vote down vote up
// Wraps the element converter so that null array entries become JSON null nodes.
private ArrayElementConverter createNullableArrayElementConverter(LogicalType fieldType) {
	final ArrayElementConverter convertElement = createArrayElementConverter(fieldType);
	return (csvMapper, container, array, pos) -> {
		return array.isNullAt(pos)
			? container.nullNode()
			: convertElement.convert(csvMapper, container, array, pos);
	};
}
 
Example #22
Source File: LogicalTypeParser.java    From flink with Apache License 2.0 5 votes vote down vote up
private LogicalType parseTimeType() {
	// precision defaults to TimeType.DEFAULT_PRECISION when no "(p)" follows
	int precision = parseOptionalPrecision(TimeType.DEFAULT_PRECISION);
	// an optional "WITHOUT TIME ZONE" suffix is consumed but carries no extra
	// information — TIME is always without time zone
	if (hasNextToken(Keyword.WITHOUT)) {
		nextToken(Keyword.WITHOUT);
		nextToken(Keyword.TIME);
		nextToken(Keyword.ZONE);
	}
	return new TimeType(precision);
}
 
Example #23
Source File: LogicalTypeMerging.java    From flink with Apache License 2.0 5 votes vote down vote up
// If every type equals the first one, that type is the common type; otherwise
// there is no exactly matching common type and null is returned.
private static @Nullable LogicalType findExactlySameType(List<LogicalType> normalizedTypes) {
	final LogicalType first = normalizedTypes.get(0);
	final boolean allEqual = normalizedTypes.stream().allMatch(t -> t.equals(first));
	return allEqual ? first : null;
}
 
Example #24
Source File: LogicalTypesTest.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testCharType() {
	// CHAR(33): converts from/to String and byte[], has no child types, and
	// must not equal a CHAR of different length; see testAll for the exact
	// checks each argument drives.
	testAll(
		new CharType(33),
		"CHAR(33)",
		"CHAR(33)",
		new Class[]{String.class, byte[].class},
		new Class[]{String.class, byte[].class},
		new LogicalType[]{},
		new CharType(Integer.MAX_VALUE)
	);
}
 
Example #25
Source File: MapDataUtil.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a {@link MapData} into a Java {@link Map}; the keys and values of the resulting
 * map still hold objects of Flink's internal data structures.
 */
public static Map<Object, Object> convertToJavaMap(
		MapData map, LogicalType keyType, LogicalType valueType) {
	final ArrayData keys = map.keyArray();
	final ArrayData values = map.valueArray();
	final int numEntries = map.size();
	final Map<Object, Object> javaMap = new HashMap<>();
	for (int pos = 0; pos < numEntries; pos++) {
		javaMap.put(
			ArrayData.get(keys, pos, keyType),
			ArrayData.get(values, pos, valueType));
	}
	return javaMap;
}
 
Example #26
Source File: AvroRowDataDeserializationSchema.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a runtime converter which is null safe: a null Avro object maps to null,
 * everything else is delegated to the type-specific converter.
 */
private static DeserializationRuntimeConverter createNullableConverter(LogicalType type) {
	final DeserializationRuntimeConverter delegate = createConverter(type);
	return avroObject -> avroObject == null ? null : delegate.convert(avroObject);
}
 
Example #27
Source File: JdbcRowDataOutputFormat.java    From flink with Apache License 2.0 5 votes vote down vote up
// Builds a keyed batch executor that extracts the primary-key fields from each
// row and binds them to the given SQL statement via the dialect's converter.
private static JdbcBatchStatementExecutor<RowData> createKeyedRowExecutor(JdbcDialect dialect, int[] pkFields, LogicalType[] pkTypes, String sql, LogicalType[] logicalTypes) {
	// NOTE(review): pkTypes is accepted but never used in this method — confirm
	// whether it should feed the row converter (which is built from the FULL row
	// type below) or can be removed from the signature.
	final JdbcRowConverter rowConverter = dialect.getRowConverter(RowType.of(logicalTypes));
	// maps an incoming row to its primary-key-only projection
	final Function<RowData, RowData>  keyExtractor = createRowKeyExtractor(logicalTypes, pkFields);
	return JdbcBatchStatementExecutor.keyed(
		sql,
		keyExtractor,
		(st, record) -> rowConverter
			.toExternal(keyExtractor.apply(record), st));
}
 
Example #28
Source File: TableEnvironmentImpl.java    From flink with Apache License 2.0 5 votes vote down vote up
// Builds the table returned by DESCRIBE: one row per column with its name,
// type, nullability, primary-key marker, computed-column expression, and
// watermark expression.
private TableResult buildDescribeResult(TableSchema schema) {
	// rowtime attribute -> watermark expression string
	Map<String, String> fieldToWatermark =
			schema.getWatermarkSpecs()
					.stream()
					.collect(Collectors.toMap(WatermarkSpec::getRowtimeAttribute, WatermarkSpec::getWatermarkExpr));

	// each primary-key column maps to "PRI(<all pk columns>)"
	Map<String, String> fieldToPrimaryKey = new HashMap<>();
	schema.getPrimaryKey().ifPresent((p) -> {
		List<String> columns = p.getColumns();
		columns.forEach((c) -> fieldToPrimaryKey.put(c, String.format("PRI(%s)", String.join(", ", columns))));
	});

	Object[][] rows =
		schema.getTableColumns()
			.stream()
			.map((c) -> {
				LogicalType logicalType = c.getType().getLogicalType();
				return new Object[]{
					c.getName(),
					// nullability is reported in its own column, so strip the
					// trailing " NOT NULL" from the type string
					StringUtils.removeEnd(logicalType.toString(), " NOT NULL"),
					logicalType.isNullable(),
					fieldToPrimaryKey.getOrDefault(c.getName(), null),
					c.getExpr().orElse(null),
					fieldToWatermark.getOrDefault(c.getName(), null)};
			}).toArray(Object[][]::new);

	return buildResult(
		new String[]{"name", "type", "null", "key", "computed column", "watermark"},
		new DataType[]{DataTypes.STRING(), DataTypes.STRING(), DataTypes.BOOLEAN(), DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING()},
		rows);
}
 
Example #29
Source File: DataTypeFactoryImpl.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
protected LogicalType defaultMethod(LogicalType logicalType) {
	// only unresolved user-defined types need work; everything else passes through
	if (!hasRoot(logicalType, LogicalTypeRoot.UNRESOLVED)) {
		return logicalType;
	}
	final UnresolvedUserDefinedType unresolved = (UnresolvedUserDefinedType) logicalType;
	// resolve via the identifier and re-apply the original nullability
	return resolveType(unresolved.getUnresolvedIdentifier()).copy(unresolved.isNullable());
}
 
Example #30
Source File: RankLikeAggFunctionBase.java    From flink with Apache License 2.0 5 votes vote down vote up
// Initializes the base of rank-like aggregate functions with the logical types
// of the ORDER BY keys; one "lastValue_i" reference is created per key to track
// the previously seen order-key value.
public RankLikeAggFunctionBase(LogicalType[] orderKeyTypes) {
	this.orderKeyTypes = orderKeyTypes;
	lastValues = new UnresolvedReferenceExpression[orderKeyTypes.length];
	for (int i = 0; i < orderKeyTypes.length; ++i) {
		lastValues[i] = unresolvedRef("lastValue_" + i);
	}
}