Java Code Examples for org.apache.flink.table.types.logical.LogicalType

The following examples show how to use org.apache.flink.table.types.logical.LogicalType. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: flink   Source File: LogicalTypeDuplicator.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public LogicalType visit(RowType rowType) {
	// Duplicate every field, recursing into each field type with this visitor.
	// The optional field description is carried over when present.
	final List<RowField> duplicatedFields = rowType.getFields().stream()
		.map(field -> {
			final LogicalType duplicatedType = field.getType().accept(this);
			return field.getDescription()
				.map(description -> new RowField(field.getName(), duplicatedType, description))
				.orElseGet(() -> new RowField(field.getName(), duplicatedType));
		})
		.collect(Collectors.toList());

	// Preserve the nullability of the original row type.
	return new RowType(
		rowType.isNullable(),
		duplicatedFields);
}
 
Example 2
Source Project: flink   Source File: TypeInferenceOperandInference.java    License: Apache License 2.0 6 votes vote down vote up
private void inferOperandTypesOrError(FlinkTypeFactory typeFactory, CallContext callContext, RelDataType[] operandTypes) {
	// Typed arguments have the highest priority; otherwise fall back to the
	// input type strategy (which may yield no result, represented as null here).
	final List<DataType> expectedDataTypes = typeInference.getTypedArguments()
		.orElseGet(() -> typeInference.getInputTypeStrategy()
			.inferInputTypes(callContext, false)
			.orElse(null));

	// Early out for invalid input: leave the operand types untouched when
	// inference failed or the arity does not match.
	if (expectedDataTypes == null || expectedDataTypes.size() != operandTypes.length) {
		return;
	}

	// Overwrite each operand type with the Calcite type derived from the inference result.
	for (int i = 0; i < operandTypes.length; i++) {
		operandTypes[i] = typeFactory.createFieldTypeFromLogicalType(
			expectedDataTypes.get(i).getLogicalType());
	}
}
 
Example 3
Source Project: flink   Source File: CsvRowSchemaConverter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Converts a {@link LogicalType} into a Jackson {@link CsvSchema.ColumnType}
 * based on Jackson's categories.
 *
 * @throws IllegalArgumentException if the type has no CSV representation
 */
private static CsvSchema.ColumnType convertType(String fieldName, LogicalType type) {
	final LogicalTypeRoot root = type.getTypeRoot();
	if (STRING_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.STRING;
	}
	if (NUMBER_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.NUMBER;
	}
	if (BOOLEAN_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.BOOLEAN;
	}
	if (root == LogicalTypeRoot.ARRAY) {
		validateNestedField(fieldName, ((ArrayType) type).getElementType());
		return CsvSchema.ColumnType.ARRAY;
	}
	if (root == LogicalTypeRoot.ROW) {
		// Rows also map to ARRAY; every child must itself be a supported leaf type.
		for (LogicalType fieldType : ((RowType) type).getChildren()) {
			validateNestedField(fieldName, fieldType);
		}
		return CsvSchema.ColumnType.ARRAY;
	}
	throw new IllegalArgumentException(
		"Unsupported type '" + type.asSummaryString() + "' for field '" + fieldName + "'.");
}
 
Example 4
Source Project: flink   Source File: CastInputTypeStrategy.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
	// Check for type literal: the second argument must be a literal carrying the target type.
	final boolean hasTypeLiteral = callContext.isArgumentLiteral(1)
		&& callContext.getArgumentValue(1, DataType.class).isPresent();
	if (!hasTypeLiteral) {
		return Optional.empty();
	}

	final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
	final LogicalType sourceType = argumentDataTypes.get(0).getLogicalType();
	final LogicalType targetType = argumentDataTypes.get(1).getLogicalType();

	// A hack to support legacy types. To be removed when we drop the legacy types.
	if (sourceType instanceof LegacyTypeInformationType) {
		return Optional.of(argumentDataTypes);
	}

	if (supportsExplicitCast(sourceType, targetType)) {
		return Optional.of(argumentDataTypes);
	}
	if (throwOnFailure) {
		throw callContext.newValidationError(
			"Unsupported cast from '%s' to '%s'.",
			sourceType,
			targetType);
	}
	return Optional.empty();
}
 
Example 5
Source Project: flink   Source File: AvroRowDataDeserializationSchema.java    License: Apache License 2.0 6 votes vote down vote up
private static DeserializationRuntimeConverter createMapConverter(LogicalType type) {
	// Avro map keys are always strings; only the value type is derived from the Flink type.
	final DeserializationRuntimeConverter keyConverter =
			createConverter(DataTypes.STRING().getLogicalType());
	final DeserializationRuntimeConverter valueConverter =
			createConverter(extractValueTypeToAvroMap(type));

	return avroObject -> {
		// Convert every entry of the Avro map and wrap the result as internal map data.
		final Map<Object, Object> converted = new HashMap<>();
		for (Map.Entry<?, ?> entry : ((Map<?, ?>) avroObject).entrySet()) {
			converted.put(
					keyConverter.convert(entry.getKey()),
					valueConverter.convert(entry.getValue()));
		}
		return new GenericMapData(converted);
	};
}
 
Example 6
Source Project: flink   Source File: LogicalTypeParser.java    License: Apache License 2.0 6 votes vote down vote up
private LogicalType parseYearMonthIntervalType() {
	int yearPrecision = YearMonthIntervalType.DEFAULT_PRECISION;
	switch (tokenAsKeyword()) {
		case YEAR:
			yearPrecision = parseOptionalPrecision(yearPrecision);
			// "YEAR TO MONTH" widens the resolution; plain "YEAR" keeps it year-only.
			if (!hasNextToken(Keyword.TO)) {
				return new YearMonthIntervalType(
					YearMonthResolution.YEAR,
					yearPrecision);
			}
			nextToken(Keyword.TO);
			nextToken(Keyword.MONTH);
			return new YearMonthIntervalType(
				YearMonthResolution.YEAR_TO_MONTH,
				yearPrecision);
		case MONTH:
			// MONTH-only intervals use the default precision.
			return new YearMonthIntervalType(
				YearMonthResolution.MONTH,
				yearPrecision);
		default:
			throw parsingError("Invalid year-month interval resolution.");
	}
}
 
Example 7
Source Project: flink   Source File: LogicalTypeGeneralization.java    License: Apache License 2.0 6 votes vote down vote up
private static @Nullable LogicalType findCommonCastableType(List<LogicalType> normalizedTypes) {
	LogicalType resultType = normalizedTypes.get(0);

	for (LogicalType type : normalizedTypes) {
		// NULL does not affect the result of this loop
		if (type.getTypeRoot() == NULL) {
			continue;
		}

		if (supportsImplicitCast(resultType, type)) {
			// The current candidate is at least as general; widen the result.
			resultType = type;
		} else if (!supportsImplicitCast(type, resultType)) {
			// Neither direction casts implicitly: no common castable type exists.
			return null;
		}
	}

	return resultType;
}
 
Example 8
Source Project: flink   Source File: OperatorBindingCallContext.java    License: Apache License 2.0 6 votes vote down vote up
public OperatorBindingCallContext(
		DataTypeFactory dataTypeFactory,
		FunctionDefinition definition,
		SqlOperatorBinding binding) {
	super(
		dataTypeFactory,
		definition,
		binding.getOperator().getNameAsId().toString());

	this.binding = binding;
	// Lazy view over the operand types: each access converts the Calcite
	// RelDataType to a Flink DataType on the fly instead of materializing a list.
	this.argumentDataTypes = new AbstractList<DataType>() {
		@Override
		public DataType get(int pos) {
			final LogicalType logicalType = FlinkTypeFactory.toLogicalType(binding.getOperandType(pos));
			return TypeConversions.fromLogicalToDataType(logicalType);
		}

		@Override
		public int size() {
			return binding.getOperandCount();
		}
	};
}
 
Example 9
Source Project: flink   Source File: TypeCheckUtils.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns whether values of the given type are represented as object references.
 *
 * <p>The roots listed below return {@code false} — these presumably map to
 * primitive-backed internal representations (TODO confirm against the runtime's
 * storage layout); all other roots (strings, decimals, rows, arrays, maps, ...)
 * are treated as references.
 */
public static boolean isReference(LogicalType type) {
	switch (type.getTypeRoot()) {
		case BOOLEAN:
		case TINYINT:
		case SMALLINT:
		case INTEGER:
		case BIGINT:
		case FLOAT:
		case DOUBLE:
		case DATE:
		case TIME_WITHOUT_TIME_ZONE:
		case TIMESTAMP_WITHOUT_TIME_ZONE:
		case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
		case INTERVAL_YEAR_MONTH:
		case INTERVAL_DAY_TIME:
			return false;
		default:
			return true;
	}
}
 
Example 10
Source Project: flink   Source File: CsvRowDataSerializationSchema.java    License: Apache License 2.0 6 votes vote down vote up
private SerializationRuntimeConverter createRowConverter(RowType type) {
	// Pre-compute field names and per-field converters once; the returned lambda
	// is invoked per record and should do no type analysis of its own.
	final String[] fieldNames = type.getFieldNames().toArray(new String[0]);
	final RowFieldConverter[] fieldConverters = type.getFields().stream()
		.map(RowType.RowField::getType)
		.map(this::createNullableRowFieldConverter)
		.toArray(RowFieldConverter[]::new);
	final int rowArity = type.getFieldCount();
	return (csvMapper, container, row) -> {
		// top level reuses the object node container
		final ObjectNode objectNode = (ObjectNode) container;
		for (int pos = 0; pos < rowArity; pos++) {
			objectNode.set(
				fieldNames[pos],
				fieldConverters[pos].convert(csvMapper, container, row, pos));
		}
		return objectNode;
	};
}
 
Example 11
Source Project: flink   Source File: LogicalTypesTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testArrayType() {
	// Single-level array: presumably testAll checks the serializable/summary
	// strings, supported input/output conversion classes, children, and an
	// unequal "other" type — confirm against the testAll helper.
	testAll(
		new ArrayType(new TimestampType()),
		"ARRAY<TIMESTAMP(6)>",
		"ARRAY<TIMESTAMP(6)>",
		new Class[]{java.sql.Timestamp[].class, java.time.LocalDateTime[].class, List.class, ArrayList.class},
		new Class[]{java.sql.Timestamp[].class, java.time.LocalDateTime[].class, List.class},
		new LogicalType[]{new TimestampType()},
		new ArrayType(new SmallIntType())
	);

	// Nested array: conversion classes become two-dimensional arrays.
	testAll(
		new ArrayType(new ArrayType(new TimestampType())),
		"ARRAY<ARRAY<TIMESTAMP(6)>>",
		"ARRAY<ARRAY<TIMESTAMP(6)>>",
		new Class[]{java.sql.Timestamp[][].class, java.time.LocalDateTime[][].class},
		new Class[]{java.sql.Timestamp[][].class, java.time.LocalDateTime[][].class},
		new LogicalType[]{new ArrayType(new TimestampType())},
		new ArrayType(new ArrayType(new SmallIntType()))
	);

	// A one-dimensional array class must not satisfy a two-level array type.
	final LogicalType nestedArray = new ArrayType(new ArrayType(new TimestampType()));
	assertFalse(nestedArray.supportsInputConversion(java.sql.Timestamp[].class));
	assertFalse(nestedArray.supportsOutputConversion(java.sql.Timestamp[].class));
}
 
Example 12
Source Project: flink   Source File: ArrayObjectArrayConverter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a converter that turns an internal primitive array into the boxed
 * Java array for the given element type.
 *
 * <p>Distinct types are unwrapped recursively. For any other element type the
 * returned converter fails on invocation, since no primitive array extraction
 * exists for it.
 */
@SuppressWarnings("unchecked")
private static <E> GenericToJavaArrayConverter<E> createGenericToJavaArrayConverter(LogicalType elementType) {
	switch (elementType.getTypeRoot()) {
		case BOOLEAN:
			return internal -> (E[]) ArrayUtils.toObject(internal.toBooleanArray());
		case TINYINT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toByteArray());
		case SMALLINT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toShortArray());
		case INTEGER:
			return internal -> (E[]) ArrayUtils.toObject(internal.toIntArray());
		case BIGINT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toLongArray());
		case FLOAT:
			return internal -> (E[]) ArrayUtils.toObject(internal.toFloatArray());
		case DOUBLE:
			return internal -> (E[]) ArrayUtils.toObject(internal.toDoubleArray());
		case DISTINCT_TYPE:
			// Unwrap the distinct type and convert based on its source type.
			return createGenericToJavaArrayConverter(((DistinctType) elementType).getSourceType());
		default:
			return internal -> {
				// Previously threw a message-less IllegalStateException, which made
				// failures at call sites impossible to diagnose.
				throw new IllegalStateException(
					"No generic-to-Java array conversion for element type: " + elementType);
			};
	}
}
 
Example 13
Source Project: flink   Source File: BytesHashMap.java    License: Apache License 2.0 5 votes vote down vote up
static int getVariableLength(LogicalType[] types) {
	// find a better way of computing generic type field variable-length
	// right now we use a small value assumption (16 bytes per variable-length field)
	int totalLength = 0;
	for (LogicalType fieldType : types) {
		if (!BinaryRowData.isInFixedLengthPart(fieldType)) {
			totalLength += 16;
		}
	}
	return totalLength;
}
 
Example 14
Source Project: flink   Source File: BinaryRow.java    License: Apache License 2.0 5 votes vote down vote up
public static String toOriginString(BaseRow row, LogicalType[] types) {
	checkArgument(types.length == row.getArity());
	// Render as "[header,field0,field1,...]"; null fields print as "null".
	final StringBuilder sb = new StringBuilder("[");
	sb.append(row.getHeader());
	for (int pos = 0; pos < row.getArity(); pos++) {
		sb.append(',');
		sb.append(row.isNullAt(pos) ? "null" : TypeGetterSetters.get(row, pos, types[pos]));
	}
	return sb.append(']').toString();
}
 
Example 15
Source Project: flink   Source File: LogicalTypeParser.java    License: Apache License 2.0 5 votes vote down vote up
private LogicalType parseRowType() {
	// SQL standard notation uses parentheses; otherwise expect the
	// angle-bracket subtype notation.
	final TokenType begin;
	final TokenType end;
	if (hasNextToken(TokenType.BEGIN_PARAMETER)) {
		begin = TokenType.BEGIN_PARAMETER;
		end = TokenType.END_PARAMETER;
	} else {
		begin = TokenType.BEGIN_SUBTYPE;
		end = TokenType.END_SUBTYPE;
	}
	nextToken(begin);
	final List<RowType.RowField> fields = parseRowFields(end);
	nextToken(end);
	return new RowType(fields);
}
 
Example 16
Source Project: flink   Source File: LogicalTypeParser.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Parses an INTERVAL type by dispatching on the first resolution keyword:
 * YEAR/MONTH resolutions go to the year-month parser, DAY/HOUR/MINUTE/SECOND
 * to the day-time parser.
 */
private LogicalType parseIntervalType() {
	nextToken(TokenType.KEYWORD);
	switch (tokenAsKeyword()) {
		case YEAR:
		case MONTH:
			return parseYearMonthIntervalType();
		case DAY:
		case HOUR:
		case MINUTE:
		case SECOND:
			return parseDayTimeIntervalType();
		default:
			throw parsingError("Invalid interval resolution.");
	}
}
 
Example 17
Source Project: flink   Source File: ComparableTypeStrategy.java    License: Apache License 2.0 5 votes vote down vote up
private boolean areComparableWithNormalizedNullability(LogicalType firstType, LogicalType secondType) {
	// A hack to support legacy types. To be removed when we drop the legacy types.
	if (firstType instanceof LegacyTypeInformationType ||
			secondType instanceof LegacyTypeInformationType) {
		return true;
	}

	// everything is comparable with null, it should return null in that case
	if (hasRoot(firstType, LogicalTypeRoot.NULL) || hasRoot(secondType, LogicalTypeRoot.NULL)) {
		return true;
	}

	// Same root: delegate to the root-specific comparison.
	if (firstType.getTypeRoot() == secondType.getTypeRoot()) {
		return areTypesOfSameRootComparable(firstType, secondType);
	}

	// Different roots are only comparable within these families:
	// NUMERIC <-> NUMERIC, DATETIME <-> DATETIME (DATE + all TIMESTAMPs),
	// CHAR <-> VARCHAR (collations not compared), BINARY <-> VARBINARY.
	return bothHaveFamily(firstType, secondType, LogicalTypeFamily.NUMERIC)
		|| bothHaveFamily(firstType, secondType, LogicalTypeFamily.DATETIME)
		|| bothHaveFamily(firstType, secondType, LogicalTypeFamily.CHARACTER_STRING)
		|| bothHaveFamily(firstType, secondType, LogicalTypeFamily.BINARY_STRING);
}

// True when both types belong to the given logical type family.
private static boolean bothHaveFamily(LogicalType first, LogicalType second, LogicalTypeFamily family) {
	return hasFamily(first, family) && hasFamily(second, family);
}
 
Example 18
Source Project: flink   Source File: LogicalTypeChecks.java    License: Apache License 2.0 5 votes vote down vote up
@Override
protected T defaultMethod(LogicalType logicalType) {
	// Reaching the default method means the extractor was applied to a logical
	// type it has no dedicated visit method for.
	final String message = String.format(
		"Invalid use of extractor %s. Called on logical type: %s",
		this.getClass().getName(),
		logicalType);
	throw new IllegalArgumentException(message);
}
 
Example 19
Source Project: flink   Source File: FirstValueAggFunction.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public TypeInformation<GenericRowData> getAccumulatorType() {
	// Accumulator row layout: the tracked "value" plus the "time" it was seen at.
	final LogicalType[] fieldTypes = {
			fromTypeInfoToLogicalType(getResultType()),
			new BigIntType()
	};
	final String[] fieldNames = {"value", "time"};
	return (TypeInformation) new RowDataTypeInfo(fieldTypes, fieldNames);
}
 
Example 20
Source Project: flink   Source File: CallBindingCallContext.java    License: Apache License 2.0 5 votes vote down vote up
public CallBindingCallContext(
		DataTypeFactory dataTypeFactory,
		FunctionDefinition definition,
		SqlCallBinding binding,
		@Nullable RelDataType outputType) {
	super(
		dataTypeFactory,
		definition,
		binding.getOperator().getNameAsId().toString());

	this.adaptedArguments = binding.operands(); // reorders the operands
	// Lazy list view: each access derives the argument's type from the validator
	// and converts it to a Flink DataType instead of materializing all types up front.
	this.argumentDataTypes = new AbstractList<DataType>() {
		@Override
		public DataType get(int pos) {
			final RelDataType relDataType = binding.getValidator().deriveType(
				binding.getScope(),
				adaptedArguments.get(pos));
			final LogicalType logicalType = FlinkTypeFactory.toLogicalType(relDataType);
			return TypeConversions.fromLogicalToDataType(logicalType);
		}

		@Override
		public int size() {
			return binding.getOperandCount();
		}
	};
	this.outputType = convertOutputType(binding, outputType);
}
 
Example 21
Source Project: flink   Source File: AbstractOrcNoHiveVector.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Create an ORC vector from a partition spec value.
 * See hive {@code VectorizedRowBatchCtx#addPartitionColsToBatch}.
 *
 * @throws UnsupportedOperationException for type roots without a vector mapping
 */
private static ColumnVector createHiveVectorFromConstant(
		LogicalType type, Object value, int batchSize) {
	switch (type.getTypeRoot()) {
		case CHAR:
		case VARCHAR:
		case BINARY:
		case VARBINARY:
			return createBytesVector(batchSize, value);
		case BOOLEAN:
			// Booleans are encoded into a long vector as 0/1.
			return createLongVector(batchSize, (Boolean) value ? 1 : 0);
		case TINYINT:
		case SMALLINT:
		case INTEGER:
		case BIGINT:
			return createLongVector(batchSize, value);
		case DECIMAL:
			DecimalType decimalType = (DecimalType) type;
			return createDecimalVector(
					batchSize, decimalType.getPrecision(), decimalType.getScale(), value);
		case FLOAT:
		case DOUBLE:
			return createDoubleVector(batchSize, value);
		case DATE:
			// Normalize java.time.LocalDate to java.sql.Date before converting to
			// the internal epoch-based representation.
			if (value instanceof LocalDate) {
				value = Date.valueOf((LocalDate) value);
			}
			return createLongVector(batchSize, dateToInternal((Date) value));
		case TIMESTAMP_WITHOUT_TIME_ZONE:
			return createTimestampVector(batchSize, value);
		default:
			throw new UnsupportedOperationException("Unsupported type: " + type);
	}
}
 
Example 22
Source Project: flink   Source File: MapDataSerializer.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a serializer for internal map data.
 *
 * @param keyType logical type of the map keys
 * @param valueType logical type of the map values
 * @param conf execution config passed to the element serializer factory
 */
public MapDataSerializer(LogicalType keyType, LogicalType valueType, ExecutionConfig conf) {
	this.keyType = keyType;
	this.valueType = valueType;

	// Element serializers are derived from the logical types once, up front.
	this.keySerializer = InternalSerializers.create(keyType, conf);
	this.valueSerializer = InternalSerializers.create(valueType, conf);
}
 
Example 23
Source Project: flink   Source File: HiveFunctionUtils.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reflectively invokes {@code getHiveResultType(Object[], DataType[])} on the
 * given function and converts the returned {@code DataType} into a Calcite
 * {@code RelDataType}. Reflection is used presumably to avoid a compile-time
 * dependency on the Hive classes — confirm against the HiveFunction interface.
 */
static RelDataType invokeGetResultType(
		Object function, Object[] constantArguments, LogicalType[] argTypes,
		FlinkTypeFactory typeFactory) {
	try {
		// See hive HiveFunction
		Method method = function.getClass()
				.getMethod("getHiveResultType", Object[].class, DataType[].class);
		DataType resultType = (DataType) method.invoke(
				function, constantArguments, TypeConversions.fromLogicalToDataType(argTypes));
		return typeFactory.createFieldTypeFromLogicalType(fromDataTypeToLogicalType(resultType));
	} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
		// Reflective failures are not recoverable here; surface them unchecked.
		throw new RuntimeException(e);
	}
}
 
Example 24
Source Project: flink   Source File: LogicalTypesTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testVarCharType() {
	// VARCHAR(33): presumably testAll checks the serializable/summary strings,
	// the supported input/output conversion classes, the (empty) children, and
	// an unequal "other" instance — confirm against the testAll helper.
	testAll(
		new VarCharType(33),
		"VARCHAR(33)",
		"VARCHAR(33)",
		new Class[]{String.class, byte[].class},
		new Class[]{String.class, byte[].class},
		new LogicalType[]{},
		new VarCharType(12)
	);
}
 
Example 25
Source Project: flink   Source File: ValueLiteralExpression.java    License: Apache License 2.0 5 votes vote down vote up
private static void validateValueDataType(Object value, DataType dataType) {
	final LogicalType logicalType = dataType.getLogicalType();

	// Null literals are only legal for nullable types.
	if (value == null) {
		if (logicalType.isNullable()) {
			return;
		}
		throw new ValidationException(
			String.format(
				"Data type '%s' does not support null values.",
				dataType));
	}

	final Class<?> candidate = value.getClass();

	// ensure value and data type match
	if (!dataType.getConversionClass().isAssignableFrom(candidate)) {
		throw new ValidationException(
			String.format(
				"Data type '%s' with conversion class '%s' does not support a value literal of class '%s'.",
				dataType,
				dataType.getConversionClass().getName(),
				candidate.getName()));
	}

	// check for proper input as this cannot be checked in data type
	if (!logicalType.supportsInputConversion(candidate)) {
		throw new ValidationException(
			String.format(
				"Data type '%s' does not support a conversion from class '%s'.",
				dataType,
				candidate.getName()));
	}
}
 
Example 26
Source Project: flink   Source File: WindowOperatorBuilder.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Configures the window operator with a table aggregate function and returns
 * a builder specialized for table aggregates.
 */
public TableAggregateWindowOperatorBuilder aggregate(
	GeneratedNamespaceTableAggsHandleFunction<?> generatedTableAggregateFunction,
	LogicalType[] accumulatorTypes,
	LogicalType[] aggResultTypes,
	LogicalType[] windowPropertyTypes) {

	// Delegate the type bookkeeping to the shared aggregate(...) overload.
	aggregate(accumulatorTypes, aggResultTypes, windowPropertyTypes);
	return new TableAggregateWindowOperatorBuilder(generatedTableAggregateFunction, this);
}
 
Example 27
Source Project: flink   Source File: WindowOperatorBuilder.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Configures the window operator with an aggregate function and a record
 * equaliser, returning a builder specialized for row aggregates.
 */
public AggregateWindowOperatorBuilder aggregate(
	NamespaceAggsHandleFunction<?> aggregateFunction,
	RecordEqualiser equaliser,
	LogicalType[] accumulatorTypes,
	LogicalType[] aggResultTypes,
	LogicalType[] windowPropertyTypes) {

	// Delegate the type bookkeeping to the shared aggregate(...) overload.
	aggregate(accumulatorTypes, aggResultTypes, windowPropertyTypes);
	return new AggregateWindowOperatorBuilder(aggregateFunction, equaliser, this);
}
 
Example 28
Source Project: flink   Source File: LogicalTypeParser.java    License: Apache License 2.0 5 votes vote down vote up
private LogicalType parseTimeType() {
	final int precision = parseOptionalPrecision(TimeType.DEFAULT_PRECISION);
	// Consume the optional "WITHOUT TIME ZONE" suffix; it is the default
	// semantics and does not change the resulting type.
	if (hasNextToken(Keyword.WITHOUT)) {
		nextToken(Keyword.WITHOUT);
		nextToken(Keyword.TIME);
		nextToken(Keyword.ZONE);
	}
	return new TimeType(precision);
}
 
Example 29
Source Project: flink   Source File: HiveScalarSqlFunction.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public ScalarFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
	// Work on a deep copy so the template function instance stays untouched.
	final ScalarFunction copy;
	try {
		copy = InstantiationUtil.clone(function);
	} catch (IOException | ClassNotFoundException e) {
		throw new RuntimeException(e);
	}
	return (ScalarFunction) invokeSetArgs(copy, constantArguments, argTypes);
}
 
Example 30
Source Project: flink   Source File: AvroRowDataDeserializationSchema.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a runtime converter which is null safe.
 */
private static DeserializationRuntimeConverter createNullableConverter(LogicalType type) {
	final DeserializationRuntimeConverter inner = createConverter(type);
	// Short-circuit null before delegating to the non-null-safe converter.
	return avroObject -> avroObject == null ? null : inner.convert(avroObject);
}