org.apache.flink.table.expressions.ResolvedFieldReference Java Examples

The following examples show how to use org.apache.flink.table.expressions.ResolvedFieldReference. Each example lists its source file and the open source project it was taken from.
Example #1
Source File: ExistingField.java    From flink with Apache License 2.0
/**
 * Returns an {@link Expression} that casts a {@link Long}, {@link Timestamp}, or
 * timestamp formatted {@link String} field (e.g., "2018-05-28 12:34:56.000")
 * into a rowtime attribute.
 */
@Override
public Expression getExpression(ResolvedFieldReference[] fieldAccesses) {
	ResolvedFieldReference fieldAccess = fieldAccesses[0];
	DataType type = fromLegacyInfoToDataType(fieldAccess.resultType());

	FieldReferenceExpression fieldReferenceExpr = new FieldReferenceExpression(
			fieldAccess.name(),
			type,
			0,
			fieldAccess.fieldIndex());

	switch (type.getLogicalType().getTypeRoot()) {
		case BIGINT:
		case TIMESTAMP_WITHOUT_TIME_ZONE:
			return fieldReferenceExpr;
		case VARCHAR:
			return unresolvedCall(
					CAST,
					fieldReferenceExpr,
					typeLiteral(TIMESTAMP(3).bridgedTo(Timestamp.class)));
		default:
			throw new RuntimeException("Unsupport type: " + type);
	}
}
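
For context, ExistingField is a TimestampExtractor, so the expression above is produced when the extractor is registered for a rowtime attribute. A minimal usage sketch, assuming a source whose physical type contains a "ts" timestamp field; the attribute name, field name, and the sketch class name are illustrative:

import java.util.Collections;
import java.util.List;

import org.apache.flink.table.sources.RowtimeAttributeDescriptor;
import org.apache.flink.table.sources.tsextractors.ExistingField;
import org.apache.flink.table.sources.wmstrategies.AscendingTimestamps;

public class ExistingFieldUsageSketch {

	/** Declares the existing physical field "ts" as the rowtime attribute "rowtime". */
	public static List<RowtimeAttributeDescriptor> rowtimeDescriptors() {
		return Collections.singletonList(
			new RowtimeAttributeDescriptor(
				"rowtime",                     // logical attribute name
				new ExistingField("ts"),       // extractor shown in this example
				new AscendingTimestamps()));   // watermark strategy
	}
}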
 
Example #2
Source File: TimestampExtractorUtils.java    From flink with Apache License 2.0
/**
 * Retrieves all field accesses needed for the given {@link TimestampExtractor}.
 *
 * @param timestampExtractor Extractor for which to construct array of field accesses.
 * @param physicalInputType Physical input type that the timestamp extractor accesses.
 * @param nameRemapping Additional remapping of a logical to a physical field name.
 *                      TimestampExtractor works with logical names, but accesses physical
 *                      fields.
 * @return Array of physical field references.
 */
public static ResolvedFieldReference[] getAccessedFields(
		TimestampExtractor timestampExtractor,
		DataType physicalInputType,
		Function<String, String> nameRemapping) {

	final Function<String, ResolvedFieldReference> fieldMapping;
	if (LogicalTypeChecks.isCompositeType(physicalInputType.getLogicalType())) {
		TableSchema schema = DataTypeUtils.expandCompositeTypeToSchema(physicalInputType);
		fieldMapping = (arg) -> mapToResolvedField(nameRemapping, schema, arg);
	} else {
		fieldMapping = (arg) -> new ResolvedFieldReference(
			arg,
			TypeConversions.fromDataTypeToLegacyInfo(physicalInputType),
			0);
	}
	return getAccessedFields(timestampExtractor, fieldMapping);
}
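
A minimal usage sketch of this utility, assuming it lives in org.apache.flink.table.sources as in recent Flink versions and reusing the ExistingField extractor from Example #1; the field names, the logical-to-physical remapping, and the sketch class name are illustrative:

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.expressions.ResolvedFieldReference;
import org.apache.flink.table.sources.TimestampExtractorUtils;
import org.apache.flink.table.sources.tsextractors.ExistingField;
import org.apache.flink.table.types.DataType;

public class AccessedFieldsSketch {

	public static ResolvedFieldReference[] accessedFields() {
		// Physical row type produced by the table source.
		DataType physicalType = DataTypes.ROW(
			DataTypes.FIELD("timestamp_col", DataTypes.TIMESTAMP(3).bridgedTo(Timestamp.class)),
			DataTypes.FIELD("value", DataTypes.BIGINT()));

		// The extractor uses the logical name "ts"; remap it to the physical column.
		Map<String, String> logicalToPhysical = new HashMap<>();
		logicalToPhysical.put("ts", "timestamp_col");

		return TimestampExtractorUtils.getAccessedFields(
			new ExistingField("ts"),
			physicalType,
			name -> logicalToPhysical.getOrDefault(name, name));
	}
}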
 
Example #3
Source File: TimestampExtractorUtils.java    From flink with Apache License 2.0
private static ResolvedFieldReference mapToResolvedField(
		Function<String, String> nameRemapping,
		TableSchema schema,
		String arg) {
	String remappedName = nameRemapping.apply(arg);

	int idx = IntStream.range(0, schema.getFieldCount())
		.filter(i -> schema.getFieldName(i).get().equals(remappedName))
		.findFirst()
		.orElseThrow(() -> new ValidationException(String.format("Field %s does not exist", remappedName)));

	TypeInformation<?> dataType = TypeConversions.fromDataTypeToLegacyInfo(schema.getTableColumn(idx)
		.get()
		.getType());
	return new ResolvedFieldReference(remappedName, dataType, idx);
}
 
Example #4
Source File: ExistingField.java    From flink with Apache License 2.0
/**
 * Returns an {@link Expression} that casts a {@link Long}, {@link Timestamp}, or
 * timestamp formatted {@link String} field (e.g., "2018-05-28 12:34:56.000")
 * into a rowtime attribute.
 */
@Override
public Expression getExpression(ResolvedFieldReference[] fieldAccesses) {
	ResolvedFieldReference fieldAccess = fieldAccesses[0];
	DataType type = fromLegacyInfoToDataType(fieldAccess.resultType());

	FieldReferenceExpression fieldReferenceExpr = new FieldReferenceExpression(
			fieldAccess.name(),
			type,
			0,
			fieldAccess.fieldIndex());

	switch (type.getLogicalType().getTypeRoot()) {
		case BIGINT:
		case TIMESTAMP_WITHOUT_TIME_ZONE:
			return fieldReferenceExpr;
		case VARCHAR:
			DataType outputType = TIMESTAMP(3).bridgedTo(Timestamp.class);
			return new CallExpression(
					CAST,
					Arrays.asList(fieldReferenceExpr, typeLiteral(outputType)),
					outputType);
		default:
			throw new RuntimeException("Unsupport type: " + type);
	}
}
 
Example #5
Source File: TimestampExtractorUtils.java    From flink with Apache License 2.0
private static ResolvedFieldReference[] getAccessedFields(
		TimestampExtractor timestampExtractor,
		Function<String, ResolvedFieldReference> fieldMapping) {
	return Arrays.stream(timestampExtractor.getArgumentFields())
		.map(fieldMapping)
		.toArray(ResolvedFieldReference[]::new);
}
 
Example #6
Source File: TableSourceValidation.java    From flink with Apache License 2.0
private static void validateTimestampExtractorArguments(
		List<RowtimeAttributeDescriptor> descriptors,
		TableSource<?> tableSource) {
	if (descriptors.size() == 1) {
		TimestampExtractor extractor = descriptors.get(0).getTimestampExtractor();
		TypeInformation<?>[] types = Arrays.stream(TimestampExtractorUtils.getAccessedFields(
			extractor,
			tableSource.getProducedDataType(),
			getNameMappingFunction(tableSource)
		)).map(ResolvedFieldReference::resultType)
			.toArray(TypeInformation<?>[]::new);
		extractor.validateArgumentFields(types);
	}
}
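
The same check can be sketched directly against an extractor, without going through a TableSource. A minimal sketch that mirrors the stream pipeline above, assuming an identity name mapping and the ExistingField extractor from Example #1; the field name and the sketch class name are illustrative:

import java.sql.Timestamp;
import java.util.Arrays;
import java.util.function.Function;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.expressions.ResolvedFieldReference;
import org.apache.flink.table.sources.TimestampExtractorUtils;
import org.apache.flink.table.sources.tsextractors.ExistingField;

public class ExtractorValidationSketch {

	public static void validate() {
		ExistingField extractor = new ExistingField("ts");

		TypeInformation<?>[] types = Arrays.stream(TimestampExtractorUtils.getAccessedFields(
				extractor,
				DataTypes.ROW(DataTypes.FIELD("ts", DataTypes.TIMESTAMP(3).bridgedTo(Timestamp.class))),
				Function.identity()))
			.map(ResolvedFieldReference::resultType)
			.toArray(TypeInformation<?>[]::new);

		// Throws a ValidationException if "ts" cannot serve as a rowtime source.
		extractor.validateArgumentFields(types);
	}
}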
 
Example #7
Source File: OrcTableSourceTest.java    From Flink-CEPplus with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testApplyPredicate() throws Exception {

	OrcTableSource orc = OrcTableSource.builder()
		.path(getPath(TEST_FILE_NESTED))
		.forOrcSchema(TEST_SCHEMA_NESTED)
		.build();

	// expressions for supported predicates
	Expression pred1 = new GreaterThan(
		new ResolvedFieldReference("int1", Types.INT),
		new Literal(100, Types.INT));
	Expression pred2 = new EqualTo(
		new ResolvedFieldReference("string1", Types.STRING),
		new Literal("hello", Types.STRING));
	// unsupported predicate
	Expression unsupportedPred = new EqualTo(
		new GetCompositeField(
			new ItemAt(
				new ResolvedFieldReference(
					"list",
					ObjectArrayTypeInfo.getInfoFor(
						Types.ROW_NAMED(new String[] {"int1", "string1"}, Types.INT, Types.STRING))),
				new Literal(1, Types.INT)),
			"int1"),
		new Literal(1, Types.INT)
		);
	// invalid predicate
	Expression invalidPred = new EqualTo(
		new ResolvedFieldReference("long1", Types.LONG),
		// some invalid, non-serializable literal (here an object of this test class)
		new Literal(new OrcTableSourceTest(), Types.LONG)
	);

	ArrayList<Expression> preds = new ArrayList<>();
	preds.add(pred1);
	preds.add(pred2);
	preds.add(unsupportedPred);
	preds.add(invalidPred);

	// apply predicates on TableSource
	OrcTableSource projected = (OrcTableSource) orc.applyPredicate(preds);

	// ensure copy is returned
	assertTrue(orc != projected);

	// ensure table schema is identical
	assertEquals(orc.getTableSchema(), projected.getTableSchema());

	// ensure return type is identical
	assertEquals(
		Types.ROW_NAMED(getNestedFieldNames(), getNestedFieldTypes()),
		projected.getReturnType());

	// ensure IF is configured with valid/supported predicates
	OrcTableSource spyTS = spy(projected);
	OrcRowInputFormat mockIF = mock(OrcRowInputFormat.class);
	doReturn(mockIF).when(spyTS).buildOrcInputFormat();
	ExecutionEnvironment environment = mock(ExecutionEnvironment.class);
	when(environment.createInput(any(InputFormat.class))).thenReturn(mock(DataSource.class));
	spyTS.getDataSet(environment);

	ArgumentCaptor<OrcRowInputFormat.Predicate> arguments = ArgumentCaptor.forClass(OrcRowInputFormat.Predicate.class);
	verify(mockIF, times(2)).addPredicate(arguments.capture());
	List<String> values = arguments.getAllValues().stream().map(Object::toString).collect(Collectors.toList());
	assertTrue(values.contains(
		new OrcRowInputFormat.Not(new OrcRowInputFormat.LessThanEquals("int1", PredicateLeaf.Type.LONG, 100)).toString()));
	assertTrue(values.contains(
		new OrcRowInputFormat.Equals("string1", PredicateLeaf.Type.STRING, "hello").toString()));

	// ensure filter pushdown is correct
	assertTrue(spyTS.isFilterPushedDown());
	assertFalse(orc.isFilterPushedDown());
}
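
Outside of the mocked test environment, the same filter push-down can be sketched directly. A minimal sketch, assuming an illustrative ORC file path and schema string, and using the same legacy planner expression classes as the test above; the sketch class name is also illustrative:

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.orc.OrcTableSource;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.expressions.GreaterThan;
import org.apache.flink.table.expressions.Literal;
import org.apache.flink.table.expressions.ResolvedFieldReference;

public class OrcPredicateSketch {

	public static OrcTableSource filteredSource() {
		OrcTableSource orc = OrcTableSource.builder()
			.path("/tmp/data.orc")                            // illustrative path
			.forOrcSchema("struct<int1:int,string1:string>")  // illustrative ORC schema
			.build();

		// Supported predicates are translated into ORC search arguments;
		// unsupported ones are left for Flink to evaluate at runtime.
		List<Expression> predicates = new ArrayList<>();
		predicates.add(new GreaterThan(
			new ResolvedFieldReference("int1", Types.INT),
			new Literal(100, Types.INT)));

		return (OrcTableSource) orc.applyPredicate(predicates);
	}
}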
 
Example #8
Source File: StreamRecordTimestamp.java    From flink with Apache License 2.0
@Override
public Expression getExpression(ResolvedFieldReference[] fieldAccesses) {
	return ApiExpressionUtils.unresolvedCall(STREAM_RECORD_TIMESTAMP);
}
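
StreamRecordTimestamp reads the timestamp already attached to each StreamRecord, so it needs no field accesses. A minimal sketch of registering it for a rowtime attribute, assuming PreserveWatermarks.INSTANCE is available as the watermark strategy; the attribute name and the sketch class name are illustrative:

import org.apache.flink.table.sources.RowtimeAttributeDescriptor;
import org.apache.flink.table.sources.tsextractors.StreamRecordTimestamp;
import org.apache.flink.table.sources.wmstrategies.PreserveWatermarks;

public class StreamRecordTimestampSketch {

	/** Reuses the timestamps (and watermarks) already assigned in the underlying DataStream. */
	public static RowtimeAttributeDescriptor rowtimeFromSource() {
		return new RowtimeAttributeDescriptor(
			"rowtime",
			new StreamRecordTimestamp(),
			PreserveWatermarks.INSTANCE);
	}
}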
 
Example #9
Source File: FieldComputer.java    From flink with Apache License 2.0
/**
 * Returns the {@link Expression} that computes the value of the field.
 *
 * @param fieldAccesses Field access expressions for the argument fields.
 * @return The expression to extract the timestamp from the {@link TableSource} return type.
 */
Expression getExpression(ResolvedFieldReference[] fieldAccesses);
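
FieldComputer is the generic contract behind extractors such as ExistingField and StreamRecordTimestamp above. A hedged sketch of a custom implementation via TimestampExtractor, which implements FieldComputer&lt;Long&gt; and already supplies getReturnType() and toProperties(); the class name EpochMillisField, the field name, and the validation rule are illustrative, not part of Flink:

import static org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.expressions.FieldReferenceExpression;
import org.apache.flink.table.expressions.ResolvedFieldReference;
import org.apache.flink.table.sources.tsextractors.TimestampExtractor;

/** Illustrative extractor that reads epoch milliseconds from a single BIGINT field. */
public class EpochMillisField extends TimestampExtractor {

	private final String field;

	public EpochMillisField(String field) {
		this.field = field;
	}

	@Override
	public String[] getArgumentFields() {
		return new String[]{field};
	}

	@Override
	public void validateArgumentFields(TypeInformation<?>[] argumentFieldTypes) {
		if (!Types.LONG.equals(argumentFieldTypes[0])) {
			throw new ValidationException("Field '" + field + "' must be of type Long.");
		}
	}

	@Override
	public Expression getExpression(ResolvedFieldReference[] fieldAccesses) {
		// Return the resolved field itself, as ExistingField does for BIGINT fields;
		// the planner turns it into the rowtime attribute.
		ResolvedFieldReference fieldAccess = fieldAccesses[0];
		return new FieldReferenceExpression(
			fieldAccess.name(),
			fromLegacyInfoToDataType(fieldAccess.resultType()),
			0,
			fieldAccess.fieldIndex());
	}
}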
 