org.apache.flink.api.java.typeutils.TypeExtractor Java Examples

The following examples show how to use org.apache.flink.api.java.typeutils.TypeExtractor. The examples are taken from open-source projects; the originating project and source file are noted above each example.
Example #1
Source File: JoinedStreams.java    From flink with Apache License 2.0
/**
 * Completes the join operation with the user function that is executed
 * for each combination of elements with the same key in a window.
 *
 * <p>Note: This method's return type does not support setting an operator-specific parallelism.
 * Due to binary backwards compatibility, this cannot be altered. Use the
 * {@link #with(FlatJoinFunction)} method to set an operator-specific parallelism.
 */
public <T> DataStream<T> apply(FlatJoinFunction<T1, T2, T> function) {
	TypeInformation<T> resultType = TypeExtractor.getBinaryOperatorReturnType(
		function,
		FlatJoinFunction.class,
		0,
		1,
		2,
		new int[]{2, 0},
		input1.getType(),
		input2.getType(),
		"Join",
		false);

	return apply(function, resultType);
}
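
A hedged usage sketch (the stream names, key selectors, and tuple types below are illustrative assumptions, not part of the Flink source): because apply() routes the function through TypeExtractor.getBinaryOperatorReturnType, an anonymous FlatJoinFunction needs no explicit result TypeInformation.

DataStream<String> joined = orders
	.join(payments)
	.where(order -> order.f0)
	.equalTo(payment -> payment.f0)
	.window(TumblingEventTimeWindows.of(Time.seconds(10)))
	.apply(new FlatJoinFunction<Tuple2<String, Integer>, Tuple2<String, Double>, String>() {
		@Override
		public void join(Tuple2<String, Integer> order, Tuple2<String, Double> payment, Collector<String> out) {
			// The extractor reads String from this class's third type argument.
			out.collect(order.f0 + " -> " + payment.f1);
		}
	});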
 
Example #2
Source File: WritableSerializerTest.java    From flink with Apache License 2.0
@Test
public void testStringArrayWritable() {
	StringArrayWritable[] data = new StringArrayWritable[]{
			new StringArrayWritable(new String[]{}),
			new StringArrayWritable(new String[]{""}),
			new StringArrayWritable(new String[]{"a", "a"}),
			new StringArrayWritable(new String[]{"a", "b"}),
			new StringArrayWritable(new String[]{"c", "c"}),
			new StringArrayWritable(new String[]{"d", "f"}),
			new StringArrayWritable(new String[]{"d", "m"}),
			new StringArrayWritable(new String[]{"z", "x"}),
			new StringArrayWritable(new String[]{"a", "a", "a"})
	};

	WritableTypeInfo<StringArrayWritable> writableTypeInfo = (WritableTypeInfo<StringArrayWritable>) TypeExtractor.getForObject(data[0]);
	WritableSerializer<StringArrayWritable> writableSerializer = (WritableSerializer<StringArrayWritable>) writableTypeInfo.createSerializer(new ExecutionConfig());

	SerializerTestInstance<StringArrayWritable> testInstance = new SerializerTestInstance<StringArrayWritable>(writableSerializer, writableTypeInfo.getTypeClass(), -1, data);

	testInstance.testAll();
}
 
Example #3
Source File: ConnectedStreams.java    From flink with Apache License 2.0
/**
 * Applies the given {@link CoProcessFunction} on the connected input streams,
 * thereby creating a transformed output stream.
 *
 * <p>The function will be called for every element in the input streams and can produce zero or
 * more output elements. Contrary to the {@link #flatMap(CoFlatMapFunction)} function, this
 * function can also query the time and set timers. When reacting to the firing of set timers
 * the function can directly emit elements and/or register yet more timers.
 *
 * @param coProcessFunction The {@link CoProcessFunction} that is called for each element
 *                      in the stream.
 *
 * @param <R> The type of elements emitted by the {@code CoProcessFunction}.
 *
 * @return The transformed {@link DataStream}.
 */
@PublicEvolving
public <R> SingleOutputStreamOperator<R> process(
		CoProcessFunction<IN1, IN2, R> coProcessFunction) {

	TypeInformation<R> outTypeInfo = TypeExtractor.getBinaryOperatorReturnType(
		coProcessFunction,
		CoProcessFunction.class,
		0,
		1,
		2,
		TypeExtractor.NO_INDEX,
		getType1(),
		getType2(),
		Utils.getCallLocationName(),
		true);

	return process(coProcessFunction, outTypeInfo);
}
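
A minimal usage sketch (the connected streams and their element types are assumed): with an anonymous CoProcessFunction the three type arguments stay visible to TypeExtractor, so no returns() hint is needed for the output type.

DataStream<String> result = first
	.connect(second)
	.process(new CoProcessFunction<Integer, Long, String>() {
		@Override
		public void processElement1(Integer value, Context ctx, Collector<String> out) {
			out.collect("first: " + value);
		}

		@Override
		public void processElement2(Long value, Context ctx, Collector<String> out) {
			out.collect("second: " + value);
		}
	});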
 
Example #4
Source File: CsvInputFormatTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testPojoTypeWithMappingInformation() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,3.123,AAA,BBB\n");
	wrt.write("456,1.123,BBB,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo, new String[]{"field1", "field3", "field2", "field4"});

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	validatePojoItem(inputFormat);
}
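
The PojoItem class is defined elsewhere in the test. A plausible reconstruction (field types are inferred from the CSV rows above, so treat this as an assumption) is a public POJO with public fields, which is what lets TypeExtractor.createTypeInfo produce a PojoTypeInfo:

public static class PojoItem {
	public int field1;     // 123 / 456
	public String field2;  // "AAA" / "BBB"
	public double field3;  // 3.123 / 1.123
	public String field4;  // "BBB" / "AAA"
}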
 
Example #5
Source File: UserDefinedFunctionHelper.java    From flink with Apache License 2.0
/**
 * Tries to infer the TypeInformation of an AggregateFunction's accumulator type.
 *
 * @param aggregateFunction The AggregateFunction for which the accumulator type is inferred.
 * @param scalaType The implicitly inferred type of the accumulator.
 *
 * @return The inferred accumulator type of the AggregateFunction.
 */
public static <T, ACC> TypeInformation<ACC> getAccumulatorTypeOfAggregateFunction(
		UserDefinedAggregateFunction<T, ACC> aggregateFunction,
		TypeInformation<ACC> scalaType) {

	TypeInformation<ACC> userProvidedType = aggregateFunction.getAccumulatorType();
	if (userProvidedType != null) {
		return userProvidedType;
	} else if (scalaType != null) {
		return scalaType;
	} else {
		return TypeExtractor.createTypeInfo(
			aggregateFunction,
			UserDefinedAggregateFunction.class,
			aggregateFunction.getClass(),
			1);
	}
}
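
A hedged sketch of the fallback branch (the class and method bodies are assumptions): a Table API aggregate function that does not override getAccumulatorType(), so the accumulator type Tuple1<Long> is extracted from the class signature at type-parameter index 1.

public static class CountAggregate extends AggregateFunction<Long, Tuple1<Long>> {

	@Override
	public Tuple1<Long> createAccumulator() {
		return Tuple1.of(0L);
	}

	// Invoked by the runtime for each input value (resolved by naming convention).
	public void accumulate(Tuple1<Long> acc, String input) {
		acc.f0 += 1;
	}

	@Override
	public Long getValue(Tuple1<Long> acc) {
		return acc.f0;
	}
}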
 
Example #6
Source File: SelectorFunctionKeysTest.java    From flink with Apache License 2.0
@Test
public void testAreCompatible1() throws Keys.IncompatibleKeysException {
	TypeInformation<Pojo2> t1 = TypeExtractor.getForClass(Pojo2.class);
	TypeInformation<Tuple2<Integer, String>> t2 =
		new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	Keys<Pojo2> k1 = new Keys.SelectorFunctionKeys<>(
		new KeySelector1(),
		t1,
		BasicTypeInfo.STRING_TYPE_INFO
	);
	Keys<Tuple2<Integer, String>> k2 = new Keys.SelectorFunctionKeys<>(
		new KeySelector2(),
		t2,
		BasicTypeInfo.STRING_TYPE_INFO
	);

	Assert.assertTrue(k1.areCompatible(k2));
	Assert.assertTrue(k2.areCompatible(k1));
}
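
KeySelector1 and KeySelector2 are defined elsewhere in the test. A plausible reconstruction (the accessed fields are assumptions) shows why the keys are compatible: both selectors extract a String key.

private static class KeySelector1 implements KeySelector<Pojo2, String> {
	@Override
	public String getKey(Pojo2 value) {
		return value.a; // field name assumed
	}
}

private static class KeySelector2 implements KeySelector<Tuple2<Integer, String>, String> {
	@Override
	public String getKey(Tuple2<Integer, String> value) {
		return value.f1;
	}
}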
 
Example #7
Source File: SelectorFunctionKeysTest.java    From flink with Apache License 2.0
@Test
public void testOriginalTypes2() throws Exception {
	final TupleTypeInfo<Tuple2<Integer, String>> t1 = new TupleTypeInfo<>(
			BasicTypeInfo.INT_TYPE_INFO,
			BasicTypeInfo.STRING_TYPE_INFO
	);
	TypeInformation<PojoWithMultiplePojos> t2 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);

	Keys<PojoWithMultiplePojos> sk = new Keys.SelectorFunctionKeys<>(
			new KeySelector3(),
			t2,
			t1
	);

	Assert.assertArrayEquals(
			new TypeInformation<?>[] { t1 },
			sk.getOriginalKeyFieldTypes()
	);
}
 
Example #8
Source File: ExecutionEnvironment.java    From Flink-CEPplus with Apache License 2.0
public <X> DataSource<X> readFile(FileInputFormat<X> inputFormat, String filePath) {
	if (inputFormat == null) {
		throw new IllegalArgumentException("InputFormat must not be null.");
	}
	if (filePath == null) {
		throw new IllegalArgumentException("The file path must not be null.");
	}

	inputFormat.setFilePath(new Path(filePath));
	try {
		return createInput(inputFormat, TypeExtractor.getInputFormatTypes(inputFormat));
	}
	catch (Exception e) {
		throw new InvalidProgramException("The type returned by the input format could not be automatically determined. " +
				"Please specify the TypeInformation of the produced type explicitly by using the " +
				"'createInput(InputFormat, TypeInformation)' method instead.");
	}
}
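
A minimal usage sketch (the file path is an assumption): TextInputFormat extends FileInputFormat<String>, so TypeExtractor.getInputFormatTypes resolves the produced type to String and the caller needs no explicit TypeInformation.

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
DataSource<String> lines = env.readFile(
		new TextInputFormat(new Path("file:///tmp/input.txt")),
		"file:///tmp/input.txt");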
 
Example #9
Source File: StreamExecutionEnvironment.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new data stream that contains the given elements. The elements must all be of the
 * same type, for example, all {@link String} or all {@link Integer}.
 *
 * <p>The framework will try and determine the exact type from the elements. In case of generic
 * elements, it may be necessary to manually supply the type information via
 * {@link #fromCollection(java.util.Collection, org.apache.flink.api.common.typeinfo.TypeInformation)}.
 *
 * <p>Note that this operation will result in a non-parallel data stream source, i.e. a data
 * stream source with a degree of parallelism of one.
 *
 * @param data
 * 		The array of elements to create the data stream from.
 * @param <OUT>
 * 		The type of the returned data stream
 * @return The data stream representing the given array of elements
 */
@SafeVarargs
public final <OUT> DataStreamSource<OUT> fromElements(OUT... data) {
	if (data.length == 0) {
		throw new IllegalArgumentException("fromElements needs at least one element as argument");
	}

	TypeInformation<OUT> typeInfo;
	try {
		typeInfo = TypeExtractor.getForObject(data[0]);
	}
	catch (Exception e) {
		throw new RuntimeException("Could not create TypeInformation for type " + data[0].getClass().getName()
				+ "; please specify the TypeInformation manually via "
				+ "StreamExecutionEnvironment#fromElements(Collection, TypeInformation)", e);
	}
	return fromCollection(Arrays.asList(data), typeInfo);
}
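
A short sketch of both paths (the values are illustrative): type extraction from the first element for concrete types, and the documented manual fallback through fromCollection when extraction fails or is too coarse.

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

// Extracted from data[0]: BasicTypeInfo.STRING_TYPE_INFO.
DataStreamSource<String> words = env.fromElements("to", "be", "or", "not");

// Manual fallback: supply the TypeInformation explicitly.
DataStreamSource<Tuple2<String, Integer>> pairs = env.fromCollection(
		Arrays.asList(Tuple2.of("a", 1), Tuple2.of("b", 2)),
		TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {}));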
 
Example #10
Source File: WindowedStream.java    From flink with Apache License 2.0
/**
 * Applies the given window function to each window. The window function is called for each
 * evaluation of the window for each key individually. The output of the window function is
 * interpreted as a regular non-windowed stream.
 *
 * <p>Arriving data is incrementally aggregated using the given aggregate function. This means
 * that the window function typically has only a single value to process when called.
 *
 * @param aggFunction The aggregate function that is used for incremental aggregation.
 * @param windowFunction The window function.
 *
 * @return The data stream that is the result of applying the window function to the window.
 *
 * @param <ACC> The type of the AggregateFunction's accumulator
 * @param <V> The type of AggregateFunction's result, and the WindowFunction's input
 * @param <R> The type of the elements in the resulting stream, equal to the
 *            WindowFunction's result type
 */
@PublicEvolving
public <ACC, V, R> SingleOutputStreamOperator<R> aggregate(
		AggregateFunction<T, ACC, V> aggFunction,
		ProcessWindowFunction<V, R, K, W> windowFunction) {

	checkNotNull(aggFunction, "aggFunction");
	checkNotNull(windowFunction, "windowFunction");

	TypeInformation<ACC> accumulatorType = TypeExtractor.getAggregateFunctionAccumulatorType(
			aggFunction, input.getType(), null, false);

	TypeInformation<V> aggResultType = TypeExtractor.getAggregateFunctionReturnType(
			aggFunction, input.getType(), null, false);

	TypeInformation<R> resultType = getProcessWindowFunctionReturnType(windowFunction, aggResultType, null);

	return aggregate(aggFunction, windowFunction, accumulatorType, aggResultType, resultType);
}
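
A hedged usage sketch (the input type, key, and window are assumed): an incremental per-key sum whose accumulator and result types are extracted as above, combined with a ProcessWindowFunction that receives the single pre-aggregated value.

DataStream<String> sums = input
	.keyBy(t -> t.f0)
	.window(TumblingEventTimeWindows.of(Time.minutes(1)))
	.aggregate(
		new AggregateFunction<Tuple2<String, Long>, Long, Long>() {
			@Override
			public Long createAccumulator() {
				return 0L;
			}

			@Override
			public Long add(Tuple2<String, Long> value, Long acc) {
				return acc + value.f1;
			}

			@Override
			public Long getResult(Long acc) {
				return acc;
			}

			@Override
			public Long merge(Long a, Long b) {
				return a + b;
			}
		},
		new ProcessWindowFunction<Long, String, String, TimeWindow>() {
			@Override
			public void process(String key, Context ctx, Iterable<Long> preAggregated, Collector<String> out) {
				// The incremental aggregate leaves exactly one value per window.
				out.collect(key + ": " + preAggregated.iterator().next());
			}
		});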
 
Example #11
Source File: AllGroupReduceDriverTest.java    From flink with Apache License 2.0
@Test
public void testAllReduceDriverImmutableEmpty() {
	try {
		TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
				new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String,Integer>>();
		
		List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
		TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0));
		MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get();
		context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);
		
		context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
		context.setCollector(new DiscardingOutputCollector<Tuple2<String, Integer>>());
		
		AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver = new AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
		driver.setup(context);
		driver.prepare();
		driver.run();
	}
	catch (Exception e) {
		System.err.println(e.getMessage());
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
 
Example #12
Source File: AvroInputFormatTypeExtractionTest.java    From flink with Apache License 2.0
@Test
public void testTypeExtraction() {
	try {
		InputFormat<MyAvroType, ?> format = new AvroInputFormat<MyAvroType>(new Path("file:///ignore/this/file"), MyAvroType.class);

		TypeInformation<?> typeInfoDirect = TypeExtractor.getInputFormatTypes(format);

		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<MyAvroType> input = env.createInput(format);
		TypeInformation<?> typeInfoDataSet = input.getType();

		Assert.assertTrue(typeInfoDirect instanceof PojoTypeInfo);
		Assert.assertTrue(typeInfoDataSet instanceof PojoTypeInfo);

		Assert.assertEquals(MyAvroType.class, typeInfoDirect.getTypeClass());
		Assert.assertEquals(MyAvroType.class, typeInfoDataSet.getTypeClass());
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
 
Example #13
Source File: Graph.java    From Flink-CEPplus with Apache License 2.0
/**
 * Apply a function to the attribute of each edge in the graph.
 *
 * @param mapper the map function to apply.
 * @return a new graph
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <NV> Graph<K, VV, NV> mapEdges(final MapFunction<Edge<K, EV>, NV> mapper) {

	TypeInformation<K> keyType = ((TupleTypeInfo<?>) edges.getType()).getTypeAt(0);

	TypeInformation<NV> valueType;

	if (mapper instanceof ResultTypeQueryable) {
		valueType = ((ResultTypeQueryable) mapper).getProducedType();
	} else {
		valueType = TypeExtractor.createTypeInfo(MapFunction.class, mapper.getClass(), 1, edges.getType(), null);
	}

	TypeInformation<Edge<K, NV>> returnType = (TypeInformation<Edge<K, NV>>) new TupleTypeInfo(
			Edge.class, keyType, keyType, valueType);

	return mapEdges(mapper, returnType);
}
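
A minimal usage sketch (a Graph<Long, String, Integer> is assumed): the new edge value type Double is inferred from the anonymous MapFunction; a mapper implementing ResultTypeQueryable would bypass the extraction.

Graph<Long, String, Double> weighted = graph.mapEdges(
	new MapFunction<Edge<Long, Integer>, Double>() {
		@Override
		public Double map(Edge<Long, Integer> edge) {
			return edge.getValue() / 100.0;
		}
	});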
 
Example #14
Source File: Graph.java    From Flink-CEPplus with Apache License 2.0
/**
 * Apply a function to the attribute of each vertex in the graph.
 *
 * @param mapper the map function to apply.
 * @return a new graph
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <NV> Graph<K, NV, EV> mapVertices(final MapFunction<Vertex<K, VV>, NV> mapper) {

	TypeInformation<K> keyType = ((TupleTypeInfo<?>) vertices.getType()).getTypeAt(0);

	TypeInformation<NV> valueType;

	if (mapper instanceof ResultTypeQueryable) {
		valueType = ((ResultTypeQueryable) mapper).getProducedType();
	} else {
		valueType = TypeExtractor.createTypeInfo(MapFunction.class, mapper.getClass(), 1, vertices.getType(), null);
	}

	TypeInformation<Vertex<K, NV>> returnType = (TypeInformation<Vertex<K, NV>>) new TupleTypeInfo(
			Vertex.class, keyType, valueType);

	return mapVertices(mapper, returnType);
}
 
Example #15
Source File: BroadcastConnectedStream.java    From flink with Apache License 2.0
/**
 * Assumes as inputs a {@link BroadcastStream} and a non-keyed {@link DataStream} and applies the given
 * {@link BroadcastProcessFunction} on them, thereby creating a transformed output stream.
 *
 * @param function The {@link BroadcastProcessFunction} that is called for each element in the stream.
 * @param <OUT> The type of the output elements.
 * @return The transformed {@link DataStream}.
 */
@PublicEvolving
public <OUT> SingleOutputStreamOperator<OUT> process(final BroadcastProcessFunction<IN1, IN2, OUT> function) {

	TypeInformation<OUT> outTypeInfo = TypeExtractor.getBinaryOperatorReturnType(
			function,
			BroadcastProcessFunction.class,
			0,
			1,
			2,
			TypeExtractor.NO_INDEX,
			getType1(),
			getType2(),
			Utils.getCallLocationName(),
			true);

	return process(function, outTypeInfo);
}
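
A hedged usage sketch (stream names and the state descriptor are assumptions): broadcast a rule stream, connect it to an event stream, and process both sides; the output type is inferred through TypeExtractor as shown above.

MapStateDescriptor<String, String> rulesDesc =
		new MapStateDescriptor<>("rules", Types.STRING, Types.STRING);

DataStream<String> matches = events
	.connect(rules.broadcast(rulesDesc))
	.process(new BroadcastProcessFunction<String, String, String>() {
		@Override
		public void processElement(String event, ReadOnlyContext ctx, Collector<String> out) throws Exception {
			out.collect(event);
		}

		@Override
		public void processBroadcastElement(String rule, Context ctx, Collector<String> out) throws Exception {
			// Store the broadcast element so the non-broadcast side can read it.
			ctx.getBroadcastState(rulesDesc).put(rule, rule);
		}
	});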
 
Example #16
Source File: Keys.java    From Flink-CEPplus with Apache License 2.0
@Override
public <E> void validateCustomPartitioner(Partitioner<E> partitioner, TypeInformation<E> typeInfo) {

	if (keyFields.size() != 1) {
		throw new InvalidProgramException("Custom partitioners can only be used with keys that have one key field.");
	}
	
	if (typeInfo == null) {
		// try to extract key type from partitioner
		try {
			typeInfo = TypeExtractor.getPartitionerTypes(partitioner);
		}
		catch (Throwable t) {
			// best effort check, so we ignore exceptions
		}
	}

	// only check if type is known and not a generic type
	if (typeInfo != null && !(typeInfo instanceof GenericTypeInfo)) {
		// check equality of key and partitioner type
		if (!keyType.equals(typeInfo)) {
			throw new InvalidProgramException("The partitioner is incompatible with the key type. "
				+ "Partitioner type: " + typeInfo + " , key type: " + keyType);
		}
	}
}
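
A usage sketch that exercises this validation (the DataSet is assumed): the Partitioner's type parameter String is extracted via getPartitionerTypes and checked against the type of key field 0.

DataSet<Tuple2<String, Integer>> repartitioned = data.partitionCustom(
	new Partitioner<String>() {
		@Override
		public int partition(String key, int numPartitions) {
			return Math.abs(key.hashCode() % numPartitions);
		}
	},
	0); // key field 0 is a String, so the key and partitioner types match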
 
Example #17
Source File: ExpressionKeysTest.java    From flink with Apache License 2.0
@Test
public void testInvalidPojo() throws Throwable {
	TypeInformation<ComplexNestedClass> ti = TypeExtractor.getForClass(ComplexNestedClass.class);

	String[][] tests = new String[][] {
		new String[] {"nonexistent"},
		new String[] {"date.abc"} // nesting into unnested
	};
	for (String[] test : tests) {
		Throwable e = null;
		try {
			new ExpressionKeys<>(test, ti);
		} catch (Throwable t) {
			e = t;
		}
		Assert.assertNotNull(e);
	}
}
 
Example #18
Source File: TestSource.java    From sylph with Apache License 2.0
@Override
public TypeInformation<Row> getProducedType()
{
    TypeInformation<?>[] types = new TypeInformation<?>[] {
            TypeExtractor.createTypeInfo(String.class),
            TypeExtractor.createTypeInfo(String.class),
            TypeExtractor.createTypeInfo(long.class) //createTypeInformation[String]
    };

    RowTypeInfo rowTypeInfo = new RowTypeInfo(types, new String[] {"key", "message", "event_time"});
    //createTypeInformation[Row]
    return rowTypeInfo;
}
 
Example #19
Source File: TypeHint.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a hint for the generic type in the class signature.
 */
public TypeHint() {
	try {
		this.typeInfo = TypeExtractor.createTypeInfo(
				this, TypeHint.class, getClass(), 0);
	}
	catch (InvalidTypesException e) {
		throw new FlinkRuntimeException("The TypeHint is using a generic variable." +
				"This is not supported, generic types must be fully specified for the TypeHint.");
	}
}
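
Typical use, shown as a short sketch: an anonymous subclass pins the generic parameter into the class signature, which is exactly what the constructor above hands to TypeExtractor.

TypeInformation<Tuple2<String, Long>> info =
		TypeInformation.of(new TypeHint<Tuple2<String, Long>>() {});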
 
Example #20
Source File: FieldAccessorTest.java    From flink with Apache License 2.0
@Test(expected = CompositeType.InvalidFieldReferenceException.class)
public void testIllegalFlatTuple() {
	Tuple2<String, Integer> t = Tuple2.of("aa", 5);
	TupleTypeInfo<Tuple2<String, Integer>> tpeInfo =
		(TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(t);

	FieldAccessorFactory.getAccessor(tpeInfo, "illegal", null);
}
 
Example #21
Source File: TestCKSource.java    From sylph with Apache License 2.0
@Override
public TypeInformation<Row> getProducedType()
{
    TypeInformation<?>[] types = new TypeInformation<?>[] {
            TypeExtractor.createTypeInfo(String.class),
            TypeExtractor.createTypeInfo(String.class),
            TypeExtractor.createTypeInfo(java.sql.Date.class)
    };

    RowTypeInfo rowTypeInfo = new RowTypeInfo(types, new String[] {"key", "message", "mes_time"});
    return rowTypeInfo;
}
 
Example #22
Source File: ExpressionKeysTest.java    From Flink-CEPplus with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testTupleNonKeyField() {
	// selected field is not a key type
	TypeInformation<Tuple3<String, Long, GenericNonKeyType>> ti = new TupleTypeInfo<>(
		BasicTypeInfo.STRING_TYPE_INFO,
		BasicTypeInfo.LONG_TYPE_INFO,
		TypeExtractor.getForClass(GenericNonKeyType.class)
	);

	new ExpressionKeys<>(2, ti);
}
 
Example #23
Source File: AllWindowedStream.java    From flink with Apache License 2.0
/**
 * Applies the given fold function to each window. The window function is called for each
 * evaluation of the window. The output of the fold function is
 * interpreted as a regular non-windowed stream.
 *
 * @param function The fold function.
 * @return The data stream that is the result of applying the fold function to the window.
 *
 * @deprecated use {@link #aggregate(AggregateFunction)} instead
 */
@Deprecated
public <R> SingleOutputStreamOperator<R> fold(R initialValue, FoldFunction<T, R> function) {
	if (function instanceof RichFunction) {
		throw new UnsupportedOperationException("FoldFunction of fold can not be a RichFunction. " +
				"Please use fold(FoldFunction, WindowFunction) instead.");
	}

	TypeInformation<R> resultType = TypeExtractor.getFoldReturnTypes(function, input.getType(),
			Utils.getCallLocationName(), true);

	return fold(initialValue, function, resultType);
}
 
Example #24
Source File: FieldAccessorTest.java    From flink with Apache License 2.0
@Test(expected = CompositeType.InvalidFieldReferenceException.class)
public void testIllegalTupleInPojoInTuple() {
	Tuple2<String, Foo> t = Tuple2.of("aa", new Foo(8, Tuple2.of("ddd", 9L), (short) 2));
	TupleTypeInfo<Tuple2<String, Foo>> tpeInfo =
		(TupleTypeInfo<Tuple2<String, Foo>>) TypeExtractor.getForObject(t);

	FieldAccessorFactory.getAccessor(tpeInfo, "illegal.illegal.illegal", null);
}
 
Example #25
Source File: ExpressionKeysTest.java    From flink with Apache License 2.0
@Test(expected = Keys.IncompatibleKeysException.class)
public void testAreCompatible7() throws Keys.IncompatibleKeysException {
	TypeInformation<Pojo1> t1 = TypeExtractor.getForClass(Pojo1.class);
	TypeInformation<Tuple2<String, Long>> t2 = new TupleTypeInfo<>(
		BasicTypeInfo.STRING_TYPE_INFO,
		BasicTypeInfo.LONG_TYPE_INFO
	);

	ExpressionKeys<Pojo1> ek1 = new ExpressionKeys<>(new String[]{"a", "b"}, t1);
	ExpressionKeys<Tuple2<String, Long>> ek2 = new ExpressionKeys<>(0, t2);

	ek1.areCompatible(ek2);
}
 
Example #26
Source File: Translate.java    From flink with Apache License 2.0
/**
 * Translate {@link Edge} values using the given {@link TranslateFunction}.
 *
 * @param edges input edges
 * @param translator implements conversion from {@code OLD} to {@code NEW}
 * @param parallelism operator parallelism
 * @param <K> vertex ID type
 * @param <OLD> old edge value type
 * @param <NEW> new edge value type
 * @return translated edges
 */
@SuppressWarnings("unchecked")
public static <K, OLD, NEW> DataSet<Edge<K, NEW>> translateEdgeValues(DataSet<Edge<K, OLD>> edges, TranslateFunction<OLD, NEW> translator, int parallelism) {
	Preconditions.checkNotNull(edges);
	Preconditions.checkNotNull(translator);

	Class<Edge<K, NEW>> edgeClass = (Class<Edge<K, NEW>>) (Class<? extends Edge>) Edge.class;
	TypeInformation<K> idType = ((TupleTypeInfo<Edge<K, OLD>>) edges.getType()).getTypeAt(0);
	TypeInformation<OLD> oldType = ((TupleTypeInfo<Edge<K, OLD>>) edges.getType()).getTypeAt(2);
	TypeInformation<NEW> newType = TypeExtractor.getUnaryOperatorReturnType(
		translator,
		TranslateFunction.class,
		0,
		1,
		new int[]{1},
		oldType,
		null,
		false);

	TupleTypeInfo<Edge<K, NEW>> returnType = new TupleTypeInfo<>(edgeClass, idType, idType, newType);

	return edges
		.map(new TranslateEdgeValue<>(translator))
		.returns(returnType)
			.setParallelism(parallelism)
			.name("Translate edge values");
}
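
A usage sketch (the edge DataSet is assumed): the TranslateFunction's NEW type parameter is resolved by getUnaryOperatorReturnType, so the caller needs no extra type hint.

DataSet<Edge<Long, String>> labeled = Translate.translateEdgeValues(
	edges,
	new TranslateFunction<Long, String>() {
		@Override
		public String translate(Long value, String reuse) {
			return "weight-" + value;
		}
	},
	4); // operator parallelism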
 
Example #27
Source File: ExpressionKeysTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testAreCompatible5() throws Keys.IncompatibleKeysException {
	TypeInformation<PojoWithMultiplePojos> t1 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);
	TypeInformation<Tuple2<String, String>> t2 = new TupleTypeInfo<>(
		BasicTypeInfo.STRING_TYPE_INFO,
		BasicTypeInfo.STRING_TYPE_INFO
	);

	ExpressionKeys<PojoWithMultiplePojos> ek1 = new ExpressionKeys<>(new String[]{"p1.b", "p2.a2"}, t1);
	ExpressionKeys<Tuple2<String, String>> ek2 = new ExpressionKeys<>("*", t2);

	Assert.assertTrue(ek1.areCompatible(ek2));
	Assert.assertTrue(ek2.areCompatible(ek1));
}
 
Example #28
Source File: ExpressionKeysTest.java    From flink with Apache License 2.0
@Test
public void testTupleWithNestedPojo() {

	TypeInformation<Tuple3<Integer, Pojo1, PojoWithMultiplePojos>> ti =
			new TupleTypeInfo<>(
				BasicTypeInfo.INT_TYPE_INFO,
				TypeExtractor.getForClass(Pojo1.class),
				TypeExtractor.getForClass(PojoWithMultiplePojos.class)
			);

	ExpressionKeys<Tuple3<Integer, Pojo1, PojoWithMultiplePojos>> ek;

	ek = new ExpressionKeys<>(0, ti);
	Assert.assertArrayEquals(new int[] {0}, ek.computeLogicalKeyPositions());

	ek = new ExpressionKeys<>(1, ti);
	Assert.assertArrayEquals(new int[] {1,2}, ek.computeLogicalKeyPositions());

	ek = new ExpressionKeys<>(2, ti);
	Assert.assertArrayEquals(new int[] {3,4,5,6,7}, ek.computeLogicalKeyPositions());

	ek = new ExpressionKeys<>(new int[]{}, ti, true);
	Assert.assertArrayEquals(new int[] {0,1,2,3,4,5,6,7}, ek.computeLogicalKeyPositions());

	ek = new ExpressionKeys<>("*", ti);
	Assert.assertArrayEquals(new int[] {0,1,2,3,4,5,6,7}, ek.computeLogicalKeyPositions());

	ek = new ExpressionKeys<>("f2.p1.*", ti);
	Assert.assertArrayEquals(new int[] {4,5}, ek.computeLogicalKeyPositions());
}