org.apache.flink.api.java.typeutils.TupleTypeInfo Java Examples

The following examples show how to use org.apache.flink.api.java.typeutils.TupleTypeInfo. Each example is taken from an open source project; the source file and originating project are noted above it.
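Before the examples, a quick orientation: a TupleTypeInfo captures the arity and per-field types of a Flink Tuple, and is typically obtained either from the varargs constructor or from the getBasicTupleTypeInfo factory. A minimal sketch (the class name is illustrative):

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class TupleTypeInfoSketch {
	public static void main(String[] args) {
		// Explicit construction from per-field type information.
		TupleTypeInfo<Tuple2<String, Integer>> byConstructor =
			new TupleTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);

		// Convenience factory for tuples of basic (boxed/String) field types.
		TupleTypeInfo<Tuple2<String, Integer>> byFactory =
			TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);

		System.out.println(byConstructor.getArity()); // 2
		System.out.println(byFactory);                // e.g. Java Tuple2<String, Integer>
	}
}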
Example #1
Source File: ProjectOperator.java    From flink with Apache License 2.0
public Projection(DataSet<T> ds, int[] fieldIndexes) {

	if (!(ds.getType() instanceof TupleTypeInfo)) {
		throw new UnsupportedOperationException("project() can only be applied to DataSets of Tuples.");
	}

	if (fieldIndexes.length == 0) {
		throw new IllegalArgumentException("project() needs to select at least one (1) field.");
	} else if (fieldIndexes.length > Tuple.MAX_ARITY - 1) {
		throw new IllegalArgumentException(
			"project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
	}

	int maxFieldIndex = ds.getType().getArity();
	for (int fieldIndex : fieldIndexes) {
		Preconditions.checkElementIndex(fieldIndex, maxFieldIndex);
	}

	this.ds = ds;
	this.fieldIndexes = fieldIndexes;
}
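These checks are what DataSet.project() performs before building the projection; from user code the operator is applied as in this sketch (the data values are made up):

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;

public class ProjectUsageSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		DataSet<Tuple3<Integer, String, Double>> ds =
			env.fromElements(new Tuple3<>(1, "a", 0.5), new Tuple3<>(2, "b", 1.5));

		// Selects and reorders fields 1 and 0; the constructor above validates the indexes.
		DataSet<Tuple2<String, Integer>> projected = ds.<Tuple2<String, Integer>>project(1, 0);

		projected.print();
	}
}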
 
Example #2
Source File: Graph.java    From Flink-CEPplus with Apache License 2.0
/**
 * Apply a function to the attribute of each edge in the graph.
 *
 * @param mapper the map function to apply.
 * @return a new graph
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <NV> Graph<K, VV, NV> mapEdges(final MapFunction<Edge<K, EV>, NV> mapper) {

	TypeInformation<K> keyType = ((TupleTypeInfo<?>) edges.getType()).getTypeAt(0);

	TypeInformation<NV> valueType;

	if (mapper instanceof ResultTypeQueryable) {
		valueType = ((ResultTypeQueryable) mapper).getProducedType();
	} else {
		valueType = TypeExtractor.createTypeInfo(MapFunction.class, mapper.getClass(), 1, edges.getType(), null);
	}

	TypeInformation<Edge<K, NV>> returnType = (TypeInformation<Edge<K, NV>>) new TupleTypeInfo(
			Edge.class, keyType, keyType, valueType);

	return mapEdges(mapper, returnType);
}
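Edge<K, EV> extends Tuple3<K, K, EV>, which is why the return type above is built with the TupleTypeInfo constructor that also records the concrete tuple subclass. A standalone sketch of the same pattern (the raw construction and cast mirror the method above):

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.graph.Edge;

public class EdgeTypeInfoSketch {
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public static void main(String[] args) {
		TypeInformation<Long> keyType = BasicTypeInfo.LONG_TYPE_INFO;
		TypeInformation<String> valueType = BasicTypeInfo.STRING_TYPE_INFO;

		// Passing Edge.class keeps the concrete subclass, so deserialized
		// records are Edges rather than plain Tuple3s.
		TypeInformation<Edge<Long, String>> edgeType =
			(TypeInformation<Edge<Long, String>>) new TupleTypeInfo(
				Edge.class, keyType, keyType, valueType);

		System.out.println(edgeType.getArity()); // 3
	}
}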
 
Example #3
Source File: ReplicatingDataSourceTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests compiler fail for join program with replicated data source behind rebalance.
 */
@Test(expected = CompilerException.class)
public void checkJoinWithReplicatedSourceInputBehindRebalance() {
	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.rebalance()
			.join(source2).where("*").equalTo("*")
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);
}
 
Example #4
Source File: AbstractSortMergeOuterJoinIteratorITCase.java    From flink with Apache License 2.0
@Before
public void beforeTest() {
	ExecutionConfig config = new ExecutionConfig();
	config.disableObjectReuse();
	
	TupleTypeInfo<Tuple2<String, String>> typeInfo1 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class);
	TupleTypeInfo<Tuple2<String, Integer>> typeInfo2 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);
	serializer1 = typeInfo1.createSerializer(config);
	serializer2 = typeInfo2.createSerializer(config);
	comparator1 = typeInfo1.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
	comparator2 = typeInfo2.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
	pairComp = new GenericPairComparator<>(comparator1, comparator2);

	this.memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).build();
	this.ioManager = new IOManagerAsync();
}
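For reference, createComparator takes the logical key field positions, a sort order per key field, the logical offset of the first key field, and the execution config. A small sketch, assuming the comparator is only used for a local comparison:

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class ComparatorSketch {
	public static void main(String[] args) {
		TupleTypeInfo<Tuple2<String, Integer>> typeInfo =
			TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);

		// Key on field 0, ascending, with logical key offset 0.
		TypeComparator<Tuple2<String, Integer>> comparator =
			typeInfo.createComparator(new int[]{0}, new boolean[]{true}, 0, new ExecutionConfig());

		int cmp = comparator.compare(Tuple2.of("a", 1), Tuple2.of("b", 2));
		System.out.println(cmp < 0); // true: "a" sorts before "b"
	}
}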
 
Example #5
Source File: ExpressionKeysTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testAreCompatible9() throws Keys.IncompatibleKeysException {
	TypeInformation<Tuple3<String, Long, Integer>> t1 = new TupleTypeInfo<>(
		BasicTypeInfo.STRING_TYPE_INFO,
		BasicTypeInfo.LONG_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO
	);
	TypeInformation<PojoWithMultiplePojos> t2 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);

	ExpressionKeys<Tuple3<String, Long, Integer>> ek1 = new ExpressionKeys<>(new int[]{2,0}, t1);
	Keys<PojoWithMultiplePojos> ek2 = new Keys.SelectorFunctionKeys<>(
		new KeySelector3(),
		t2,
		new TupleTypeInfo<Tuple2<Integer, String>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO)
	);

	Assert.assertTrue(ek1.areCompatible(ek2));
}
 
Example #6
Source File: JavaTableEnvironmentITCase.java    From flink with Apache License 2.0
@Test
public void testAsFromAndToTuple() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, config());

	Table table = tableEnv
		.fromDataSet(CollectionDataSets.get3TupleDataSet(env), "a, b, c")
		.select("a, b, c");

	TypeInformation<?> ti = new TupleTypeInfo<Tuple3<Integer, Long, String>>(
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.LONG_TYPE_INFO,
		BasicTypeInfo.STRING_TYPE_INFO);

	DataSet<?> ds = tableEnv.toDataSet(table, ti);
	List<?> results = ds.collect();
	String expected = "(1,1,Hi)\n" + "(2,2,Hello)\n" + "(3,2,Hello world)\n" +
		"(4,3,Hello world, how are you?)\n" + "(5,3,I am fine.)\n" + "(6,3,Luke Skywalker)\n" +
		"(7,4,Comment#1)\n" + "(8,4,Comment#2)\n" + "(9,4,Comment#3)\n" + "(10,4,Comment#4)\n" +
		"(11,5,Comment#5)\n" + "(12,5,Comment#6)\n" + "(13,5,Comment#7)\n" +
		"(14,5,Comment#8)\n" + "(15,5,Comment#9)\n" + "(16,6,Comment#10)\n" +
		"(17,6,Comment#11)\n" + "(18,6,Comment#12)\n" + "(19,6,Comment#13)\n" +
		"(20,6,Comment#14)\n" + "(21,6,Comment#15)\n";
	compareResultAsText(results, expected);
}
 
Example #7
Source File: Java8WordCount.java    From flink-examples with MIT License
public static void main(String[] args) throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSource<String> lines = env.fromElements(
        "Apache Flink is a community-driven open source framework for distributed big data analytics,",
        "like Hadoop and Spark. The core of Apache Flink is a distributed streaming dataflow engine written",
        " in Java and Scala.[1][2] It aims to bridge the gap between MapReduce-like systems and shared-nothing",
        "parallel database systems. Therefore, Flink executes arbitrary dataflow programs in a data-parallel and",
        "pipelined manner.[3] Flink's pipelined runtime system enables the execution of bulk/batch and stream",
        "processing programs.[4][5] Furthermore, Flink's runtime supports the execution of iterative algorithms natively.[6]"
    );

    lines.flatMap((line, out) -> {
        String[] words = line.split("\\W+");
        for (String word : words) {
            out.collect(new Tuple2<>(word, 1));
        }
    })
    .returns(new TupleTypeInfo(TypeInformation.of(String.class), TypeInformation.of(Integer.class)))
    .groupBy(0)
    .sum(1)
    .print();
}
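Because the flatMap above is a lambda, its Tuple2 type parameters are erased at compile time, which is why a (raw) TupleTypeInfo must be handed to returns(...). A sketch of the same pipeline with explicitly typed lambda parameters and a TypeHint, avoiding the raw type:

import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class WordCountTypeHintSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		DataSet<Tuple2<String, Integer>> counts = env.fromElements("to be or not to be")
			.flatMap((String line, Collector<Tuple2<String, Integer>> out) -> {
				for (String word : line.split("\\W+")) {
					out.collect(new Tuple2<>(word, 1));
				}
			})
			// Type erasure hides Tuple2<String, Integer>; the TypeHint restores it.
			.returns(new TypeHint<Tuple2<String, Integer>>() {});

		counts.groupBy(0).sum(1).print();
	}
}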
 
Example #8
Source File: EitherSerializerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testEitherWithTupleValues() {
	@SuppressWarnings("unchecked")
	Either<Tuple2<LongValue, LongValue>, DoubleValue>[] testData = new Either[] {
		Left(new Tuple2<>(new LongValue(2L), new LongValue(9L))),
		new Left<>(new Tuple2<>(new LongValue(Long.MIN_VALUE), new LongValue(Long.MAX_VALUE))),
		new Right<>(new DoubleValue(32.0)),
		Right(new DoubleValue(Double.MIN_VALUE)),
		Right(new DoubleValue(Double.MAX_VALUE))};

	EitherTypeInfo<Tuple2<LongValue, LongValue>, DoubleValue> eitherTypeInfo = new EitherTypeInfo<>(
		new TupleTypeInfo<Tuple2<LongValue, LongValue>>(ValueTypeInfo.LONG_VALUE_TYPE_INFO, ValueTypeInfo.LONG_VALUE_TYPE_INFO),
		ValueTypeInfo.DOUBLE_VALUE_TYPE_INFO);
	EitherSerializer<Tuple2<LongValue, LongValue>, DoubleValue> eitherSerializer =
		(EitherSerializer<Tuple2<LongValue, LongValue>, DoubleValue>) eitherTypeInfo.createSerializer(new ExecutionConfig());
	SerializerTestInstance<Either<Tuple2<LongValue, LongValue>, DoubleValue>> testInstance =
		new EitherSerializerTestInstance<>(eitherSerializer, eitherTypeInfo.getTypeClass(), -1, testData);
	testInstance.testAll();
}
 
Example #9
Source File: AbstractSortMergeOuterJoinIteratorITCase.java    From flink with Apache License 2.0
@Before
public void beforeTest() {
	ExecutionConfig config = new ExecutionConfig();
	config.disableObjectReuse();
	
	TupleTypeInfo<Tuple2<String, String>> typeInfo1 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class);
	TupleTypeInfo<Tuple2<String, Integer>> typeInfo2 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);
	serializer1 = typeInfo1.createSerializer(config);
	serializer2 = typeInfo2.createSerializer(config);
	comparator1 = typeInfo1.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
	comparator2 = typeInfo2.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
	pairComp = new GenericPairComparator<>(comparator1, comparator2);

	this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
	this.ioManager = new IOManagerAsync();
}
 
Example #10
Source File: EitherSerializerTest.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
@Test
public void testEitherWithTuple() {

	Either<Tuple2<Long, Long>, Double>[] testData = new Either[] {
			Either.Left(new Tuple2<>(2L, 9L)),
			new Left<>(new Tuple2<>(Long.MIN_VALUE, Long.MAX_VALUE)),
			new Right<>(32.0),
			Right(Double.MIN_VALUE),
			Right(Double.MAX_VALUE)};

	EitherTypeInfo<Tuple2<Long, Long>, Double> eitherTypeInfo = new EitherTypeInfo<>(
			new TupleTypeInfo<Tuple2<Long, Long>>(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO),
			BasicTypeInfo.DOUBLE_TYPE_INFO);
	EitherSerializer<Tuple2<Long, Long>, Double> eitherSerializer =
			(EitherSerializer<Tuple2<Long, Long>, Double>) eitherTypeInfo.createSerializer(new ExecutionConfig());
	SerializerTestInstance<Either<Tuple2<Long, Long>, Double>> testInstance =
			new EitherSerializerTestInstance<>(eitherSerializer, eitherTypeInfo.getTypeClass(), -1, testData);
	testInstance.testAll();
}
 
Example #11
Source File: DataStreamTest.java    From flink with Apache License 2.0
@Test
public void testPOJOWithNestedArrayNoHashCodeKeyRejection() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<POJOWithHashCode> input = env.fromElements(
			new POJOWithHashCode(new int[] {1, 2}));

	TypeInformation<?> expectedTypeInfo = new TupleTypeInfo<Tuple1<int[]>>(
			PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO);

	// adjust the rule
	expectedException.expect(InvalidProgramException.class);
	expectedException.expectMessage(new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key."));

	input.keyBy("id");
}
 
Example #12
Source File: ValueCollectionDataSets.java    From flink with Apache License 2.0
public static DataSet<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> getGroupSortedNestedTupleDataSet2(ExecutionEnvironment env) {
	List<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> data = new ArrayList<>();

	data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(1), new IntValue(3)), new StringValue("a"), new IntValue(2)));
	data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(1), new IntValue(2)), new StringValue("a"), new IntValue(1)));
	data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(2), new IntValue(1)), new StringValue("a"), new IntValue(3)));
	data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(2), new IntValue(2)), new StringValue("b"), new IntValue(4)));
	data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(3), new IntValue(3)), new StringValue("c"), new IntValue(5)));
	data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(3), new IntValue(6)), new StringValue("c"), new IntValue(6)));
	data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(4), new IntValue(9)), new StringValue("c"), new IntValue(7)));

	TupleTypeInfo<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> type = new TupleTypeInfo<>(
			new TupleTypeInfo<Tuple2<IntValue, IntValue>>(ValueTypeInfo.INT_VALUE_TYPE_INFO, ValueTypeInfo.INT_VALUE_TYPE_INFO),
			ValueTypeInfo.STRING_VALUE_TYPE_INFO,
			ValueTypeInfo.INT_VALUE_TYPE_INFO
	);

	return env.fromCollection(data, type);
}
 
Example #13
Source File: FieldAccessor.java    From flink with Apache License 2.0
RecursiveTupleFieldAccessor(int pos, FieldAccessor<R, F> innerAccessor, TypeInformation<T> typeInfo) {
	checkNotNull(typeInfo, "typeInfo must not be null.");
	checkNotNull(innerAccessor, "innerAccessor must not be null.");

	int arity = ((TupleTypeInfo) typeInfo).getArity();
	if (pos < 0 || pos >= arity) {
		throw new CompositeType.InvalidFieldReferenceException(
			"Tried to select " + ((Integer) pos).toString() + ". field on \"" +
				typeInfo.toString() + "\", which is an invalid index.");
	}

	this.pos = pos;
	this.innerAccessor = innerAccessor;
	this.fieldType = innerAccessor.fieldType;
}
 
Example #14
Source File: CrossOperator.java    From Flink-CEPplus with Apache License 2.0
protected ProjectCross(DataSet<I1> input1, DataSet<I2> input2, int[] fields, boolean[] isFromFirst,
		TupleTypeInfo<OUT> returnType, CrossProjection<I1, I2> crossProjection, CrossHint hint) {
	super(input1, input2,
		new ProjectCrossFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
		returnType, hint, "unknown");

	this.crossProjection = crossProjection;
}
 
Example #15
Source File: CollectionDataSets.java    From flink with Apache License 2.0
public static DataSet<Tuple5<Integer, Long, Integer, String, Long>> get5TupleDataSet(ExecutionEnvironment env) {

		List<Tuple5<Integer, Long, Integer, String, Long>> data = new ArrayList<>();
		data.add(new Tuple5<>(1, 1L, 0, "Hallo", 1L));
		data.add(new Tuple5<>(2, 2L, 1, "Hallo Welt", 2L));
		data.add(new Tuple5<>(2, 3L, 2, "Hallo Welt wie", 1L));
		data.add(new Tuple5<>(3, 4L, 3, "Hallo Welt wie gehts?", 2L));
		data.add(new Tuple5<>(3, 5L, 4, "ABC", 2L));
		data.add(new Tuple5<>(3, 6L, 5, "BCD", 3L));
		data.add(new Tuple5<>(4, 7L, 6, "CDE", 2L));
		data.add(new Tuple5<>(4, 8L, 7, "DEF", 1L));
		data.add(new Tuple5<>(4, 9L, 8, "EFG", 1L));
		data.add(new Tuple5<>(4, 10L, 9, "FGH", 2L));
		data.add(new Tuple5<>(5, 11L, 10, "GHI", 1L));
		data.add(new Tuple5<>(5, 12L, 11, "HIJ", 3L));
		data.add(new Tuple5<>(5, 13L, 12, "IJK", 3L));
		data.add(new Tuple5<>(5, 14L, 13, "JKL", 2L));
		data.add(new Tuple5<>(5, 15L, 14, "KLM", 2L));

		Collections.shuffle(data);

		TupleTypeInfo<Tuple5<Integer, Long, Integer, String, Long>> type = new TupleTypeInfo<>(
				BasicTypeInfo.INT_TYPE_INFO,
				BasicTypeInfo.LONG_TYPE_INFO,
				BasicTypeInfo.INT_TYPE_INFO,
				BasicTypeInfo.STRING_TYPE_INFO,
				BasicTypeInfo.LONG_TYPE_INFO
		);

		return env.fromCollection(data, type);
	}
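Passing the TupleTypeInfo to fromCollection explicitly, as above, saves Flink from inferring the element type reflectively from the first element of the collection. A compact sketch of the same pattern:

import java.util.Arrays;
import java.util.List;

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class FromCollectionSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		List<Tuple2<Integer, String>> data = Arrays.asList(
			new Tuple2<>(1, "a"), new Tuple2<>(2, "b"));

		// The collection's generic element type is erased at runtime, so the
		// TupleTypeInfo tells Flink how to serialize the records.
		DataSet<Tuple2<Integer, String>> ds = env.fromCollection(data,
			new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO));

		ds.print();
	}
}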
 
Example #16
Source File: SlidingWindowCheckMapper.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) {
	ValueStateDescriptor<List<Tuple2<Event, Integer>>> previousWindowDescriptor =
		new ValueStateDescriptor<>("eventsSeenSoFar",
			new ListTypeInfo<>(new TupleTypeInfo<>(TypeInformation.of(Event.class), BasicTypeInfo.INT_TYPE_INFO)));

	eventsSeenSoFar = getRuntimeContext().getState(previousWindowDescriptor);

	ValueStateDescriptor<Long> lastSequenceNumberDescriptor =
		new ValueStateDescriptor<>("lastSequenceNumber", BasicTypeInfo.LONG_TYPE_INFO);

	lastSequenceNumber = getRuntimeContext().getState(lastSequenceNumberDescriptor);
}
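Wrapping a TupleTypeInfo in a ListTypeInfo, as above, gives the state backend a precise composite serializer instead of a generic Kryo fallback. A trimmed-down sketch of just the descriptor construction (the state name and field types are illustrative):

import java.util.List;

import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.ListTypeInfo;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class StateDescriptorSketch {
	public static void main(String[] args) {
		TupleTypeInfo<Tuple2<String, Integer>> entryType =
			new TupleTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);

		// The descriptor carries the composed type info to the state backend.
		ValueStateDescriptor<List<Tuple2<String, Integer>>> descriptor =
			new ValueStateDescriptor<>("entries", new ListTypeInfo<>(entryType));

		System.out.println(descriptor.getName());
	}
}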
 
Example #17
Source File: JoinOperator.java    From Flink-CEPplus with Apache License 2.0
protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType, JoinProjection<I1, I2> joinProj) {
	super(input1, input2, keys1, keys2,
			new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
			returnType, hint, Utils.getCallLocationName(4));

	this.joinProj = joinProj;
}
 
Example #18
Source File: Translate.java    From flink with Apache License 2.0
/**
 * Translate {@link Vertex} values using the given {@link TranslateFunction}.
 *
 * @param vertices input vertices
 * @param translator implements conversion from {@code OLD} to {@code NEW}
 * @param parallelism operator parallelism
 * @param <K> vertex ID type
 * @param <OLD> old vertex value type
 * @param <NEW> new vertex value type
 * @return translated vertices
 */
@SuppressWarnings("unchecked")
public static <K, OLD, NEW> DataSet<Vertex<K, NEW>> translateVertexValues(DataSet<Vertex<K, OLD>> vertices, TranslateFunction<OLD, NEW> translator, int parallelism) {
	Preconditions.checkNotNull(vertices);
	Preconditions.checkNotNull(translator);

	Class<Vertex<K, NEW>> vertexClass = (Class<Vertex<K, NEW>>) (Class<? extends Vertex>) Vertex.class;
	TypeInformation<K> idType = ((TupleTypeInfo<Vertex<K, OLD>>) vertices.getType()).getTypeAt(0);
	TypeInformation<OLD> oldType = ((TupleTypeInfo<Vertex<K, OLD>>) vertices.getType()).getTypeAt(1);
	TypeInformation<NEW> newType = TypeExtractor.getUnaryOperatorReturnType(
		translator,
		TranslateFunction.class,
		0,
		1,
		new int[]{1},
		oldType,
		null,
		false);

	TupleTypeInfo<Vertex<K, NEW>> returnType = new TupleTypeInfo<>(vertexClass, idType, newType);

	return vertices
		.map(new TranslateVertexValue<>(translator))
		.returns(returnType)
			.setParallelism(parallelism)
			.name("Translate vertex values");
}
 
Example #19
Source File: GroupReduceDriverTest.java    From flink with Apache License 2.0
@Test
public void testAllReduceDriverImmutable() {
	try {
		TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
				new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String,Integer>>();
		
		List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
		TupleTypeInfo<Tuple2<String, Integer>> typeInfo = (TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(data.get(0));
		MutableObjectIterator<Tuple2<String, Integer>> input = new RegularToMutableObjectIterator<Tuple2<String, Integer>>(data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));
		TypeComparator<Tuple2<String, Integer>> comparator = typeInfo.createComparator(new int[]{0}, new boolean[] {true}, 0, new ExecutionConfig());
		
		GatheringCollector<Tuple2<String, Integer>> result = new GatheringCollector<Tuple2<String,Integer>>(typeInfo.createSerializer(new ExecutionConfig()));
		
		context.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
		context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
		context.setCollector(result);
		context.setComparator1(comparator);
		context.setUdf(new ConcatSumReducer());
		
		GroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver = new GroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
		driver.setup(context);
		driver.prepare();
		driver.run();
		
		Object[] res = result.getList().toArray();
		Object[] expected = DriverTestData.createReduceImmutableDataGroupedResult().toArray();
		
		DriverTestData.compareTupleArrays(expected, res);
	}
	catch (Exception e) {
		System.err.println(e.getMessage());
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
 
Example #20
Source File: CrossOperator.java    From Flink-CEPplus with Apache License 2.0
public DefaultCross(DataSet<I1> input1, DataSet<I2> input2, CrossHint hint, String defaultName) {
	super(input1, input2, new DefaultCrossFunction<I1, I2>(),
		new TupleTypeInfo<Tuple2<I1, I2>>(
			Preconditions.checkNotNull(input1, "input1 is null").getType(),
			Preconditions.checkNotNull(input2, "input2 is null").getType()),
		hint, defaultName);
}
 
Example #21
Source File: ExpressionKeysTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testAreCompatible2() throws Keys.IncompatibleKeysException {
	TypeInformation<Pojo1> t1 = TypeExtractor.getForClass(Pojo1.class);
	TypeInformation<Tuple2<String, Long>> t2 = new TupleTypeInfo<>(
		BasicTypeInfo.STRING_TYPE_INFO,
		BasicTypeInfo.LONG_TYPE_INFO
	);

	ExpressionKeys<Pojo1> ek1 = new ExpressionKeys<>("a", t1);
	ExpressionKeys<Tuple2<String, Long>> ek2 = new ExpressionKeys<>(0, t2);

	Assert.assertTrue(ek1.areCompatible(ek2));
	Assert.assertTrue(ek2.areCompatible(ek1));
}
 
Example #22
Source File: StreamProjection.java    From flink with Apache License 2.0
/**
 * Projects a {@link Tuple} {@link DataStream} to the previously selected fields.
 *
 * @return The projected DataStream.
 * @see Tuple
 * @see DataStream
 */
public <T0> SingleOutputStreamOperator<Tuple1<T0>> projectTuple1() {
	TypeInformation<?>[] fTypes = extractFieldTypes(fieldIndexes, dataStream.getType());
	TupleTypeInfo<Tuple1<T0>> tType = new TupleTypeInfo<Tuple1<T0>>(fTypes);

	return dataStream.transform("Projection", tType, new StreamProject<IN, Tuple1<T0>>(
			fieldIndexes, tType.createSerializer(dataStream.getExecutionConfig())));
}
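This method is the machinery behind DataStream.project(...); from user code the projection looks like the following sketch (the sample values are made up):

import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class StreamProjectSketch {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		DataStream<Tuple3<Integer, String, Double>> input =
			env.fromElements(new Tuple3<>(1, "a", 0.5), new Tuple3<>(2, "b", 1.5));

		// project(1) keeps field 1; the result's TupleTypeInfo is assembled
		// internally from the extracted field types, as in projectTuple1 above.
		DataStream<Tuple1<String>> names = input.<Tuple1<String>>project(1);

		names.print();
		env.execute("stream projection sketch");
	}
}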
 
Example #23
Source File: ReplicatingDataSourceTest.java    From flink with Apache License 2.0
/**
 * Tests cross program with replicated data source.
 */
@Test
public void checkCrossWithReplicatedSourceInput() {

	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.cross(source2)
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);

	// check the optimized Plan
	// when cross should have forward strategy on both sides
	SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
	DualInputPlanNode crossNode = (DualInputPlanNode) sinkNode.getPredecessor();

	ShipStrategyType crossIn1 = crossNode.getInput1().getShipStrategy();
	ShipStrategyType crossIn2 = crossNode.getInput2().getShipStrategy();

	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn1);
	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn2);
}
 
Example #24
Source File: FieldAccessorTest.java    From flink with Apache License 2.0
@Test(expected = CompositeType.InvalidFieldReferenceException.class)
public void testIllegalTupleInPojoInTuple() {
	Tuple2<String, Foo> t = Tuple2.of("aa", new Foo(8, Tuple2.of("ddd", 9L), (short) 2));
	TupleTypeInfo<Tuple2<String, Foo>> tpeInfo =
		(TupleTypeInfo<Tuple2<String, Foo>>) TypeExtractor.getForObject(t);

	FieldAccessorFactory.getAccessor(tpeInfo, "illegal.illegal.illegal", null);
}
 
Example #25
Source File: CsvInputFormatTest.java    From flink with Apache License 2.0
@Test
public void testReadSparseWithPositionSetter() throws IOException {
	try {
		final String fileContent = "111|222|333|444|555|666|777|888|999|000|\n000|999|888|777|666|555|444|333|222|111|";
		final FileInputSplit split = createTempFile(fileContent);

		final TupleTypeInfo<Tuple3<Integer, Integer, Integer>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, Integer.class, Integer.class);
		final CsvInputFormat<Tuple3<Integer, Integer, Integer>> format = new TupleCsvInputFormat<Tuple3<Integer, Integer, Integer>>(PATH, typeInfo, new int[]{0, 3, 7});

		format.setFieldDelimiter("|");

		format.configure(new Configuration());
		format.open(split);

		Tuple3<Integer, Integer, Integer> result = new Tuple3<Integer, Integer, Integer>();

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals(Integer.valueOf(111), result.f0);
		assertEquals(Integer.valueOf(444), result.f1);
		assertEquals(Integer.valueOf(888), result.f2);

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals(Integer.valueOf(000), result.f0);
		assertEquals(Integer.valueOf(777), result.f1);
		assertEquals(Integer.valueOf(333), result.f2);

		result = format.nextRecord(result);
		assertNull(result);
		assertTrue(format.reachedEnd());
	}
	catch (Exception ex) {
		fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
	}
}
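The int[]{0, 3, 7} position setter reads only three of the ten columns. The fluent CSV reader can express the same sparse projection with a field mask; a sketch in which "/some/path" remains a placeholder, as in the test:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;

public class SparseCsvSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// The mask keeps columns 0, 3, and 7 out of ten.
		DataSet<Tuple3<Integer, Integer, Integer>> sparse = env.readCsvFile("/some/path")
			.fieldDelimiter("|")
			.includeFields("1001000100")
			.types(Integer.class, Integer.class, Integer.class);

		sparse.print();
	}
}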
 
Example #26
Source File: ExpressionKeysTest.java    From flink with Apache License 2.0
@Test
public void testAreCompatible4() throws Keys.IncompatibleKeysException {
	TypeInformation<PojoWithMultiplePojos> t1 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);
	TypeInformation<Tuple3<String, Long, Integer>> t2 = new TupleTypeInfo<>(
		BasicTypeInfo.STRING_TYPE_INFO,
		BasicTypeInfo.LONG_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO
	);

	ExpressionKeys<PojoWithMultiplePojos> ek1 = new ExpressionKeys<>(new String[]{"p1", "i0"}, t1);
	ExpressionKeys<Tuple3<String, Long, Integer>> ek2 = new ExpressionKeys<>(new int[]{0, 0, 2}, t2);

	Assert.assertTrue(ek1.areCompatible(ek2));
	Assert.assertTrue(ek2.areCompatible(ek1));
}