org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint Java Examples

The following examples show how to use org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint. The original project and source file are noted above each example.
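Before the examples, a note on what a JoinHint does: it tells the Flink batch optimizer which execution strategy to use for a join, i.e. whether to broadcast one input or repartition both, and which side to build the hash table from. The sketch below is a minimal, self-contained illustration of passing a hint to DataSet.join; the data sets, key positions, and class name are made up for this illustration and are not part of any project shown here.

import org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class JoinHintSketch {

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// two small, made-up inputs keyed by their first field
		DataSet<Tuple2<Integer, String>> small = env.fromElements(
				new Tuple2<>(1, "a"), new Tuple2<>(2, "b"));
		DataSet<Tuple2<Integer, String>> large = env.fromElements(
				new Tuple2<>(1, "x"), new Tuple2<>(2, "y"), new Tuple2<>(3, "z"));

		// hint that the first (small) input should be broadcast and used as
		// the build side of the hash table; with no hint the optimizer
		// effectively gets JoinHint.OPTIMIZER_CHOOSES (see Example #9 below)
		small.join(large, JoinHint.BROADCAST_HASH_FIRST)
				.where(0)
				.equalTo(0)
				.print();
	}
}

Note that outer joins accept only a subset of the hints: as Examples #3 and #4 below show, BROADCAST_HASH_SECOND is rejected for right outer joins and both broadcast hints are rejected for full outer joins, since broadcasting the outer side would duplicate its rows.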
Example #1
Source File: OuterJoinITCase.java    From Flink-CEPplus with Apache License 2.0
private void testFullOuterJoinOnTuplesWithKeyPositions(JoinHint hint) throws Exception {
	/*
	 * UDF full outer join on tuples with key field positions
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple3<Integer, Long, String>> ds1 = CollectionDataSets.getSmall3TupleDataSet(env);
	DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.getSmall5TupleDataSet(env);
	DataSet<Tuple2<String, String>> joinDs =
			ds1.fullOuterJoin(ds2, hint)
					.where(0)
					.equalTo(2)
					.with(new T3T5FlatJoin());

	List<Tuple2<String, String>> result = joinDs.collect();

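	// rows without a match in the other input are padded with null on that side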
	String expected = "null,Hallo\n" +
			"Hi,Hallo Welt\n" +
			"Hello,Hallo Welt wie\n" +
			"Hello world,null\n";

	compareResultAsTuples(result, expected);
}
 
Example #2
Source File: EdgeDegreesPair.java    From Flink-CEPplus with Apache License 2.0
@Override
public DataSet<Edge<K, Tuple3<EV, Degrees, Degrees>>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// s, t, d(s)
	DataSet<Edge<K, Tuple2<EV, Degrees>>> edgeSourceDegrees = input
		.run(new EdgeSourceDegrees<K, VV, EV>()
			.setParallelism(parallelism));

	// t, d(t)
	DataSet<Vertex<K, Degrees>> vertexDegrees = input
		.run(new VertexDegrees<K, VV, EV>()
			.setParallelism(parallelism));

	// s, t, (d(s), d(t))
	return edgeSourceDegrees
		.join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND)
		.where(1)
		.equalTo(0)
		.with(new JoinEdgeDegreeWithVertexDegree<>())
			.setParallelism(parallelism)
			.name("Edge target degree");
}
 
Example #3
Source File: OuterJoinNode.java    From Flink-CEPplus with Apache License 2.0
private List<OperatorDescriptorDual> createRightOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> list = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			list.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, true));
			list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, true));
			break;
		case REPARTITION_SORT_MERGE:
			list.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, false));
			break;
		case REPARTITION_HASH_FIRST:
			list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_FIRST:
			list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, false));
			break;
		case REPARTITION_HASH_SECOND:
			list.add(new HashRightOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for right outer join");
	}
	return list;
}
 
Example #4
Source File: OuterJoinNode.java    From flink with Apache License 2.0
private List<OperatorDescriptorDual> createFullOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> list = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			list.add(new SortMergeFullOuterJoinDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_SORT_MERGE:
			list.add(new SortMergeFullOuterJoinDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_FIRST:
			list.add(new HashFullOuterJoinBuildFirstDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_SECOND:
			list.add(new HashFullOuterJoinBuildSecondDescriptor(this.keys1, this.keys2));
			break;
		case BROADCAST_HASH_FIRST:
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for full outer join");
	}
	return list;
}
 
Example #5
Source File: EdgeTargetDegrees.java    From flink with Apache License 2.0
@Override
public DataSet<Edge<K, Tuple2<EV, Degrees>>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// t, d(t)
	DataSet<Vertex<K, Degrees>> vertexDegrees = input
		.run(new VertexDegrees<K, VV, EV>()
			.setParallelism(parallelism));

	// s, t, d(t)
	return input.getEdges()
		.join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND)
		.where(1)
		.equalTo(0)
		.with(new JoinEdgeWithVertexDegree<>())
			.setParallelism(parallelism)
			.name("Edge target degrees");
}
 
Example #6
Source File: EdgeDegreePair.java    From flink with Apache License 2.0
@Override
public DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// s, t, d(s)
	DataSet<Edge<K, Tuple2<EV, LongValue>>> edgeSourceDegrees = input
		.run(new EdgeSourceDegree<K, VV, EV>()
			.setReduceOnTargetId(reduceOnTargetId.get())
			.setParallelism(parallelism));

	// t, d(t)
	DataSet<Vertex<K, LongValue>> vertexDegrees = input
		.run(new VertexDegree<K, VV, EV>()
			.setReduceOnTargetId(reduceOnTargetId.get())
			.setParallelism(parallelism));

	// s, t, (d(s), d(t))
	return edgeSourceDegrees
		.join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND)
		.where(1)
		.equalTo(0)
		.with(new JoinEdgeDegreeWithVertexDegree<>())
			.setParallelism(parallelism)
			.name("Edge target degree");
}
 
Example #7
Source File: OuterJoinITCase.java    From Flink-CEPplus with Apache License 2.0
private void testRightOuterJoinOnTuplesWithKeyPositions(JoinHint hint) throws Exception {
	/*
	 * UDF right outer join on tuples with key field positions
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple3<Integer, Long, String>> ds1 = CollectionDataSets.getSmall3TupleDataSet(env);
	DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.getSmall5TupleDataSet(env);
	DataSet<Tuple2<String, String>> joinDs =
			ds1.rightOuterJoin(ds2, hint)
					.where(1)
					.equalTo(1)
					.with(new T3T5FlatJoin());

	List<Tuple2<String, String>> result = joinDs.collect();

	String expected = "Hi,Hallo\n" +
			"Hello,Hallo Welt\n" +
			"null,Hallo Welt wie\n" +
			"Hello world,Hallo Welt\n";

	compareResultAsTuples(result, expected);
}
 
Example #8
Source File: EdgeDegreePair.java    From Flink-CEPplus with Apache License 2.0
@Override
public DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// s, t, d(s)
	DataSet<Edge<K, Tuple2<EV, LongValue>>> edgeSourceDegrees = input
		.run(new EdgeSourceDegree<K, VV, EV>()
			.setReduceOnTargetId(reduceOnTargetId.get())
			.setParallelism(parallelism));

	// t, d(t)
	DataSet<Vertex<K, LongValue>> vertexDegrees = input
		.run(new VertexDegree<K, VV, EV>()
			.setReduceOnTargetId(reduceOnTargetId.get())
			.setParallelism(parallelism));

	// s, t, (d(s), d(t))
	return edgeSourceDegrees
		.join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND)
		.where(1)
		.equalTo(0)
		.with(new JoinEdgeDegreeWithVertexDegree<>())
			.setParallelism(parallelism)
			.name("Edge target degree");
}
 
Example #9
Source File: OuterJoinNode.java    From flink with Apache License 2.0
private List<OperatorDescriptorDual> getDataProperties() {
	OuterJoinOperatorBase<?, ?, ?, ?> operator = getOperator();

	OuterJoinType type = operator.getOuterJoinType();

	JoinHint joinHint = operator.getJoinHint();
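	// no explicit hint set: fall back to letting the optimizer choose the strategy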
	joinHint = joinHint == null ? JoinHint.OPTIMIZER_CHOOSES : joinHint;

	List<OperatorDescriptorDual> list;
	switch (type) {
		case LEFT:
			list = createLeftOuterJoinDescriptors(joinHint);
			break;
		case RIGHT:
			list = createRightOuterJoinDescriptors(joinHint);
			break;
		case FULL:
			list = createFullOuterJoinDescriptors(joinHint);
			break;
		default:
			throw new CompilerException("Unknown outer join type: " + type);
	}

	Partitioner<?> customPartitioner = operator.getCustomPartitioner();
	if (customPartitioner != null) {
		for (OperatorDescriptorDual desc : list) {
			((AbstractJoinDescriptor) desc).setCustomPartitioner(customPartitioner);
		}
	}
	return list;
}
 
Example #10
Source File: JoinTranslationTest.java    From flink with Apache License 2.0
@Test
public void testPartitionHashSecondTest() {
	try {
		DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.REPARTITION_HASH_SECOND);
		assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput1().getShipStrategy());
		assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput2().getShipStrategy());
		assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND, node.getDriverStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getClass().getSimpleName() + ": " + e.getMessage());
	}
}
 
Example #11
Source File: BipartiteGraph.java    From Flink-CEPplus with Apache License 2.0
private DataSet<Tuple5<KT, KB, EV, VVT, VVB>> joinEdgeWithVertices() {
	return edges
		.join(topVertices, JoinHint.REPARTITION_HASH_SECOND)
		.where(0)
		.equalTo(0)
		.projectFirst(0, 1, 2)
		.<Tuple4<KT, KB, EV, VVT>>projectSecond(1)
			.name("Edge with vertex")
		.join(bottomVertices, JoinHint.REPARTITION_HASH_SECOND)
		.where(1)
		.equalTo(0)
		.projectFirst(0, 1, 2, 3)
		.<Tuple5<KT, KB, EV, VVT, VVB>>projectSecond(1)
			.name("Edge with vertices");
}
 
Example #12
Source File: JoinTranslationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testBroadcastHashFirstTest() {
	try {
		DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.BROADCAST_HASH_FIRST);
		assertEquals(ShipStrategyType.BROADCAST, node.getInput1().getShipStrategy());
		assertEquals(ShipStrategyType.FORWARD, node.getInput2().getShipStrategy());
		assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST, node.getDriverStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getClass().getSimpleName() + ": " + e.getMessage());
	}
}
 
Example #13
Source File: IterationsCompilerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWorksetIterationPipelineBreakerPlacement() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(8);
		
		// the workset (input two of the delta iteration) is the same as what is consumed by the successive join
		DataSet<Tuple2<Long, Long>> initialWorkset = env.readCsvFile("/some/file/path").types(Long.class).map(new DuplicateValue());
		
		DataSet<Tuple2<Long, Long>> initialSolutionSet = env.readCsvFile("/some/file/path").types(Long.class).map(new DuplicateValue());
		
		// trivial iteration, since we are interested in the inputs to the iteration
		DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration = initialSolutionSet.iterateDelta(initialWorkset, 100, 0);
		
		DataSet<Tuple2<Long, Long>> next = iteration.getWorkset().map(new IdentityMapper<Tuple2<Long,Long>>());
		
		DataSet<Tuple2<Long, Long>> result = iteration.closeWith(next, next);
		
		initialWorkset
			.join(result, JoinHint.REPARTITION_HASH_FIRST)
			.where(0).equalTo(0)
			.output(new DiscardingOutputFormat<Tuple2<Tuple2<Long, Long>, Tuple2<Long, Long>>>());
		
		Plan p = env.createProgramPlan();
		compileNoStats(p);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #14
Source File: IterationIncompleteDynamicPathConsumptionITCase.java    From flink with Apache License 2.0
@Override
protected void testProgram() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	// the test data is constructed such that the merge join zig zag
	// has an early out, leaving elements on the dynamic path input unconsumed

	DataSet<Path> edges = env.fromElements(
			new Path(1, 2),
			new Path(1, 4),
			new Path(3, 6),
			new Path(3, 8),
			new Path(1, 10),
			new Path(1, 12),
			new Path(3, 14),
			new Path(3, 16),
			new Path(1, 18),
			new Path(1, 20));

	IterativeDataSet<Path> currentPaths = edges.iterate(10);

	DataSet<Path> newPaths = currentPaths
			.join(edges, JoinHint.REPARTITION_SORT_MERGE).where("to").equalTo("from")
				.with(new PathConnector())
			.union(currentPaths).distinct("from", "to");

	DataSet<Path> result = currentPaths.closeWith(newPaths);

	result.output(new DiscardingOutputFormat<Path>());

	env.execute();
}
 
Example #15
Source File: IterationIncompleteStaticPathConsumptionITCase.java    From flink with Apache License 2.0
@Override
protected void testProgram() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	// the test data is constructed such that the merge join zig zag
	// has an early out, leaving elements on the static path input unconsumed

	DataSet<Path> edges = env.fromElements(
			new Path(2, 1),
			new Path(4, 1),
			new Path(6, 3),
			new Path(8, 3),
			new Path(10, 1),
			new Path(12, 1),
			new Path(14, 3),
			new Path(16, 3),
			new Path(18, 1),
			new Path(20, 1));

	IterativeDataSet<Path> currentPaths = edges.iterate(10);

	DataSet<Path> newPaths = currentPaths
			.join(edges, JoinHint.REPARTITION_SORT_MERGE).where("to").equalTo("from")
				.with(new PathConnector())
			.union(currentPaths).distinct("from", "to");

	DataSet<Path> result = currentPaths.closeWith(newPaths);

	result.output(new DiscardingOutputFormat<Path>());

	env.execute();
}
 
Example #16
Source File: JoinOperator.java    From flink with Apache License 2.0
protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType) {
	super(input1, input2, keys1, keys2,
			new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
			returnType, hint, Utils.getCallLocationName(4)); // We need to use the 4th element in the stack because the call comes through .types().

	joinProj = null;
}
 
Example #17
Source File: JoinOperatorSetsBase.java    From flink with Apache License 2.0
public JoinOperatorSetsBase(DataSet<I1> input1, DataSet<I2> input2, JoinHint hint, JoinType type) {
	if (input1 == null || input2 == null) {
		throw new NullPointerException();
	}

	this.input1 = input1;
	this.input2 = input2;
	this.joinHint = hint;
	this.joinType = type;
}
 
Example #18
Source File: JoinTranslationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testBroadcastHashSecondTest() {
	try {
		DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.BROADCAST_HASH_SECOND);
		assertEquals(ShipStrategyType.FORWARD, node.getInput1().getShipStrategy());
		assertEquals(ShipStrategyType.BROADCAST, node.getInput2().getShipStrategy());
		assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND, node.getDriverStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getClass().getSimpleName() + ": " + e.getMessage());
	}
}
 
Example #19
Source File: RightOuterJoinOperatorTest.java    From flink with Apache License 2.0
private void testRightOuterStrategies(JoinHint hint) {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds1 = env.fromCollection(emptyTupleData, tupleTypeInfo);
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds2 = env.fromCollection(emptyTupleData, tupleTypeInfo);

	// should work
	ds1.rightOuterJoin(ds2, hint)
			.where(0).equalTo(4)
			.with(new DummyJoin());
}
 
Example #20
Source File: JoinOperator.java    From Flink-CEPplus with Apache License 2.0
protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType) {
	super(input1, input2, keys1, keys2,
			new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
			returnType, hint, Utils.getCallLocationName(4)); // We need to use the 4th element in the stack because the call comes through .types().

	joinProj = null;
}
 
Example #21
Source File: LeftOuterJoinOperatorTest.java    From Flink-CEPplus with Apache License 2.0
private void testLeftOuterStrategies(JoinHint hint) {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds1 = env.fromCollection(emptyTupleData, tupleTypeInfo);
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds2 = env.fromCollection(emptyTupleData, tupleTypeInfo);

	// should work
	ds1.leftOuterJoin(ds2, hint)
			.where(0).equalTo(4)
			.with(new DummyJoin());
}
 
Example #22
Source File: JoinOperator.java    From flink with Apache License 2.0
public EquiJoin(DataSet<I1> input1, DataSet<I2> input2,
		Keys<I1> keys1, Keys<I2> keys2, FlatJoinFunction<I1, I2, OUT> generatedFunction, JoinFunction<I1, I2, OUT> function,
		TypeInformation<OUT> returnType, JoinHint hint, String joinLocationName, JoinType type) {
	super(input1, input2, keys1, keys2, returnType, hint, type);

	this.joinLocationName = joinLocationName;

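	// 'function' is only validated for presence; the generated flat join
	// function below is what is actually stored and invoked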
	if (function == null) {
		throw new NullPointerException();
	}

	this.function = generatedFunction;
}
 
Example #23
Source File: JoinTranslationTest.java    From flink with Apache License 2.0
@Test
public void testPartitionHashFirstTest() {
	try {
		DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.REPARTITION_HASH_FIRST);
		assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput1().getShipStrategy());
		assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput2().getShipStrategy());
		assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST, node.getDriverStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getClass().getSimpleName() + ": " + e.getMessage());
	}
}
 
Example #24
Source File: JoinTranslationTest.java    From flink with Apache License 2.0
@Test
public void testBroadcastHashFirstTest() {
	try {
		DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.BROADCAST_HASH_FIRST);
		assertEquals(ShipStrategyType.BROADCAST, node.getInput1().getShipStrategy());
		assertEquals(ShipStrategyType.FORWARD, node.getInput2().getShipStrategy());
		assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST, node.getDriverStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getClass().getSimpleName() + ": " + e.getMessage());
	}
}
 
Example #25
Source File: LeftOuterJoinOperatorTest.java    From flink with Apache License 2.0
private void testLeftOuterStrategies(JoinHint hint) {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds1 = env.fromCollection(emptyTupleData, tupleTypeInfo);
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds2 = env.fromCollection(emptyTupleData, tupleTypeInfo);

	// should work
	ds1.leftOuterJoin(ds2, hint)
			.where(0).equalTo(4)
			.with(new DummyJoin());
}
 
Example #26
Source File: JoinTranslationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testPartitionHashFirstTest() {
	try {
		DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.REPARTITION_HASH_FIRST);
		assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput1().getShipStrategy());
		assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput2().getShipStrategy());
		assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST, node.getDriverStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getClass().getSimpleName() + ": " + e.getMessage());
	}
}