Java Code Examples for org.apache.flink.optimizer.plan.Channel#getSource()

The following examples show how to use org.apache.flink.optimizer.plan.Channel#getSource(). Each example notes the project (Apache Flink or Flink-CEPplus) and the source file it was taken from.
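Before the examples, a short orientation: Channel#getSource() returns the PlanNode that produces the data carried by the channel, and the call sites below either inspect that producer (for example its parallelism or program operator) or use it as the source of a newly built Channel. The fragment below is a minimal sketch of that pattern; it is not taken from the projects listed below, and the helper name forwardFromSource is purely illustrative.

static Channel forwardFromSource(Channel in) {
	// the plan node that produces the data carried by this channel;
	// its properties (e.g. producer.getParallelism()) are what the examples below query
	PlanNode producer = in.getSource();

	// build a new channel from the same producer and forward its data locally,
	// mirroring how the combiner input channels are created in the examples below
	Channel forward = new Channel(producer);
	forward.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
	return forward;
}
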
Example 1
Source File: BinaryUnionReplacer.java    From flink with Apache License 2.0
public void collect(Channel in, List<Channel> inputs) {
	if (in.getSource() instanceof NAryUnionPlanNode) {
		// sanity check
		if (in.getShipStrategy() != ShipStrategyType.FORWARD) {
			throw new CompilerException("Bug: Plan generation for Unions picked a ship strategy between binary plan operators.");
		}
		if (!(in.getLocalStrategy() == null || in.getLocalStrategy() == LocalStrategy.NONE)) {
			throw new CompilerException("Bug: Plan generation for Unions picked a local strategy between binary plan operators.");
		}

		inputs.addAll(((NAryUnionPlanNode) in.getSource()).getListOfInputs());
	} else {
		// is not a collapsed union node, so we take the channel directly
		inputs.add(in);
	}
}
 
Example 2
Source File: BinaryUnionReplacer.java    From Flink-CEPplus with Apache License 2.0
public void collect(Channel in, List<Channel> inputs) {
	if (in.getSource() instanceof NAryUnionPlanNode) {
		// sanity check
		if (in.getShipStrategy() != ShipStrategyType.FORWARD) {
			throw new CompilerException("Bug: Plan generation for Unions picked a ship strategy between binary plan operators.");
		}
		if (!(in.getLocalStrategy() == null || in.getLocalStrategy() == LocalStrategy.NONE)) {
			throw new CompilerException("Bug: Plan generation for Unions picked a local strategy between binary plan operators.");
		}

		inputs.addAll(((NAryUnionPlanNode) in.getSource()).getListOfInputs());
	} else {
		// is not a collapsed union node, so we take the channel directly
		inputs.add(in);
	}
}
 
Example 3
Source File: AllGroupWithPartialPreGroupProperties.java    From Flink-CEPplus with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
	if (in.getShipStrategy() == ShipStrategyType.FORWARD) {
		// locally connected, directly instantiate
		return new SingleInputPlanNode(node, "GroupReduce ("+node.getOperator().getName()+")",
										in, DriverStrategy.ALL_GROUP_REDUCE);
	} else {
		// non-forward case: plug in a combiner
		Channel toCombiner = new Channel(in.getSource());
		toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
		
		// create an input node for combine with same parallelism as input node
		GroupReduceNode combinerNode = ((GroupReduceNode) node).getCombinerUtilityNode();
		combinerNode.setParallelism(in.getSource().getParallelism());

		SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode,
				"Combine ("+node.getOperator().getName()+")", toCombiner, DriverStrategy.ALL_GROUP_REDUCE_COMBINE);
		combiner.setCosts(new Costs(0, 0));
		combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties());
		
		Channel toReducer = new Channel(combiner);
		toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(),
									in.getShipStrategySortOrder(), in.getDataExchangeMode());

		toReducer.setLocalStrategy(in.getLocalStrategy(), in.getLocalStrategyKeys(), in.getLocalStrategySortOrder());
		return new SingleInputPlanNode(node, "GroupReduce ("+node.getOperator().getName()+")",
										toReducer, DriverStrategy.ALL_GROUP_REDUCE);
	}
}
 
Example 4
Source File: AllReduceProperties.java    From flink with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
	if (in.getShipStrategy() == ShipStrategyType.FORWARD) {
		// locally connected, directly instantiate
		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")",
										in, DriverStrategy.ALL_REDUCE);
	} else {
		// non-forward case: plug in a combiner
		Channel toCombiner = new Channel(in.getSource());
		toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
		
		// create an input node for combine with same parallelism as input node
		ReduceNode combinerNode = ((ReduceNode) node).getCombinerUtilityNode();
		combinerNode.setParallelism(in.getSource().getParallelism());

		SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode,
				"Combine ("+node.getOperator().getName()+")", toCombiner, DriverStrategy.ALL_REDUCE);
		combiner.setCosts(new Costs(0, 0));
		combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties());
		
		Channel toReducer = new Channel(combiner);
		toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(),
									in.getShipStrategySortOrder(), in.getDataExchangeMode());
		toReducer.setLocalStrategy(in.getLocalStrategy(), in.getLocalStrategyKeys(),
									in.getLocalStrategySortOrder());

		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")",
										toReducer, DriverStrategy.ALL_REDUCE);
	}
}
 
Example 5
Source File: AllGroupWithPartialPreGroupProperties.java    From flink with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
	if (in.getShipStrategy() == ShipStrategyType.FORWARD) {
		// locally connected, directly instantiate
		return new SingleInputPlanNode(node, "GroupReduce ("+node.getOperator().getName()+")",
										in, DriverStrategy.ALL_GROUP_REDUCE);
	} else {
		// non-forward case: plug in a combiner
		Channel toCombiner = new Channel(in.getSource());
		toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
		
		// create an input node for combine with same parallelism as input node
		GroupReduceNode combinerNode = ((GroupReduceNode) node).getCombinerUtilityNode();
		combinerNode.setParallelism(in.getSource().getParallelism());

		SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode,
				"Combine ("+node.getOperator().getName()+")", toCombiner, DriverStrategy.ALL_GROUP_REDUCE_COMBINE);
		combiner.setCosts(new Costs(0, 0));
		combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties());
		
		Channel toReducer = new Channel(combiner);
		toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(),
									in.getShipStrategySortOrder(), in.getDataExchangeMode());

		toReducer.setLocalStrategy(in.getLocalStrategy(), in.getLocalStrategyKeys(), in.getLocalStrategySortOrder());
		return new SingleInputPlanNode(node, "GroupReduce ("+node.getOperator().getName()+")",
										toReducer, DriverStrategy.ALL_GROUP_REDUCE);
	}
}
 
Example 6
Source File: RangePartitionRewriter.java    From flink with Apache License 2.0
@Override
public void postVisit(PlanNode node) {

	if(node instanceof IterationPlanNode) {
		IterationPlanNode iNode = (IterationPlanNode)node;
		if(!visitedIterationNodes.contains(iNode)) {
			visitedIterationNodes.add(iNode);
			iNode.acceptForStepFunction(this);
		}
	}

	final Iterable<Channel> inputChannels = node.getInputs();
	for (Channel channel : inputChannels) {
		ShipStrategyType shipStrategy = channel.getShipStrategy();
		// Make sure we only optimize the DAG for range partitioning, and do not optimize it multiple times.
		if (shipStrategy == ShipStrategyType.PARTITION_RANGE) {

			if(channel.getDataDistribution() == null) {
				if (node.isOnDynamicPath()) {
					throw new InvalidProgramException("Range Partitioning not supported within iterations if users do not supply the data distribution.");
				}

				PlanNode channelSource = channel.getSource();
				List<Channel> newSourceOutputChannels = rewriteRangePartitionChannel(channel);
				channelSource.getOutgoingChannels().remove(channel);
				channelSource.getOutgoingChannels().addAll(newSourceOutputChannels);
			}
		}
	}
}
 
Example 7
Source File: Utils.java    From flink with Apache License 2.0
public static TypeComparatorFactory<?> getShipComparator(Channel channel, ExecutionConfig executionConfig) {
	PlanNode source = channel.getSource();
	Operator<?> javaOp = source.getProgramOperator();
	TypeInformation<?> type = javaOp.getOperatorInfo().getOutputType();
	return createComparator(type, channel.getShipStrategyKeys(),
		getSortOrders(channel.getShipStrategyKeys(), channel.getShipStrategySortOrder()), executionConfig);
}
 
Example 8
Source File: RangePartitionRewriter.java    From Flink-CEPplus with Apache License 2.0
@Override
public void postVisit(PlanNode node) {

	if(node instanceof IterationPlanNode) {
		IterationPlanNode iNode = (IterationPlanNode)node;
		if(!visitedIterationNodes.contains(iNode)) {
			visitedIterationNodes.add(iNode);
			iNode.acceptForStepFunction(this);
		}
	}

	final Iterable<Channel> inputChannels = node.getInputs();
	for (Channel channel : inputChannels) {
		ShipStrategyType shipStrategy = channel.getShipStrategy();
		// Make sure we only optimize the DAG for range partitioning, and do not optimize it multiple times.
		if (shipStrategy == ShipStrategyType.PARTITION_RANGE) {

			if(channel.getDataDistribution() == null) {
				if (node.isOnDynamicPath()) {
					throw new InvalidProgramException("Range Partitioning not supported within iterations if users do not supply the data distribution.");
				}

				PlanNode channelSource = channel.getSource();
				List<Channel> newSourceOutputChannels = rewriteRangePartitionChannel(channel);
				channelSource.getOutgoingChannels().remove(channel);
				channelSource.getOutgoingChannels().addAll(newSourceOutputChannels);
			}
		}
	}
}
 
Example 9
Source File: Utils.java    From Flink-CEPplus with Apache License 2.0
public static TypeComparatorFactory<?> getShipComparator(Channel channel, ExecutionConfig executionConfig) {
	PlanNode source = channel.getSource();
	Operator<?> javaOp = source.getProgramOperator();
	TypeInformation<?> type = javaOp.getOperatorInfo().getOutputType();
	return createComparator(type, channel.getShipStrategyKeys(),
		getSortOrders(channel.getShipStrategyKeys(), channel.getShipStrategySortOrder()), executionConfig);
}
 
Example 10
Source File: AllReduceProperties.java    From Flink-CEPplus with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
	if (in.getShipStrategy() == ShipStrategyType.FORWARD) {
		// locally connected, directly instantiate
		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")",
										in, DriverStrategy.ALL_REDUCE);
	} else {
		// non-forward case: plug in a combiner
		Channel toCombiner = new Channel(in.getSource());
		toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
		
		// create an input node for combine with same parallelism as input node
		ReduceNode combinerNode = ((ReduceNode) node).getCombinerUtilityNode();
		combinerNode.setParallelism(in.getSource().getParallelism());

		SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode,
				"Combine ("+node.getOperator().getName()+")", toCombiner, DriverStrategy.ALL_REDUCE);
		combiner.setCosts(new Costs(0, 0));
		combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties());
		
		Channel toReducer = new Channel(combiner);
		toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(),
									in.getShipStrategySortOrder(), in.getDataExchangeMode());
		toReducer.setLocalStrategy(in.getLocalStrategy(), in.getLocalStrategyKeys(),
									in.getLocalStrategySortOrder());

		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")",
										toReducer, DriverStrategy.ALL_REDUCE);
	}
}
 
Example 11
Source File: GroupReduceWithCombineProperties.java    From flink with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
	if (in.getShipStrategy() == ShipStrategyType.FORWARD) {
		if(in.getSource().getOptimizerNode() instanceof PartitionNode) {
			LOG.warn("Cannot automatically inject combiner for GroupReduceFunction. Please add an explicit combiner with combineGroup() in front of the partition operator.");
		}
		// adjust an existing sort: the grouping keys stay the same, so it can be switched to a combining sort for this driver
		if (in.getLocalStrategy() == LocalStrategy.SORT) {
			if (!in.getLocalStrategyKeys().isValidUnorderedPrefix(this.keys)) {
				throw new RuntimeException("Bug: Inconsistent sort for group strategy.");
			}
			in.setLocalStrategy(LocalStrategy.COMBININGSORT, in.getLocalStrategyKeys(),
								in.getLocalStrategySortOrder());
		}
		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")", in,
										DriverStrategy.SORTED_GROUP_REDUCE, this.keyList);
	} else {
		// non forward case. all local properties are killed anyways, so we can safely plug in a combiner
		Channel toCombiner = new Channel(in.getSource());
		toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);

		// create an input node for combine with same parallelism as input node
		GroupReduceNode combinerNode = ((GroupReduceNode) node).getCombinerUtilityNode();
		combinerNode.setParallelism(in.getSource().getParallelism());

		SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine ("+node.getOperator()
				.getName()+")", toCombiner, DriverStrategy.SORTED_GROUP_COMBINE);
		combiner.setCosts(new Costs(0, 0));
		combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties());
		// set sorting comparator key info
		combiner.setDriverKeyInfo(in.getLocalStrategyKeys(), in.getLocalStrategySortOrder(), 0);
		// set grouping comparator key info
		combiner.setDriverKeyInfo(this.keyList, 1);
		
		Channel toReducer = new Channel(combiner);
		toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(),
								in.getShipStrategySortOrder(), in.getDataExchangeMode());
		if (in.getShipStrategy() == ShipStrategyType.PARTITION_RANGE) {
			toReducer.setDataDistribution(in.getDataDistribution());
		}
		toReducer.setLocalStrategy(LocalStrategy.COMBININGSORT, in.getLocalStrategyKeys(),
									in.getLocalStrategySortOrder());

		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")",
										toReducer, DriverStrategy.SORTED_GROUP_REDUCE, this.keyList);
	}
}
 
Example 12
Source File: TwoInputNode.java    From flink with Apache License 2.0
protected void instantiate(OperatorDescriptorDual operator, Channel in1, Channel in2,
		List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target, CostEstimator estimator,
		RequestedGlobalProperties globPropsReq1, RequestedGlobalProperties globPropsReq2,
		RequestedLocalProperties locPropsReq1, RequestedLocalProperties locPropsReq2)
{
	final PlanNode inputSource1 = in1.getSource();
	final PlanNode inputSource2 = in2.getSource();
	
	for (List<NamedChannel> broadcastChannelsCombination: Sets.cartesianProduct(broadcastPlanChannels)) {
		
		boolean validCombination = true;
		
		// check whether the broadcast inputs use the same plan candidate at the branching point
		for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
			NamedChannel nc = broadcastChannelsCombination.get(i);
			PlanNode bcSource = nc.getSource();
			
			if (!(areBranchCompatible(bcSource, inputSource1) || areBranchCompatible(bcSource, inputSource2))) {
				validCombination = false;
				break;
			}
			
			// check branch compatibility against all other broadcast variables
			for (int k = 0; k < i; k++) {
				PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
				
				if (!areBranchCompatible(bcSource, otherBcSource)) {
					validCombination = false;
					break;
				}
			}
		}
		
		if (!validCombination) {
			continue;
		}
		
		placePipelineBreakersIfNecessary(operator.getStrategy(), in1, in2);
		
		DualInputPlanNode node = operator.instantiate(in1, in2, this);
		node.setBroadcastInputs(broadcastChannelsCombination);

		SemanticProperties semPropsGlobalPropFiltering = getSemanticPropertiesForGlobalPropertyFiltering();
		GlobalProperties gp1 = in1.getGlobalProperties().clone()
				.filterBySemanticProperties(semPropsGlobalPropFiltering, 0);
		GlobalProperties gp2 = in2.getGlobalProperties().clone()
				.filterBySemanticProperties(semPropsGlobalPropFiltering, 1);
		GlobalProperties combined = operator.computeGlobalProperties(gp1, gp2);

		SemanticProperties semPropsLocalPropFiltering = getSemanticPropertiesForLocalPropertyFiltering();
		LocalProperties lp1 = in1.getLocalProperties().clone()
				.filterBySemanticProperties(semPropsLocalPropFiltering, 0);
		LocalProperties lp2 = in2.getLocalProperties().clone()
				.filterBySemanticProperties(semPropsLocalPropFiltering, 1);
		LocalProperties locals = operator.computeLocalProperties(lp1, lp2);
		
		node.initProperties(combined, locals);
		node.updatePropertiesWithUniqueSets(getUniqueFields());
		target.add(node);
	}
}
 
Example 13
Source File: TwoInputNode.java    From Flink-CEPplus with Apache License 2.0
protected void instantiate(OperatorDescriptorDual operator, Channel in1, Channel in2,
		List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target, CostEstimator estimator,
		RequestedGlobalProperties globPropsReq1, RequestedGlobalProperties globPropsReq2,
		RequestedLocalProperties locPropsReq1, RequestedLocalProperties locPropsReq2)
{
	final PlanNode inputSource1 = in1.getSource();
	final PlanNode inputSource2 = in2.getSource();
	
	for (List<NamedChannel> broadcastChannelsCombination: Sets.cartesianProduct(broadcastPlanChannels)) {
		
		boolean validCombination = true;
		
		// check whether the broadcast inputs use the same plan candidate at the branching point
		for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
			NamedChannel nc = broadcastChannelsCombination.get(i);
			PlanNode bcSource = nc.getSource();
			
			if (!(areBranchCompatible(bcSource, inputSource1) || areBranchCompatible(bcSource, inputSource2))) {
				validCombination = false;
				break;
			}
			
			// check branch compatibility against all other broadcast variables
			for (int k = 0; k < i; k++) {
				PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
				
				if (!areBranchCompatible(bcSource, otherBcSource)) {
					validCombination = false;
					break;
				}
			}
		}
		
		if (!validCombination) {
			continue;
		}
		
		placePipelineBreakersIfNecessary(operator.getStrategy(), in1, in2);
		
		DualInputPlanNode node = operator.instantiate(in1, in2, this);
		node.setBroadcastInputs(broadcastChannelsCombination);

		SemanticProperties semPropsGlobalPropFiltering = getSemanticPropertiesForGlobalPropertyFiltering();
		GlobalProperties gp1 = in1.getGlobalProperties().clone()
				.filterBySemanticProperties(semPropsGlobalPropFiltering, 0);
		GlobalProperties gp2 = in2.getGlobalProperties().clone()
				.filterBySemanticProperties(semPropsGlobalPropFiltering, 1);
		GlobalProperties combined = operator.computeGlobalProperties(gp1, gp2);

		SemanticProperties semPropsLocalPropFiltering = getSemanticPropertiesForLocalPropertyFiltering();
		LocalProperties lp1 = in1.getLocalProperties().clone()
				.filterBySemanticProperties(semPropsLocalPropFiltering, 0);
		LocalProperties lp2 = in2.getLocalProperties().clone()
				.filterBySemanticProperties(semPropsLocalPropFiltering, 1);
		LocalProperties locals = operator.computeLocalProperties(lp1, lp2);
		
		node.initProperties(combined, locals);
		node.updatePropertiesWithUniqueSets(getUniqueFields());
		target.add(node);
	}
}
 
Example 14
Source File: SingleInputNode.java    From Flink-CEPplus with Apache License 2.0
protected void instantiateCandidate(OperatorDescriptorSingle dps, Channel in, List<Set<? extends NamedChannel>> broadcastPlanChannels,
		List<PlanNode> target, CostEstimator estimator, RequestedGlobalProperties globPropsReq, RequestedLocalProperties locPropsReq)
{
	final PlanNode inputSource = in.getSource();
	
	for (List<NamedChannel> broadcastChannelsCombination: Sets.cartesianProduct(broadcastPlanChannels)) {
		
		boolean validCombination = true;
		boolean requiresPipelinebreaker = false;
		
		// check whether the broadcast inputs use the same plan candidate at the branching point
		for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
			NamedChannel nc = broadcastChannelsCombination.get(i);
			PlanNode bcSource = nc.getSource();
			
			// check branch compatibility against input
			if (!areBranchCompatible(bcSource, inputSource)) {
				validCombination = false;
				break;
			}
			
			// check branch compatibility against all other broadcast variables
			for (int k = 0; k < i; k++) {
				PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
				
				if (!areBranchCompatible(bcSource, otherBcSource)) {
					validCombination = false;
					break;
				}
			}
			
			// check if there is a common predecessor and whether there is a dam on the way to all common predecessors
			if (in.isOnDynamicPath() && this.hereJoinedBranches != null) {
				for (OptimizerNode brancher : this.hereJoinedBranches) {
					PlanNode candAtBrancher = in.getSource().getCandidateAtBranchPoint(brancher);
					
					if (candAtBrancher == null) {
						// closed branch between two broadcast variables
						continue;
					}
					
					SourceAndDamReport res = in.getSource().hasDamOnPathDownTo(candAtBrancher);
					if (res == NOT_FOUND) {
						throw new CompilerException("Bug: Tracing dams for deadlock detection is broken.");
					} else if (res == FOUND_SOURCE) {
						requiresPipelinebreaker = true;
						break;
					} else if (res == FOUND_SOURCE_AND_DAM) {
						// good
					} else {
						throw new CompilerException();
					}
				}
			}
		}
		
		if (!validCombination) {
			continue;
		}
		
		if (requiresPipelinebreaker) {
			in.setTempMode(in.getTempMode().makePipelineBreaker());
		}
		
		final SingleInputPlanNode node = dps.instantiate(in, this);
		node.setBroadcastInputs(broadcastChannelsCombination);
		
		// compute how the strategy affects the properties
		GlobalProperties gProps = in.getGlobalProperties().clone();
		LocalProperties lProps = in.getLocalProperties().clone();
		gProps = dps.computeGlobalProperties(gProps);
		lProps = dps.computeLocalProperties(lProps);

		// filter by the user code field copies
		gProps = gProps.filterBySemanticProperties(getSemanticPropertiesForGlobalPropertyFiltering(), 0);
		lProps = lProps.filterBySemanticProperties(getSemanticPropertiesForLocalPropertyFiltering(), 0);
		
		// apply
		node.initProperties(gProps, lProps);
		node.updatePropertiesWithUniqueSets(getUniqueFields());
		target.add(node);
	}
}
 
Example 15
Source File: GroupReduceWithCombineProperties.java    From Flink-CEPplus with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
	if (in.getShipStrategy() == ShipStrategyType.FORWARD) {
		if(in.getSource().getOptimizerNode() instanceof PartitionNode) {
			LOG.warn("Cannot automatically inject combiner for GroupReduceFunction. Please add an explicit combiner with combineGroup() in front of the partition operator.");
		}
		// adjust an existing sort: the grouping keys stay the same, so it can be switched to a combining sort for this driver
		if (in.getLocalStrategy() == LocalStrategy.SORT) {
			if (!in.getLocalStrategyKeys().isValidUnorderedPrefix(this.keys)) {
				throw new RuntimeException("Bug: Inconsistent sort for group strategy.");
			}
			in.setLocalStrategy(LocalStrategy.COMBININGSORT, in.getLocalStrategyKeys(),
								in.getLocalStrategySortOrder());
		}
		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")", in,
										DriverStrategy.SORTED_GROUP_REDUCE, this.keyList);
	} else {
		// non forward case. all local properties are killed anyways, so we can safely plug in a combiner
		Channel toCombiner = new Channel(in.getSource());
		toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);

		// create an input node for combine with same parallelism as input node
		GroupReduceNode combinerNode = ((GroupReduceNode) node).getCombinerUtilityNode();
		combinerNode.setParallelism(in.getSource().getParallelism());

		SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine ("+node.getOperator()
				.getName()+")", toCombiner, DriverStrategy.SORTED_GROUP_COMBINE);
		combiner.setCosts(new Costs(0, 0));
		combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties());
		// set sorting comparator key info
		combiner.setDriverKeyInfo(in.getLocalStrategyKeys(), in.getLocalStrategySortOrder(), 0);
		// set grouping comparator key info
		combiner.setDriverKeyInfo(this.keyList, 1);
		
		Channel toReducer = new Channel(combiner);
		toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(),
								in.getShipStrategySortOrder(), in.getDataExchangeMode());
		if (in.getShipStrategy() == ShipStrategyType.PARTITION_RANGE) {
			toReducer.setDataDistribution(in.getDataDistribution());
		}
		toReducer.setLocalStrategy(LocalStrategy.COMBININGSORT, in.getLocalStrategyKeys(),
									in.getLocalStrategySortOrder());

		return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")",
										toReducer, DriverStrategy.SORTED_GROUP_REDUCE, this.keyList);
	}
}
 
Example 16
Source File: SingleInputNode.java    From flink with Apache License 2.0
protected void instantiateCandidate(OperatorDescriptorSingle dps, Channel in, List<Set<? extends NamedChannel>> broadcastPlanChannels,
		List<PlanNode> target, CostEstimator estimator, RequestedGlobalProperties globPropsReq, RequestedLocalProperties locPropsReq)
{
	final PlanNode inputSource = in.getSource();
	
	for (List<NamedChannel> broadcastChannelsCombination: Sets.cartesianProduct(broadcastPlanChannels)) {
		
		boolean validCombination = true;
		boolean requiresPipelinebreaker = false;
		
		// check whether the broadcast inputs use the same plan candidate at the branching point
		for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
			NamedChannel nc = broadcastChannelsCombination.get(i);
			PlanNode bcSource = nc.getSource();
			
			// check branch compatibility against input
			if (!areBranchCompatible(bcSource, inputSource)) {
				validCombination = false;
				break;
			}
			
			// check branch compatibility against all other broadcast variables
			for (int k = 0; k < i; k++) {
				PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();
				
				if (!areBranchCompatible(bcSource, otherBcSource)) {
					validCombination = false;
					break;
				}
			}
			
			// check if there is a common predecessor and whether there is a dam on the way to all common predecessors
			if (in.isOnDynamicPath() && this.hereJoinedBranches != null) {
				for (OptimizerNode brancher : this.hereJoinedBranches) {
					PlanNode candAtBrancher = in.getSource().getCandidateAtBranchPoint(brancher);
					
					if (candAtBrancher == null) {
						// closed branch between two broadcast variables
						continue;
					}
					
					SourceAndDamReport res = in.getSource().hasDamOnPathDownTo(candAtBrancher);
					if (res == NOT_FOUND) {
						throw new CompilerException("Bug: Tracing dams for deadlock detection is broken.");
					} else if (res == FOUND_SOURCE) {
						requiresPipelinebreaker = true;
						break;
					} else if (res == FOUND_SOURCE_AND_DAM) {
						// good
					} else {
						throw new CompilerException();
					}
				}
			}
		}
		
		if (!validCombination) {
			continue;
		}
		
		if (requiresPipelinebreaker) {
			in.setTempMode(in.getTempMode().makePipelineBreaker());
		}
		
		final SingleInputPlanNode node = dps.instantiate(in, this);
		node.setBroadcastInputs(broadcastChannelsCombination);
		
		// compute how the strategy affects the properties
		GlobalProperties gProps = in.getGlobalProperties().clone();
		LocalProperties lProps = in.getLocalProperties().clone();
		gProps = dps.computeGlobalProperties(gProps);
		lProps = dps.computeLocalProperties(lProps);

		// filter by the user code field copies
		gProps = gProps.filterBySemanticProperties(getSemanticPropertiesForGlobalPropertyFiltering(), 0);
		lProps = lProps.filterBySemanticProperties(getSemanticPropertiesForLocalPropertyFiltering(), 0);
		
		// apply
		node.initProperties(gProps, lProps);
		node.updatePropertiesWithUniqueSets(getUniqueFields());
		target.add(node);
	}
}