org.apache.flink.optimizer.plan.BulkPartialSolutionPlanNode Java Examples

The following examples show how to use org.apache.flink.optimizer.plan.BulkPartialSolutionPlanNode. They are taken from open source projects; the source file, originating project, and license are noted above each example.
Example #1
Source File: BulkPartialSolutionNode.java    From Flink-CEPplus with Apache License 2.0
// Caches the single candidate plan node for this partial solution; setting it a second time is an error.
public void setCandidateProperties(GlobalProperties gProps, LocalProperties lProps, Channel initialInput) {
	if (this.cachedPlans != null) {
		throw new IllegalStateException();
	} else {
		this.cachedPlans = Collections.<PlanNode>singletonList(new BulkPartialSolutionPlanNode(this,
				"PartialSolution ("+this.getOperator().getName()+")", gProps, lProps, initialInput));
	}
}
 
Example #2
Source File: BulkPartialSolutionNode.java    From Flink-CEPplus with Apache License 2.0
// Returns the candidate plan node previously cached by setCandidateProperties(...); fails if none has been set yet.
public BulkPartialSolutionPlanNode getCurrentPartialSolutionPlanNode() {
	if (this.cachedPlans != null) {
		return (BulkPartialSolutionPlanNode) this.cachedPlans.get(0);
	} else {
		throw new IllegalStateException();
	}
}
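Taken together, Examples #1 and #2 show the caching contract: a BulkPartialSolutionNode holds exactly one candidate BulkPartialSolutionPlanNode once its properties have been fixed, and that candidate is what later optimizer phases read back. The helper below is a minimal sketch of a hypothetical caller, not Flink code (imports omitted, as in the snippets on this page); it assumes the global/local properties and the initial input channel have already been computed for the iteration's input.

static BulkPartialSolutionPlanNode instantiatePartialSolution(
		BulkPartialSolutionNode node,
		GlobalProperties gProps,
		LocalProperties lProps,
		Channel initialInput) {

	// fixes the single cached candidate (Example #1); a second call would
	// trip the IllegalStateException guard
	node.setCandidateProperties(gProps, lProps, initialInput);

	// reads the cached candidate back (Example #2)
	return node.getCurrentPartialSolutionPlanNode();
}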
 
Example #3
Source File: BulkPartialSolutionNode.java    From flink with Apache License 2.0
public void setCandidateProperties(GlobalProperties gProps, LocalProperties lProps, Channel initialInput) {
	if (this.cachedPlans != null) {
		throw new IllegalStateException();
	} else {
		this.cachedPlans = Collections.<PlanNode>singletonList(new BulkPartialSolutionPlanNode(this,
				"PartialSolution ("+this.getOperator().getName()+")", gProps, lProps, initialInput));
	}
}
 
Example #4
Source File: BulkPartialSolutionNode.java    From flink with Apache License 2.0
public BulkPartialSolutionPlanNode getCurrentPartialSolutionPlanNode() {
	if (this.cachedPlans != null) {
		return (BulkPartialSolutionPlanNode) this.cachedPlans.get(0);
	} else {
		throw new IllegalStateException();
	}
}
 
Example #5
Source File: PageRankCompilerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testPageRank() {
	try {
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// get input data
		DataSet<Long> pagesInput = env.fromElements(1L);
		@SuppressWarnings("unchecked")
		DataSet<Tuple2<Long, Long>> linksInput = env.fromElements(new Tuple2<Long, Long>(1L, 2L));

		// assign initial rank to pages
		DataSet<Tuple2<Long, Double>> pagesWithRanks = pagesInput.
				map(new RankAssigner((1.0d / 10)));

		// build adjacency list from link input
		DataSet<Tuple2<Long, Long[]>> adjacencyListInput =
				linksInput.groupBy(0).reduceGroup(new BuildOutgoingEdgeList());

		// set iterative data set
		IterativeDataSet<Tuple2<Long, Double>> iteration = pagesWithRanks.iterate(10);

		Configuration cfg = new Configuration();
		cfg.setString(Optimizer.HINT_LOCAL_STRATEGY, Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);

		DataSet<Tuple2<Long, Double>> newRanks = iteration
				// join pages with outgoing edges and distribute rank
				.join(adjacencyListInput).where(0).equalTo(0).withParameters(cfg)
				.flatMap(new JoinVertexWithEdgesMatch())
				// collect and sum ranks
				.groupBy(0).aggregate(SUM, 1)
				// apply dampening factor
				.map(new Dampener(0.85, 10));

		DataSet<Tuple2<Long, Double>> finalPageRanks = iteration.closeWith(
				newRanks,
				newRanks.join(iteration).where(0).equalTo(0)
				// termination condition
				.filter(new EpsilonFilter()));

		finalPageRanks.output(new DiscardingOutputFormat<Tuple2<Long, Double>>());

		// get the plan and compile it
		Plan p = env.createProgramPlan();
		OptimizedPlan op = compileNoStats(p);

		SinkPlanNode sinkPlanNode = (SinkPlanNode) op.getDataSinks().iterator().next();
		BulkIterationPlanNode iterPlanNode = (BulkIterationPlanNode) sinkPlanNode.getInput().getSource();

		// check that the partitioning is pushed out of the first loop
		Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iterPlanNode.getInput().getShipStrategy());
		Assert.assertEquals(LocalStrategy.NONE, iterPlanNode.getInput().getLocalStrategy());

		BulkPartialSolutionPlanNode partSolPlanNode = iterPlanNode.getPartialSolutionPlanNode();
		Assert.assertEquals(ShipStrategyType.FORWARD, partSolPlanNode.getOutgoingChannels().get(0).getShipStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
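The test above also shows the usual way to reach the partial-solution node of a compiled program: start at a data sink of the OptimizedPlan, step to the BulkIterationPlanNode feeding it, and ask that node for its BulkPartialSolutionPlanNode. The helper below is an illustrative extraction of that traversal, not Flink code (imports omitted); it assumes, as the PageRank program above guarantees, that the plan has a single sink whose direct input is the bulk iteration.

static void assertPartialSolutionIsForwarded(OptimizedPlan op) {
	SinkPlanNode sink = (SinkPlanNode) op.getDataSinks().iterator().next();
	BulkIterationPlanNode iteration = (BulkIterationPlanNode) sink.getInput().getSource();

	// hash partitioning should sit on the iteration input, outside the loop ...
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput().getShipStrategy());
	Assert.assertEquals(LocalStrategy.NONE, iteration.getInput().getLocalStrategy());

	// ... so inside the loop the partial solution is simply forwarded
	BulkPartialSolutionPlanNode partialSolution = iteration.getPartialSolutionPlanNode();
	Assert.assertEquals(ShipStrategyType.FORWARD,
			partialSolution.getOutgoingChannels().get(0).getShipStrategy());
}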
 
Example #6
Source File: PageRankCompilerTest.java    From flink with Apache License 2.0
@Test
public void testPageRank() {
	try {
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// get input data
		DataSet<Long> pagesInput = env.fromElements(1L);
		@SuppressWarnings("unchecked")
		DataSet<Tuple2<Long, Long>> linksInput = env.fromElements(new Tuple2<Long, Long>(1L, 2L));

		// assign initial rank to pages
		DataSet<Tuple2<Long, Double>> pagesWithRanks = pagesInput.
				map(new RankAssigner((1.0d / 10)));

		// build adjacency list from link input
		DataSet<Tuple2<Long, Long[]>> adjacencyListInput =
				linksInput.groupBy(0).reduceGroup(new BuildOutgoingEdgeList());

		// set iterative data set
		IterativeDataSet<Tuple2<Long, Double>> iteration = pagesWithRanks.iterate(10);

		Configuration cfg = new Configuration();
		cfg.setString(Optimizer.HINT_LOCAL_STRATEGY, Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);

		DataSet<Tuple2<Long, Double>> newRanks = iteration
				// join pages with outgoing edges and distribute rank
				.join(adjacencyListInput).where(0).equalTo(0).withParameters(cfg)
				.flatMap(new JoinVertexWithEdgesMatch())
				// collect and sum ranks
				.groupBy(0).aggregate(SUM, 1)
				// apply dampening factor
				.map(new Dampener(0.85, 10));

		DataSet<Tuple2<Long, Double>> finalPageRanks = iteration.closeWith(
				newRanks,
				newRanks.join(iteration).where(0).equalTo(0)
				// termination condition
				.filter(new EpsilonFilter()));

		finalPageRanks.output(new DiscardingOutputFormat<Tuple2<Long, Double>>());

		// get the plan and compile it
		Plan p = env.createProgramPlan();
		OptimizedPlan op = compileNoStats(p);

		SinkPlanNode sinkPlanNode = (SinkPlanNode) op.getDataSinks().iterator().next();
		BulkIterationPlanNode iterPlanNode = (BulkIterationPlanNode) sinkPlanNode.getInput().getSource();

		// check that the partitioning is pushed out of the first loop
		Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iterPlanNode.getInput().getShipStrategy());
		Assert.assertEquals(LocalStrategy.NONE, iterPlanNode.getInput().getLocalStrategy());

		BulkPartialSolutionPlanNode partSolPlanNode = iterPlanNode.getPartialSolutionPlanNode();
		Assert.assertEquals(ShipStrategyType.FORWARD, partSolPlanNode.getOutgoingChannels().get(0).getShipStrategy());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 