org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator Java Examples

The following examples show how to use org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator. They are taken from the open source flink and Flink-CEPplus projects; each example notes its source file and license.
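As the examples below show, the typical pattern is to compile a DataSet Plan into an OptimizedPlan with the Optimizer and pass the result to getOptimizerPlanAsJSON; for a plan that has only been pre-optimized, a List<DataSinkNode> can be passed to getPactPlanAsJSON instead. The minimal sketch below condenses that pattern. The class name PlanDumpSketch and the method name dumpAsJson are illustrative only, and the Plan is assumed to be produced elsewhere (for example by ExecutionEnvironment#createProgramPlan()).

import org.apache.flink.api.common.Plan;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.optimizer.DataStatistics;
import org.apache.flink.optimizer.Optimizer;
import org.apache.flink.optimizer.costs.DefaultCostEstimator;
import org.apache.flink.optimizer.plan.OptimizedPlan;
import org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator;

public class PlanDumpSketch {

	// Compiles the given DataSet Plan and renders the optimized plan as a JSON string.
	public static String dumpAsJson(Plan plan) {
		// Compile the plan without statistics, using the default cost estimator.
		Optimizer optimizer = new Optimizer(
				new DataStatistics(), new DefaultCostEstimator(), new Configuration());
		OptimizedPlan optimizedPlan = optimizer.compile(plan);

		// Dump the optimized plan; setEncodeForHTML(true) additionally escapes the
		// output so it can be embedded in an HTML page.
		PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
		dumper.setEncodeForHTML(true);
		return dumper.getOptimizerPlanAsJSON(optimizedPlan);
	}
}
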
Example #1
Source File: ClientTest.java    From flink with Apache License 2.0
@Test
public void testGetExecutionPlan() throws ProgramInvocationException {
	PackagedProgram prg = PackagedProgram.newBuilder()
		.setEntryPointClassName(TestOptimizerPlan.class.getName())
		.setArguments("/dev/random", "/tmp")
		.build();

	Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
	Plan plan = (Plan) PackagedProgramUtils.getPipelineFromProgram(prg, new Configuration(), 1, true);
	OptimizedPlan op = optimizer.compile(plan);
	assertNotNull(op);

	PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
	assertNotNull(dumper.getOptimizerPlanAsJSON(op));

	// test HTML escaping
	PlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();
	dumper2.setEncodeForHTML(true);
	String htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);

	assertEquals(-1, htmlEscaped.indexOf('\\'));
}
 
Example #2
Source File: DumpCompiledPlanTest.java    From flink with Apache License 2.0
private void verifyOptimizedPlan(Class<?> entrypoint, String... args) throws Exception {
	final PackagedProgram program = PackagedProgram
		.newBuilder()
		.setEntryPointClassName(entrypoint.getName())
		.setArguments(args)
		.build();

	final Pipeline pipeline = PackagedProgramUtils.getPipelineFromProgram(program, new Configuration(), 1, true);

	assertTrue(pipeline instanceof Plan);

	final Plan plan = (Plan) pipeline;

	final OptimizedPlan op = compileNoStats(plan);
	final PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
	final String json = dumper.getOptimizerPlanAsJSON(op);
	try (JsonParser parser = new JsonFactory().createParser(json)) {
		while (parser.nextToken() != null) {
		}
	}
}
 
Example #3
Source File: PreviewPlanDumpTest.java    From flink with Apache License 2.0
private static void verifyPlanDump(Class<?> entrypoint, String... args) throws Exception {
	final PackagedProgram program = PackagedProgram
		.newBuilder()
		.setEntryPointClassName(entrypoint.getName())
		.setArguments(args)
		.build();

	final Pipeline pipeline = PackagedProgramUtils.getPipelineFromProgram(program, new Configuration(), 1, true);

	assertTrue(pipeline instanceof Plan);

	final Plan plan = (Plan) pipeline;

	final List<DataSinkNode> sinks = Optimizer.createPreOptimizedPlan(plan);
	final PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
	final String json = dumper.getPactPlanAsJSON(sinks);

	try (JsonParser parser = new JsonFactory().createParser(json)) {
		while (parser.nextToken() != null) {
		}
	}
}
 
Example #4
Source File: ClientTest.java    From flink with Apache License 2.0
@Test
public void testGetExecutionPlan() throws ProgramInvocationException {
	PackagedProgram prg = new PackagedProgram(TestOptimizerPlan.class, "/dev/random", "/tmp");
	assertNotNull(prg.getPreviewPlan());

	Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
	OptimizedPlan op = (OptimizedPlan) ClusterClient.getOptimizedPlan(optimizer, prg, 1);
	assertNotNull(op);

	PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
	assertNotNull(dumper.getOptimizerPlanAsJSON(op));

	// test HTML escaping
	PlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();
	dumper2.setEncodeForHTML(true);
	String htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);

	assertEquals(-1, htmlEscaped.indexOf('\\'));
}
 
Example #5
Source File: ClientTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testGetExecutionPlan() throws ProgramInvocationException {
	PackagedProgram prg = new PackagedProgram(TestOptimizerPlan.class, "/dev/random", "/tmp");
	assertNotNull(prg.getPreviewPlan());

	Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
	OptimizedPlan op = (OptimizedPlan) ClusterClient.getOptimizedPlan(optimizer, prg, 1);
	assertNotNull(op);

	PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
	assertNotNull(dumper.getOptimizerPlanAsJSON(op));

	// test HTML escaping
	PlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();
	dumper2.setEncodeForHTML(true);
	String htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);

	assertEquals(-1, htmlEscaped.indexOf('\\'));
}
 
Example #6
Source File: LocalExecutor.java    From flink with Apache License 2.0
/**
 * Creates a JSON representation of the given dataflow's execution plan.
 *
 * @param plan The dataflow plan.
 * @return The dataflow's execution plan, as a JSON string.
 * @throws Exception Thrown, if the optimization process that creates the execution plan failed.
 */
public static String optimizerPlanAsJSON(Plan plan) throws Exception {
	final int parallelism = plan.getDefaultParallelism() == ExecutionConfig.PARALLELISM_DEFAULT ? 1 : plan.getDefaultParallelism();

	Optimizer pc = new Optimizer(new DataStatistics(), new Configuration());
	pc.setDefaultParallelism(parallelism);
	OptimizedPlan op = pc.compile(plan);

	return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(op);
}
 
Example #7
Source File: IterationsCompilerTest.java    From flink with Apache License 2.0
@Test
public void testSolutionSetDeltaDependsOnBroadcastVariable() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		
		DataSet<Tuple2<Long, Long>> source =
					env.generateSequence(1, 1000).map(new DuplicateValueScalar<Long>());
		
		DataSet<Tuple2<Long, Long>> invariantInput =
				env.generateSequence(1, 1000).map(new DuplicateValueScalar<Long>());
		
		// iteration from here
		DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iter = source.iterateDelta(source, 1000, 1);
		
		DataSet<Tuple2<Long, Long>> result =
			invariantInput
				.map(new IdentityMapper<Tuple2<Long, Long>>()).withBroadcastSet(iter.getWorkset(), "bc data")
				.join(iter.getSolutionSet()).where(0).equalTo(1).projectFirst(1).projectSecond(1);
		
		iter.closeWith(result.map(new IdentityMapper<Tuple2<Long,Long>>()), result)
				.output(new DiscardingOutputFormat<Tuple2<Long,Long>>());
		
		OptimizedPlan p = compileNoStats(env.createProgramPlan());
		
		// check that the JSON generator accepts this plan
		new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(p);
		
		// check that the JobGraphGenerator accepts the plan
		new JobGraphGenerator().compileJobGraph(p);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #8
Source File: ExecutionPlanCreationTest.java    From flink with Apache License 2.0
@Test
public void testGetExecutionPlan() {
	try {
		PackagedProgram prg = PackagedProgram.newBuilder()
			.setEntryPointClassName(TestOptimizerPlan.class.getName())
			.setArguments("/dev/random", "/tmp")
			.build();

		InetAddress mockAddress = InetAddress.getLocalHost();
		InetSocketAddress mockJmAddress = new InetSocketAddress(mockAddress, 12345);

		Configuration config = new Configuration();

		config.setString(JobManagerOptions.ADDRESS, mockJmAddress.getHostName());
		config.setInteger(JobManagerOptions.PORT, mockJmAddress.getPort());

		Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
		Plan plan = (Plan) PackagedProgramUtils.getPipelineFromProgram(prg, config, -1, true);
		OptimizedPlan op = optimizer.compile(plan);
		assertNotNull(op);

		PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
		assertNotNull(dumper.getOptimizerPlanAsJSON(op));

		// test HTML escaping
		PlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();
		dumper2.setEncodeForHTML(true);
		String htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);

		assertEquals(-1, htmlEscaped.indexOf('\\'));
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #9
Source File: PlanTranslator.java    From flink with Apache License 2.0
@Override
public String translateToJSONExecutionPlan(Pipeline pipeline) {
	checkArgument(pipeline instanceof Plan, "Given pipeline is not a DataSet Plan.");

	Plan plan = (Plan) pipeline;

	Optimizer opt = new Optimizer(
			new DataStatistics(),
			new DefaultCostEstimator(),
			new Configuration());
	OptimizedPlan optPlan = opt.compile(plan);

	return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(optPlan);
}
 
Example #10
Source File: IterationsCompilerTest.java    From flink with Apache License 2.0
@Test
public void testSolutionSetDeltaDependsOnBroadcastVariable() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		
		DataSet<Tuple2<Long, Long>> source =
					env.generateSequence(1, 1000).map(new DuplicateValueScalar<Long>());
		
		DataSet<Tuple2<Long, Long>> invariantInput =
				env.generateSequence(1, 1000).map(new DuplicateValueScalar<Long>());
		
		// iteration from here
		DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iter = source.iterateDelta(source, 1000, 1);
		
		DataSet<Tuple2<Long, Long>> result =
			invariantInput
				.map(new IdentityMapper<Tuple2<Long, Long>>()).withBroadcastSet(iter.getWorkset(), "bc data")
				.join(iter.getSolutionSet()).where(0).equalTo(1).projectFirst(1).projectSecond(1);
		
		iter.closeWith(result.map(new IdentityMapper<Tuple2<Long,Long>>()), result)
				.output(new DiscardingOutputFormat<Tuple2<Long,Long>>());
		
		OptimizedPlan p = compileNoStats(env.createProgramPlan());
		
		// check that the JSON generator accepts this plan
		new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(p);
		
		// check that the JobGraphGenerator accepts the plan
		new JobGraphGenerator().compileJobGraph(p);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #11
Source File: ExecutionPlanCreationTest.java    From flink with Apache License 2.0
@Test
public void testGetExecutionPlan() {
	try {
		PackagedProgram prg = new PackagedProgram(TestOptimizerPlan.class, "/dev/random", "/tmp");
		assertNotNull(prg.getPreviewPlan());

		InetAddress mockAddress = InetAddress.getLocalHost();
		InetSocketAddress mockJmAddress = new InetSocketAddress(mockAddress, 12345);

		Configuration config = new Configuration();

		config.setString(JobManagerOptions.ADDRESS, mockJmAddress.getHostName());
		config.setInteger(JobManagerOptions.PORT, mockJmAddress.getPort());

		Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
		OptimizedPlan op = (OptimizedPlan) ClusterClient.getOptimizedPlan(optimizer, prg, -1);
		assertNotNull(op);

		PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
		assertNotNull(dumper.getOptimizerPlanAsJSON(op));

		// test HTML escaping
		PlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();
		dumper2.setEncodeForHTML(true);
		String htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);

		assertEquals(-1, htmlEscaped.indexOf('\\'));
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #12
Source File: ContextEnvironment.java    From flink with Apache License 2.0
@Override
public String getExecutionPlan() throws Exception {
	Plan plan = createProgramPlan("unnamed job");

	OptimizedPlan op = ClusterClient.getOptimizedPlan(client.compiler, plan, getParallelism());
	PlanJSONDumpGenerator gen = new PlanJSONDumpGenerator();
	return gen.getOptimizerPlanAsJSON(op);
}
 
Example #13
Source File: TestEnvironment.java    From Flink-CEPplus with Apache License 2.0
@Override
public String getExecutionPlan() throws Exception {
	OptimizedPlan op = compileProgram("unused");

	PlanJSONDumpGenerator jsonGen = new PlanJSONDumpGenerator();
	return jsonGen.getOptimizerPlanAsJSON(op);
}
 
Example #14
Source File: LocalExecutor.java    From flink with Apache License 2.0
/**
 * Creates a JSON representation of the given dataflow's execution plan.
 *
 * @param plan The dataflow plan.
 * @return The dataflow's execution plan, as a JSON string.
 * @throws Exception Thrown, if the optimization process that creates the execution plan failed.
 */
@Override
public String getOptimizerPlanAsJSON(Plan plan) throws Exception {
	final int parallelism = plan.getDefaultParallelism() == ExecutionConfig.PARALLELISM_DEFAULT ? 1 : plan.getDefaultParallelism();

	Optimizer pc = new Optimizer(new DataStatistics(), this.baseConfiguration);
	pc.setDefaultParallelism(parallelism);
	OptimizedPlan op = pc.compile(plan);

	return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(op);
}
 
Example #15
Source File: TestEnvironment.java    From flink with Apache License 2.0
@Override
public String getExecutionPlan() throws Exception {
	OptimizedPlan op = compileProgram("unused");

	PlanJSONDumpGenerator jsonGen = new PlanJSONDumpGenerator();
	return jsonGen.getOptimizerPlanAsJSON(op);
}
 
Example #16
Source File: IterationsCompilerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSolutionSetDeltaDependsOnBroadcastVariable() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		
		DataSet<Tuple2<Long, Long>> source =
					env.generateSequence(1, 1000).map(new DuplicateValueScalar<Long>());
		
		DataSet<Tuple2<Long, Long>> invariantInput =
				env.generateSequence(1, 1000).map(new DuplicateValueScalar<Long>());
		
		// iteration from here
		DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iter = source.iterateDelta(source, 1000, 1);
		
		DataSet<Tuple2<Long, Long>> result =
			invariantInput
				.map(new IdentityMapper<Tuple2<Long, Long>>()).withBroadcastSet(iter.getWorkset(), "bc data")
				.join(iter.getSolutionSet()).where(0).equalTo(1).projectFirst(1).projectSecond(1);
		
		iter.closeWith(result.map(new IdentityMapper<Tuple2<Long,Long>>()), result)
				.output(new DiscardingOutputFormat<Tuple2<Long,Long>>());
		
		OptimizedPlan p = compileNoStats(env.createProgramPlan());
		
		// check that the JSON generator accepts this plan
		new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(p);
		
		// check that the JobGraphGenerator accepts the plan
		new JobGraphGenerator().compileJobGraph(p);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #17
Source File: LocalExecutor.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a JSON representation of the given dataflow's execution plan.
 *
 * @param plan The dataflow plan.
 * @return The dataflow's execution plan, as a JSON string.
 * @throws Exception Thrown, if the optimization process that creates the execution plan failed.
 */
@Override
public String getOptimizerPlanAsJSON(Plan plan) throws Exception {
	final int parallelism = plan.getDefaultParallelism() == ExecutionConfig.PARALLELISM_DEFAULT ? 1 : plan.getDefaultParallelism();

	Optimizer pc = new Optimizer(new DataStatistics(), this.baseConfiguration);
	pc.setDefaultParallelism(parallelism);
	OptimizedPlan op = pc.compile(plan);

	return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(op);
}
 
Example #18
Source File: LocalExecutor.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a JSON representation of the given dataflow's execution plan.
 *
 * @param plan The dataflow plan.
 * @return The dataflow's execution plan, as a JSON string.
 * @throws Exception Thrown, if the optimization process that creates the execution plan failed.
 */
public static String optimizerPlanAsJSON(Plan plan) throws Exception {
	final int parallelism = plan.getDefaultParallelism() == ExecutionConfig.PARALLELISM_DEFAULT ? 1 : plan.getDefaultParallelism();

	Optimizer pc = new Optimizer(new DataStatistics(), new Configuration());
	pc.setDefaultParallelism(parallelism);
	OptimizedPlan op = pc.compile(plan);

	return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(op);
}
 
Example #19
Source File: ExecutionPlanCreationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testGetExecutionPlan() {
	try {
		PackagedProgram prg = new PackagedProgram(TestOptimizerPlan.class, "/dev/random", "/tmp");
		assertNotNull(prg.getPreviewPlan());

		InetAddress mockAddress = InetAddress.getLocalHost();
		InetSocketAddress mockJmAddress = new InetSocketAddress(mockAddress, 12345);

		Configuration config = new Configuration();

		config.setString(JobManagerOptions.ADDRESS, mockJmAddress.getHostName());
		config.setInteger(JobManagerOptions.PORT, mockJmAddress.getPort());

		Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
		OptimizedPlan op = (OptimizedPlan) ClusterClient.getOptimizedPlan(optimizer, prg, -1);
		assertNotNull(op);

		PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
		assertNotNull(dumper.getOptimizerPlanAsJSON(op));

		// test HTML escaping
		PlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();
		dumper2.setEncodeForHTML(true);
		String htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);

		assertEquals(-1, htmlEscaped.indexOf('\\'));
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #20
Source File: ContextEnvironment.java    From Flink-CEPplus with Apache License 2.0
@Override
public String getExecutionPlan() throws Exception {
	Plan plan = createProgramPlan("unnamed job");

	OptimizedPlan op = ClusterClient.getOptimizedPlan(client.compiler, plan, getParallelism());
	PlanJSONDumpGenerator gen = new PlanJSONDumpGenerator();
	return gen.getOptimizerPlanAsJSON(op);
}
 
Example #21
Source File: ConnectedComponentsCoGroupTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWorksetConnectedComponents() {
	Plan plan = getConnectedComponentsCoGroupPlan();
	plan.setExecutionConfig(new ExecutionConfig());
	OptimizedPlan optPlan = compileNoStats(plan);
	OptimizerPlanNodeResolver or = getOptimizerPlanNodeResolver(optPlan);

	if (PRINT_PLAN) {
		PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
		String json = dumper.getOptimizerPlanAsJSON(optPlan);
		System.out.println(json);
	}

	SourcePlanNode vertexSource = or.getNode(VERTEX_SOURCE);
	SourcePlanNode edgesSource = or.getNode(EDGES_SOURCE);
	SinkPlanNode sink = or.getNode(SINK);
	WorksetIterationPlanNode iter = or.getNode(ITERATION_NAME);

	DualInputPlanNode neighborsJoin = or.getNode(JOIN_NEIGHBORS_MATCH);
	DualInputPlanNode cogroup = or.getNode(MIN_ID_AND_UPDATE);

	// --------------------------------------------------------------------
	// Plan validation:
	//
	// We expect the plan to go with a sort-merge join, because the CoGroup
	// sorts and the join in the successive iteration can re-exploit the sorting.
	// --------------------------------------------------------------------

	// test all drivers
	Assert.assertEquals(DriverStrategy.NONE, sink.getDriverStrategy());
	Assert.assertEquals(DriverStrategy.NONE, vertexSource.getDriverStrategy());
	Assert.assertEquals(DriverStrategy.NONE, edgesSource.getDriverStrategy());

	Assert.assertEquals(DriverStrategy.INNER_MERGE, neighborsJoin.getDriverStrategy());
	Assert.assertEquals(set0, neighborsJoin.getKeysForInput1());
	Assert.assertEquals(set0, neighborsJoin.getKeysForInput2());

	Assert.assertEquals(DriverStrategy.CO_GROUP, cogroup.getDriverStrategy());
	Assert.assertEquals(set0, cogroup.getKeysForInput1());
	Assert.assertEquals(set0, cogroup.getKeysForInput2());

	// test all the shipping strategies
	Assert.assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialSolutionSetInput().getShipStrategy());
	Assert.assertEquals(set0, iter.getInitialSolutionSetInput().getShipStrategyKeys());
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialWorksetInput().getShipStrategy());
	Assert.assertEquals(set0, iter.getInitialWorksetInput().getShipStrategyKeys());

	Assert.assertEquals(ShipStrategyType.FORWARD, neighborsJoin.getInput1().getShipStrategy()); // workset
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, neighborsJoin.getInput2().getShipStrategy()); // edges
	Assert.assertEquals(set0, neighborsJoin.getInput2().getShipStrategyKeys());
	Assert.assertTrue(neighborsJoin.getInput2().getTempMode().isCached());

	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, cogroup.getInput1().getShipStrategy()); // min id
	Assert.assertEquals(ShipStrategyType.FORWARD, cogroup.getInput2().getShipStrategy()); // solution set

	// test all the local strategies
	Assert.assertEquals(LocalStrategy.NONE, sink.getInput().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, iter.getInitialSolutionSetInput().getLocalStrategy());

	// the sort for the neighbor join in the first iteration is pushed out of the loop
	Assert.assertEquals(LocalStrategy.SORT, iter.getInitialWorksetInput().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, neighborsJoin.getInput1().getLocalStrategy()); // workset
	Assert.assertEquals(LocalStrategy.SORT, neighborsJoin.getInput2().getLocalStrategy()); // edges

	Assert.assertEquals(LocalStrategy.SORT, cogroup.getInput1().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, cogroup.getInput2().getLocalStrategy()); // solution set

	// check the caches
	Assert.assertTrue(TempMode.CACHED == neighborsJoin.getInput2().getTempMode());

	JobGraphGenerator jgg = new JobGraphGenerator();
	jgg.compileJobGraph(optPlan);
}
 
Example #22
Source File: ConnectedComponentsCoGroupTest.java    From flink with Apache License 2.0
@Test
public void testWorksetConnectedComponents() {
	Plan plan = getConnectedComponentsCoGroupPlan();
	plan.setExecutionConfig(new ExecutionConfig());
	OptimizedPlan optPlan = compileNoStats(plan);
	OptimizerPlanNodeResolver or = getOptimizerPlanNodeResolver(optPlan);

	if (PRINT_PLAN) {
		PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
		String json = dumper.getOptimizerPlanAsJSON(optPlan);
		System.out.println(json);
	}

	SourcePlanNode vertexSource = or.getNode(VERTEX_SOURCE);
	SourcePlanNode edgesSource = or.getNode(EDGES_SOURCE);
	SinkPlanNode sink = or.getNode(SINK);
	WorksetIterationPlanNode iter = or.getNode(ITERATION_NAME);

	DualInputPlanNode neighborsJoin = or.getNode(JOIN_NEIGHBORS_MATCH);
	DualInputPlanNode cogroup = or.getNode(MIN_ID_AND_UPDATE);

	// --------------------------------------------------------------------
	// Plan validation:
	//
	// We expect the plan to go with a sort-merge join, because the CoGroup
	// sorts and the join in the successive iteration can re-exploit the sorting.
	// --------------------------------------------------------------------

	// test all drivers
	Assert.assertEquals(DriverStrategy.NONE, sink.getDriverStrategy());
	Assert.assertEquals(DriverStrategy.NONE, vertexSource.getDriverStrategy());
	Assert.assertEquals(DriverStrategy.NONE, edgesSource.getDriverStrategy());

	Assert.assertEquals(DriverStrategy.INNER_MERGE, neighborsJoin.getDriverStrategy());
	Assert.assertEquals(set0, neighborsJoin.getKeysForInput1());
	Assert.assertEquals(set0, neighborsJoin.getKeysForInput2());

	Assert.assertEquals(DriverStrategy.CO_GROUP, cogroup.getDriverStrategy());
	Assert.assertEquals(set0, cogroup.getKeysForInput1());
	Assert.assertEquals(set0, cogroup.getKeysForInput2());

	// test all the shipping strategies
	Assert.assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialSolutionSetInput().getShipStrategy());
	Assert.assertEquals(set0, iter.getInitialSolutionSetInput().getShipStrategyKeys());
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialWorksetInput().getShipStrategy());
	Assert.assertEquals(set0, iter.getInitialWorksetInput().getShipStrategyKeys());

	Assert.assertEquals(ShipStrategyType.FORWARD, neighborsJoin.getInput1().getShipStrategy()); // workset
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, neighborsJoin.getInput2().getShipStrategy()); // edges
	Assert.assertEquals(set0, neighborsJoin.getInput2().getShipStrategyKeys());
	Assert.assertTrue(neighborsJoin.getInput2().getTempMode().isCached());

	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, cogroup.getInput1().getShipStrategy()); // min id
	Assert.assertEquals(ShipStrategyType.FORWARD, cogroup.getInput2().getShipStrategy()); // solution set

	// test all the local strategies
	Assert.assertEquals(LocalStrategy.NONE, sink.getInput().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, iter.getInitialSolutionSetInput().getLocalStrategy());

	// the sort for the neighbor join in the first iteration is pushed out of the loop
	Assert.assertEquals(LocalStrategy.SORT, iter.getInitialWorksetInput().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, neighborsJoin.getInput1().getLocalStrategy()); // workset
	Assert.assertEquals(LocalStrategy.SORT, neighborsJoin.getInput2().getLocalStrategy()); // edges

	Assert.assertEquals(LocalStrategy.SORT, cogroup.getInput1().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, cogroup.getInput2().getLocalStrategy()); // solution set

	// check the caches
	Assert.assertTrue(TempMode.CACHED == neighborsJoin.getInput2().getTempMode());

	JobGraphGenerator jgg = new JobGraphGenerator();
	jgg.compileJobGraph(optPlan);
}
 
Example #23
Source File: ConnectedComponentsCoGroupTest.java    From flink with Apache License 2.0
@Test
public void testWorksetConnectedComponents() throws Exception {
	Plan plan = getConnectedComponentsCoGroupPlan();
	plan.setExecutionConfig(new ExecutionConfig());
	OptimizedPlan optPlan = compileNoStats(plan);
	OptimizerPlanNodeResolver or = getOptimizerPlanNodeResolver(optPlan);

	if (PRINT_PLAN) {
		PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
		String json = dumper.getOptimizerPlanAsJSON(optPlan);
		System.out.println(json);
	}

	SourcePlanNode vertexSource = or.getNode(VERTEX_SOURCE);
	SourcePlanNode edgesSource = or.getNode(EDGES_SOURCE);
	SinkPlanNode sink = or.getNode(SINK);
	WorksetIterationPlanNode iter = or.getNode(ITERATION_NAME);

	DualInputPlanNode neighborsJoin = or.getNode(JOIN_NEIGHBORS_MATCH);
	DualInputPlanNode cogroup = or.getNode(MIN_ID_AND_UPDATE);

	// --------------------------------------------------------------------
	// Plan validation:
	//
	// We expect the plan to go with a sort-merge join, because the CoGroup
	// sorts and the join in the successive iteration can re-exploit the sorting.
	// --------------------------------------------------------------------

	// test all drivers
	Assert.assertEquals(DriverStrategy.NONE, sink.getDriverStrategy());
	Assert.assertEquals(DriverStrategy.NONE, vertexSource.getDriverStrategy());
	Assert.assertEquals(DriverStrategy.NONE, edgesSource.getDriverStrategy());

	Assert.assertEquals(DriverStrategy.INNER_MERGE, neighborsJoin.getDriverStrategy());
	Assert.assertEquals(set0, neighborsJoin.getKeysForInput1());
	Assert.assertEquals(set0, neighborsJoin.getKeysForInput2());

	Assert.assertEquals(DriverStrategy.CO_GROUP, cogroup.getDriverStrategy());
	Assert.assertEquals(set0, cogroup.getKeysForInput1());
	Assert.assertEquals(set0, cogroup.getKeysForInput2());

	// test all the shipping strategies
	Assert.assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialSolutionSetInput().getShipStrategy());
	Assert.assertEquals(set0, iter.getInitialSolutionSetInput().getShipStrategyKeys());
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialWorksetInput().getShipStrategy());
	Assert.assertEquals(set0, iter.getInitialWorksetInput().getShipStrategyKeys());

	Assert.assertEquals(ShipStrategyType.FORWARD, neighborsJoin.getInput1().getShipStrategy()); // workset
	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, neighborsJoin.getInput2().getShipStrategy()); // edges
	Assert.assertEquals(set0, neighborsJoin.getInput2().getShipStrategyKeys());
	Assert.assertTrue(neighborsJoin.getInput2().getTempMode().isCached());

	Assert.assertEquals(ShipStrategyType.PARTITION_HASH, cogroup.getInput1().getShipStrategy()); // min id
	Assert.assertEquals(ShipStrategyType.FORWARD, cogroup.getInput2().getShipStrategy()); // solution set

	// test all the local strategies
	Assert.assertEquals(LocalStrategy.NONE, sink.getInput().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, iter.getInitialSolutionSetInput().getLocalStrategy());

	// the sort for the neighbor join in the first iteration is pushed out of the loop
	Assert.assertEquals(LocalStrategy.SORT, iter.getInitialWorksetInput().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, neighborsJoin.getInput1().getLocalStrategy()); // workset
	Assert.assertEquals(LocalStrategy.SORT, neighborsJoin.getInput2().getLocalStrategy()); // edges

	Assert.assertEquals(LocalStrategy.SORT, cogroup.getInput1().getLocalStrategy());
	Assert.assertEquals(LocalStrategy.NONE, cogroup.getInput2().getLocalStrategy()); // solution set

	// check the caches
	Assert.assertTrue(TempMode.CACHED == neighborsJoin.getInput2().getTempMode());

	JobGraphGenerator jgg = new JobGraphGenerator();
	jgg.compileJobGraph(optPlan);
}
 
Example #24
Source File: RemoteExecutor.java    From Flink-CEPplus with Apache License 2.0
@Override
public String getOptimizerPlanAsJSON(Plan plan) throws Exception {
	Optimizer opt = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), new Configuration());
	OptimizedPlan optPlan = opt.compile(plan);
	return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(optPlan);
}
 
Example #25
Source File: ClusterClient.java    From Flink-CEPplus with Apache License 2.0
public static String getOptimizedPlanAsJson(Optimizer compiler, PackagedProgram prog, int parallelism)
		throws CompilerException, ProgramInvocationException {
	PlanJSONDumpGenerator jsonGen = new PlanJSONDumpGenerator();
	return jsonGen.getOptimizerPlanAsJSON((OptimizedPlan) getOptimizedPlan(compiler, prog, parallelism));
}
 
Example #26
Source File: PackagedProgram.java    From Flink-CEPplus with Apache License 2.0
/**
 * Returns the analyzed plan without any optimizations.
 *
 * @return
 *         the analyzed plan without any optimizations.
 * @throws ProgramInvocationException Thrown if an error occurred in the
 *  user-provided pact assembler. This may indicate
 *         missing parameters for generation.
 */
public String getPreviewPlan() throws ProgramInvocationException {
	Thread.currentThread().setContextClassLoader(this.getUserCodeClassLoader());
	List<DataSinkNode> previewPlan;

	if (isUsingProgramEntryPoint()) {
		previewPlan = Optimizer.createPreOptimizedPlan(getPlan());
	}
	else if (isUsingInteractiveMode()) {
		// temporary hack to support the web client
		PreviewPlanEnvironment env = new PreviewPlanEnvironment();
		env.setAsContext();
		try {
			invokeInteractiveModeForExecution();
		}
		catch (ProgramInvocationException e) {
			throw e;
		}
		catch (Throwable t) {
			// the invocation gets aborted with the preview plan
			if (env.previewPlan == null) {
				if (env.preview != null) {
					return env.preview;
				} else {
					throw new ProgramInvocationException("The program caused an error: ", getPlan().getJobId(), t);
				}
			}
		}
		finally {
			env.unsetAsContext();
		}

		if (env.previewPlan != null) {
		previewPlan = env.previewPlan;
		} else {
			throw new ProgramInvocationException(
				"The program plan could not be fetched. The program silently swallowed the control flow exceptions.",
				getPlan().getJobId());
		}
	}
	else {
		throw new RuntimeException();
	}

	PlanJSONDumpGenerator jsonGen = new PlanJSONDumpGenerator();
	StringWriter string = new StringWriter(1024);
	try (PrintWriter pw = new PrintWriter(string)) {
		jsonGen.dumpPactPlanAsJSON(previewPlan, pw);
	}
	return string.toString();

}
 
Example #27
Source File: CliFrontend.java    From Flink-CEPplus with Apache License 2.0
/**
 * Executes the info action.
 *
 * @param args Command line arguments for the info action.
 */
protected void info(String[] args) throws CliArgsException, FileNotFoundException, ProgramInvocationException {
	LOG.info("Running 'info' command.");

	final Options commandOptions = CliFrontendParser.getInfoCommandOptions();

	final CommandLine commandLine = CliFrontendParser.parse(commandOptions, args, true);

	InfoOptions infoOptions = new InfoOptions(commandLine);

	// evaluate help flag
	if (infoOptions.isPrintHelp()) {
		CliFrontendParser.printHelpForInfo();
		return;
	}

	if (infoOptions.getJarFilePath() == null) {
		throw new CliArgsException("The program JAR file was not specified.");
	}

	// -------- build the packaged program -------------

	LOG.info("Building program from JAR file");
	final PackagedProgram program = buildProgram(infoOptions);

	try {
		int parallelism = infoOptions.getParallelism();
		if (ExecutionConfig.PARALLELISM_DEFAULT == parallelism) {
			parallelism = defaultParallelism;
		}

		LOG.info("Creating program plan dump");

		Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), configuration);
		FlinkPlan flinkPlan = ClusterClient.getOptimizedPlan(compiler, program, parallelism);

		String jsonPlan = null;
		if (flinkPlan instanceof OptimizedPlan) {
			jsonPlan = new PlanJSONDumpGenerator().getOptimizerPlanAsJSON((OptimizedPlan) flinkPlan);
		} else if (flinkPlan instanceof StreamingPlan) {
			jsonPlan = ((StreamingPlan) flinkPlan).getStreamingPlanAsJSON();
		}

		if (jsonPlan != null) {
			System.out.println("----------------------- Execution Plan -----------------------");
			System.out.println(jsonPlan);
			System.out.println("--------------------------------------------------------------");
		}
		else {
			System.out.println("JSON plan could not be generated.");
		}

		String description = program.getDescription();
		if (description != null) {
			System.out.println();
			System.out.println(description);
		}
		else {
			System.out.println();
			System.out.println("No description provided.");
		}
	}
	finally {
		program.deleteExtractedLibraries();
	}
}
 
Example #28
Source File: CliFrontend.java    From flink with Apache License 2.0
/**
 * Executes the info action.
 *
 * @param args Command line arguments for the info action.
 */
protected void info(String[] args) throws CliArgsException, FileNotFoundException, ProgramInvocationException {
	LOG.info("Running 'info' command.");

	final Options commandOptions = CliFrontendParser.getInfoCommandOptions();

	final CommandLine commandLine = CliFrontendParser.parse(commandOptions, args, true);

	InfoOptions infoOptions = new InfoOptions(commandLine);

	// evaluate help flag
	if (infoOptions.isPrintHelp()) {
		CliFrontendParser.printHelpForInfo();
		return;
	}

	if (infoOptions.getJarFilePath() == null) {
		throw new CliArgsException("The program JAR file was not specified.");
	}

	// -------- build the packaged program -------------

	LOG.info("Building program from JAR file");
	final PackagedProgram program = buildProgram(infoOptions);

	try {
		int parallelism = infoOptions.getParallelism();
		if (ExecutionConfig.PARALLELISM_DEFAULT == parallelism) {
			parallelism = defaultParallelism;
		}

		LOG.info("Creating program plan dump");

		Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), configuration);
		FlinkPlan flinkPlan = ClusterClient.getOptimizedPlan(compiler, program, parallelism);

		String jsonPlan = null;
		if (flinkPlan instanceof OptimizedPlan) {
			jsonPlan = new PlanJSONDumpGenerator().getOptimizerPlanAsJSON((OptimizedPlan) flinkPlan);
		} else if (flinkPlan instanceof StreamingPlan) {
			jsonPlan = ((StreamingPlan) flinkPlan).getStreamingPlanAsJSON();
		}

		if (jsonPlan != null) {
			System.out.println("----------------------- Execution Plan -----------------------");
			System.out.println(jsonPlan);
			System.out.println("--------------------------------------------------------------");
		}
		else {
			System.out.println("JSON plan could not be generated.");
		}

		String description = program.getDescription();
		if (description != null) {
			System.out.println();
			System.out.println(description);
		}
		else {
			System.out.println();
			System.out.println("No description provided.");
		}
	}
	finally {
		program.deleteExtractedLibraries();
	}
}
 
Example #29
Source File: PackagedProgram.java    From flink with Apache License 2.0
/**
 * Returns the analyzed plan without any optimizations.
 *
 * @return
 *         the analyzed plan without any optimizations.
 * @throws ProgramInvocationException Thrown if an error occurred in the
 *  user-provided pact assembler. This may indicate
 *         missing parameters for generation.
 */
public String getPreviewPlan() throws ProgramInvocationException {
	Thread.currentThread().setContextClassLoader(this.getUserCodeClassLoader());
	List<DataSinkNode> previewPlan;

	if (isUsingProgramEntryPoint()) {
		previewPlan = Optimizer.createPreOptimizedPlan(getPlan());
	}
	else if (isUsingInteractiveMode()) {
		// temporary hack to support the web client
		PreviewPlanEnvironment env = new PreviewPlanEnvironment();
		env.setAsContext();
		try {
			invokeInteractiveModeForExecution();
		}
		catch (ProgramInvocationException e) {
			throw e;
		}
		catch (Throwable t) {
			// the invocation gets aborted with the preview plan
			if (env.previewPlan == null) {
				if (env.preview != null) {
					return env.preview;
				} else {
					throw new ProgramInvocationException("The program caused an error: ", getPlan().getJobId(), t);
				}
			}
		}
		finally {
			env.unsetAsContext();
		}

		if (env.previewPlan != null) {
		previewPlan = env.previewPlan;
		} else {
			throw new ProgramInvocationException(
				"The program plan could not be fetched. The program silently swallowed the control flow exceptions.",
				getPlan().getJobId());
		}
	}
	else {
		throw new RuntimeException();
	}

	PlanJSONDumpGenerator jsonGen = new PlanJSONDumpGenerator();
	StringWriter string = new StringWriter(1024);
	try (PrintWriter pw = new PrintWriter(string)) {
		jsonGen.dumpPactPlanAsJSON(previewPlan, pw);
	}
	return string.toString();

}
 
Example #30
Source File: ClusterClient.java    From flink with Apache License 2.0
public static String getOptimizedPlanAsJson(Optimizer compiler, PackagedProgram prog, int parallelism)
		throws CompilerException, ProgramInvocationException {
	PlanJSONDumpGenerator jsonGen = new PlanJSONDumpGenerator();
	return jsonGen.getOptimizerPlanAsJSON((OptimizedPlan) getOptimizedPlan(compiler, prog, parallelism));
}