Java Code Examples for org.apache.flink.api.java.ExecutionEnvironment#setRestartStrategy()

The following examples show how to use org.apache.flink.api.java.ExecutionEnvironment#setRestartStrategy(). They are taken from open source projects; the source file and originating project are noted above each example.
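As a quick orientation before the examples, here is a minimal sketch (not taken from any of the projects below) of how setRestartStrategy() is typically configured on a batch ExecutionEnvironment, using the standard factory methods of org.apache.flink.api.common.restartstrategy.RestartStrategies:

import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.ExecutionEnvironment;

public class RestartStrategySketch {
	public static void main(String[] args) {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// Restart the job up to 3 times, waiting 10 seconds between attempts.
		env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(10)));

		// Alternative: tolerate at most 3 failures per 5-minute interval, with 10 seconds between restarts.
		// env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.minutes(5), Time.seconds(10)));

		// Alternative: fail the job on the first task failure instead of restarting.
		// env.setRestartStrategy(RestartStrategies.noRestart());
	}
}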
Example 1
Source File: PythonPlanBinder.java    From Flink-CEPplus with Apache License 2.0
private void receiveParameters(ExecutionEnvironment env) throws IOException {
	for (int x = 0; x < Parameters.values().length; x++) {
		Tuple value = (Tuple) streamer.getRecord(true);
		switch (Parameters.valueOf(((String) value.getField(0)).toUpperCase())) {
			case DOP:
				Integer dop = value.<Integer>getField(1);
				env.setParallelism(dop);
				break;
			case RETRY:
				int retry = value.<Integer>getField(1);
				env.setRestartStrategy(RestartStrategies.fixedDelayRestart(retry, 10000L));
				break;
			case ID:
				currentEnvironmentID = value.<Integer>getField(1);
				break;
		}
	}
	if (env.getParallelism() < 0) {
		env.setParallelism(1);
	}
}
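In the RETRY case above, the second argument of this fixedDelayRestart overload is the delay between restart attempts in milliseconds, so fixedDelayRestart(retry, 10000L) allows up to retry restart attempts with a 10-second pause between them.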
 
Example 2
Source File: TaskFailureITCase.java    From Flink-CEPplus with Apache License 2.0
private void executeTask(MapFunction<Long, Long> mapper, int retries) throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(retries, 0));
	List<Long> result = env.generateSequence(1, 9)
			.map(mapper)
			.collect();
	MultipleProgramsTestBase.compareResultAsText(result, "1\n2\n3\n4\n5\n6\n7\n8\n9");
}
 
Example 3
Source File: SimpleRecoveryITCaseBase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testRestartMultipleTimes() {
	try {
		List<Long> resultCollection = new ArrayList<Long>();

		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		env.setParallelism(4);
		env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, 100));
		env.getConfig().disableSysoutLogging();

		env.generateSequence(1, 10)
				.rebalance()
				.map(new FailingMapper3<Long>())
				.reduce(new ReduceFunction<Long>() {
					@Override
					public Long reduce(Long value1, Long value2) {
						return value1 + value2;
					}
				})
				.output(new LocalCollectionOutputFormat<Long>(resultCollection));

		executeAndRunAssertions(env);

		long sum = 0;
		for (long l : resultCollection) {
			sum += l;
		}
		assertEquals(55, sum);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	} finally {
		FailingMapper3.failuresBeforeSuccess = 3;
	}
}
 
Example 4
Source File: TaskFailureITCase.java    From flink with Apache License 2.0
private void executeTask(MapFunction<Long, Long> mapper, int retries) throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(retries, 0));
	List<Long> result = env.generateSequence(1, 9)
			.map(mapper)
			.collect();
	MultipleProgramsTestBase.compareResultAsText(result, "1\n2\n3\n4\n5\n6\n7\n8\n9");
}
 
Example 5
Source File: SimpleRecoveryITCaseBase.java    From flink with Apache License 2.0
@Test
public void testRestartMultipleTimes() {
	try {
		List<Long> resultCollection = new ArrayList<Long>();

		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		env.setParallelism(4);
		env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, 100));
		env.getConfig().disableSysoutLogging();

		env.generateSequence(1, 10)
				.rebalance()
				.map(new FailingMapper3<Long>())
				.reduce(new ReduceFunction<Long>() {
					@Override
					public Long reduce(Long value1, Long value2) {
						return value1 + value2;
					}
				})
				.output(new LocalCollectionOutputFormat<Long>(resultCollection));

		executeAndRunAssertions(env);

		long sum = 0;
		for (long l : resultCollection) {
			sum += l;
		}
		assertEquals(55, sum);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	} finally {
		FailingMapper3.failuresBeforeSuccess = 3;
	}
}
 
Example 6
Source File: BatchFineGrainedRecoveryITCase.java    From flink with Apache License 2.0
private static ExecutionEnvironment createExecutionEnvironment() {
	@SuppressWarnings("StaticVariableUsedBeforeInitialization")
	ExecutionEnvironment env = new TestEnvironment(miniCluster, 1, true);
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(MAX_JOB_RESTART_ATTEMPTS, Time.milliseconds(10)));
	env.getConfig().setExecutionMode(ExecutionMode.BATCH_FORCED); // forces all partitions to be blocking
	return env;
}
 
Example 7
Source File: TaskFailureITCase.java    From flink with Apache License 2.0
private void executeTask(MapFunction<Long, Long> mapper, int retries) throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(1);
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(retries, 0));
	List<Long> result = env.generateSequence(1, 9)
			.map(mapper)
			.collect();
	MultipleProgramsTestBase.compareResultAsText(result, "1\n2\n3\n4\n5\n6\n7\n8\n9");
}
 
Example 8
Source File: SimpleRecoveryITCaseBase.java    From flink with Apache License 2.0
@Test
public void testRestartMultipleTimes() {
	try {
		List<Long> resultCollection = new ArrayList<Long>();

		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		env.setParallelism(4);
		env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, 100));

		env.generateSequence(1, 10)
				.rebalance()
				.map(new FailingMapper3<Long>())
				.reduce(new ReduceFunction<Long>() {
					@Override
					public Long reduce(Long value1, Long value2) {
						return value1 + value2;
					}
				})
				.output(new LocalCollectionOutputFormat<Long>(resultCollection));

		executeAndRunAssertions(env);

		long sum = 0;
		for (long l : resultCollection) {
			sum += l;
		}
		assertEquals(55, sum);
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	} finally {
		FailingMapper3.failuresBeforeSuccess = 3;
	}
}
 
Example 9
Source File: BatchFineGrainedRecoveryITCase.java    From flink with Apache License 2.0
private static ExecutionEnvironment createExecutionEnvironment() {
	@SuppressWarnings("StaticVariableUsedBeforeInitialization")
	ExecutionEnvironment env = new TestEnvironment(miniCluster, 1, true);
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(MAX_JOB_RESTART_ATTEMPTS, Time.milliseconds(10)));
	env.getConfig().setExecutionMode(ExecutionMode.BATCH_FORCED); // forces all partitions to be blocking
	return env;
}
 
Example 10
Source File: ExecutionContext.java    From Flink-CEPplus with Apache License 2.0
private ExecutionEnvironment createExecutionEnvironment() {
	final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	execEnv.setRestartStrategy(mergedEnv.getExecution().getRestartStrategy());
	execEnv.setParallelism(mergedEnv.getExecution().getParallelism());
	return execEnv;
}
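The example above shows that setRestartStrategy() takes a RestartStrategies.RestartStrategyConfiguration, so the strategy can be built once (here it comes from the merged environment object mergedEnv) and then applied to the environment. A minimal sketch of the same pattern with a hard-coded configuration object; the method name is illustrative and not part of the original file:

private static ExecutionEnvironment createConfiguredEnvironment() {
	// The configuration could just as well be read from external settings instead of being hard-coded.
	RestartStrategies.RestartStrategyConfiguration restartConfig =
			RestartStrategies.fixedDelayRestart(3, Time.seconds(10));
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	execEnv.setRestartStrategy(restartConfig);
	return execEnv;
}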
 
Example 11
Source File: TaskManagerProcessFailureBatchRecoveryITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public void testTaskManagerFailure(Configuration configuration, final File coordinateDir) throws Exception {

	ExecutionEnvironment env = ExecutionEnvironment.createRemoteEnvironment("localhost", 1337, configuration);
	env.setParallelism(PARALLELISM);
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 0L));
	env.getConfig().setExecutionMode(executionMode);
	env.getConfig().disableSysoutLogging();

	final long numElements = 100000L;
	final DataSet<Long> result = env.generateSequence(1, numElements)

			// make sure every mapper is involved (no one is skipped because of lazy split assignment)
			.rebalance()
					// the majority of the behavior is in the MapFunction
			.map(new RichMapFunction<Long, Long>() {

				private final File proceedFile = new File(coordinateDir, PROCEED_MARKER_FILE);

				private boolean markerCreated = false;
				private boolean checkForProceedFile = true;

				@Override
				public Long map(Long value) throws Exception {
					if (!markerCreated) {
						int taskIndex = getRuntimeContext().getIndexOfThisSubtask();
						touchFile(new File(coordinateDir, READY_MARKER_FILE_PREFIX + taskIndex));
						markerCreated = true;
					}

					// check if the proceed file exists
					if (checkForProceedFile) {
						if (proceedFile.exists()) {
							checkForProceedFile = false;
						} else {
							// otherwise wait so that we make slow progress
							Thread.sleep(100);
						}
					}
					return value;
				}
			})
			.reduce(new ReduceFunction<Long>() {
				@Override
				public Long reduce(Long value1, Long value2) {
					return value1 + value2;
				}
			});

	long sum = result.collect().get(0);
	assertEquals(numElements * (numElements + 1L) / 2L, sum);
}
 
Example 12
Source File: ExecutionContext.java    From flink with Apache License 2.0
private ExecutionEnvironment createExecutionEnvironment() {
	final ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	execEnv.setRestartStrategy(mergedEnv.getExecution().getRestartStrategy());
	execEnv.setParallelism(mergedEnv.getExecution().getParallelism());
	return execEnv;
}
 
Example 13
Source File: TaskManagerProcessFailureBatchRecoveryITCase.java    From flink with Apache License 2.0
@Override
public void testTaskManagerFailure(Configuration configuration, final File coordinateDir) throws Exception {

	ExecutionEnvironment env = ExecutionEnvironment.createRemoteEnvironment("localhost", 1337, configuration);
	env.setParallelism(PARALLELISM);
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(2, 0L));
	env.getConfig().setExecutionMode(executionMode);
	env.getConfig().disableSysoutLogging();

	final long numElements = 100000L;
	final DataSet<Long> result = env.generateSequence(1, numElements)

			// make sure every mapper is involved (no one is skipped because of lazy split assignment)
			.rebalance()
					// the majority of the behavior is in the MapFunction
			.map(new RichMapFunction<Long, Long>() {

				private final File proceedFile = new File(coordinateDir, PROCEED_MARKER_FILE);

				private boolean markerCreated = false;
				private boolean checkForProceedFile = true;

				@Override
				public Long map(Long value) throws Exception {
					if (!markerCreated) {
						int taskIndex = getRuntimeContext().getIndexOfThisSubtask();
						touchFile(new File(coordinateDir, READY_MARKER_FILE_PREFIX + taskIndex));
						markerCreated = true;
					}

					// check if the proceed file exists
					if (checkForProceedFile) {
						if (proceedFile.exists()) {
							checkForProceedFile = false;
						} else {
							// otherwise wait so that we make slow progress
							Thread.sleep(100);
						}
					}
					return value;
				}
			})
			.reduce(new ReduceFunction<Long>() {
				@Override
				public Long reduce(Long value1, Long value2) {
					return value1 + value2;
				}
			});

	long sum = result.collect().get(0);
	assertEquals(numElements * (numElements + 1L) / 2L, sum);
}
 
Example 14
Source File: FlinkPravegaInputFormatITCase.java    From flink-connectors with Apache License 2.0
/**
 * Verifies that the input format reads all records exactly-once in the presence of job failures.
 */
@Test
public void testBatchInputWithFailure() throws Exception {
    final int numElements = 100;

    // set up the stream
    final String streamName = RandomStringUtils.randomAlphabetic(20);
    SETUP_UTILS.createTestStream(streamName, 3);

    try (
            final EventStreamWriter<Integer> eventWriter = SETUP_UTILS.getIntegerWriter(streamName);

            // create the producer that writes to the stream
            final ThrottledIntegerWriter producer = new ThrottledIntegerWriter(
                    eventWriter,
                    numElements,
                    numElements + 1, // no need to block writer for a batch test
                    0,
                    false
            )
    ) {
        // write batch input
        producer.start();
        producer.sync();

        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 1000L));
        env.setParallelism(3);

        // simple pipeline that reads from Pravega and collects the events
        List<Integer> integers = env.createInput(
                FlinkPravegaInputFormat.<Integer>builder()
                        .forStream(streamName)
                        .withPravegaConfig(SETUP_UTILS.getPravegaConfig())
                        .withDeserializationSchema(new IntegerDeserializationSchema())
                        .build(),
                BasicTypeInfo.INT_TYPE_INFO
        ).map(new FailOnceMapper(numElements / 2)).collect();

        // verify that the job did fail, and all events were still read
        Assert.assertTrue(FailOnceMapper.hasFailed());
        Assert.assertEquals(numElements, integers.size());

        FailOnceMapper.reset();
    }
}
 
Example 15
Source File: TaskManagerProcessFailureBatchRecoveryITCase.java    From flink with Apache License 2.0
@Override
public void testTaskManagerFailure(Configuration configuration, final File coordinateDir) throws Exception {

	ExecutionEnvironment env = ExecutionEnvironment.createRemoteEnvironment("localhost", 1337, configuration);
	env.setParallelism(PARALLELISM);
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(2, 0L));
	env.getConfig().setExecutionMode(executionMode);

	final long numElements = 100000L;
	final DataSet<Long> result = env.generateSequence(1, numElements)

			// make sure every mapper is involved (no one is skipped because of lazy split assignment)
			.rebalance()
					// the majority of the behavior is in the MapFunction
			.map(new RichMapFunction<Long, Long>() {

				private final File proceedFile = new File(coordinateDir, PROCEED_MARKER_FILE);

				private boolean markerCreated = false;
				private boolean checkForProceedFile = true;

				@Override
				public Long map(Long value) throws Exception {
					if (!markerCreated) {
						int taskIndex = getRuntimeContext().getIndexOfThisSubtask();
						touchFile(new File(coordinateDir, READY_MARKER_FILE_PREFIX + taskIndex));
						markerCreated = true;
					}

					// check if the proceed file exists
					if (checkForProceedFile) {
						if (proceedFile.exists()) {
							checkForProceedFile = false;
						} else {
							// otherwise wait so that we make slow progress
							Thread.sleep(100);
						}
					}
					return value;
				}
			})
			.reduce(new ReduceFunction<Long>() {
				@Override
				public Long reduce(Long value1, Long value2) {
					return value1 + value2;
				}
			});

	long sum = result.collect().get(0);
	assertEquals(numElements * (numElements + 1L) / 2L, sum);
}