Java Code Examples for org.apache.flink.runtime.operators.util.TaskConfig

The following examples show how to use org.apache.flink.runtime.operators.util.TaskConfig. These examples are extracted from open source projects; each one lists the project, source file, and license it was taken from.
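Most of the examples below follow the same pattern: a TaskConfig is created over a plain Configuration, the job-graph side writes the driver class, driver strategy, user code wrapper, and stub parameters into it, and the runtime side reads them back. The following minimal sketch illustrates that round trip using only TaskConfig calls that appear in the examples on this page; the MapDriver/RichMapFunction pairing, the class and parameter names, and the printed values are illustrative assumptions, not taken from any of the listed files.

// Minimal sketch (assumed example, not from a Flink source file): write settings into a
// TaskConfig and read them back through another TaskConfig over the same Configuration.
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.operators.util.UserCodeClassWrapper;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.operators.DriverStrategy;
import org.apache.flink.runtime.operators.MapDriver;
import org.apache.flink.runtime.operators.util.TaskConfig;

public class TaskConfigSketch {

	public static void main(String[] args) {
		// TaskConfig is a thin view on top of a Configuration; everything it
		// stores ends up as key/value pairs in the wrapped Configuration.
		Configuration underlying = new Configuration();
		TaskConfig config = new TaskConfig(underlying);

		// Describe the driver and its strategy, as the JobGraphGenerator examples do.
		config.setDriver(MapDriver.class);
		config.setDriverStrategy(DriverStrategy.MAP);

		// Attach the user code class and its stub parameters.
		config.setStubWrapper(new UserCodeClassWrapper<>(MyMapFunction.class));
		Configuration stubParameters = new Configuration();
		stubParameters.setString("parameter1", "abc123");
		config.setStubParameters(stubParameters);

		// On the reading side (e.g. inside a task), a new TaskConfig over the same
		// Configuration sees the values that were written above.
		TaskConfig restored = new TaskConfig(underlying);
		System.out.println(restored.getDriverStrategy());                                // MAP
		System.out.println(restored.getStubParameters().getString("parameter1", null));  // abc123
	}

	/** Illustrative user function; only needed so there is a class to wrap. */
	public static class MyMapFunction extends RichMapFunction<String, String> {
		@Override
		public String map(String value) {
			return value;
		}
	}
}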
Example 1
Source Project: Flink-CEPplus   Source File: IterationHeadTask.java    License: Apache License 2.0
@Override
protected void initOutputs() throws Exception {
	// initialize the regular outputs first (the ones into the step function).
	super.initOutputs();

	// at this time, the outputs to the step function are created
	// add the outputs for the final solution
	List<RecordWriter<?>> finalOutputWriters = new ArrayList<RecordWriter<?>>();
	final TaskConfig finalOutConfig = this.config.getIterationHeadFinalOutputConfig();
	final ClassLoader userCodeClassLoader = getUserCodeClassLoader();
	this.finalOutputCollector = BatchTask.getOutputCollector(this, finalOutConfig,
			userCodeClassLoader, finalOutputWriters, config.getNumOutputs(), finalOutConfig.getNumOutputs());

	// sanity check the setup
	final int writersIntoStepFunction = this.eventualOutputs.size();
	final int writersIntoFinalResult = finalOutputWriters.size();
	final int syncGateIndex = this.config.getIterationHeadIndexOfSyncOutput();

	if (writersIntoStepFunction + writersIntoFinalResult != syncGateIndex) {
		throw new Exception("Error: Inconsistent head task setup - wrong mapping of output gates.");
	}
	// now, we can instantiate the sync gate
	this.toSync = new RecordWriter<IOReadableWritable>(getEnvironment().getWriter(syncGateIndex));
	this.toSyncPartitionId = getEnvironment().getWriter(syncGateIndex).getPartitionId();
}
 
Example 2
Source Project: flink   Source File: JobGraphGenerator.java    License: Apache License 2.0
private JobVertex createDataSinkVertex(SinkPlanNode node) throws CompilerException {
	final InputOutputFormatVertex vertex = new InputOutputFormatVertex(node.getNodeName());
	final TaskConfig config = new TaskConfig(vertex.getConfiguration());

	final OperatorID operatorID = new OperatorID();

	vertex.setResources(node.getMinResources(), node.getPreferredResources());
	vertex.setInvokableClass(DataSinkTask.class);
	vertex.setFormatDescription(operatorID, getDescriptionForUserCode(node.getProgramOperator().getUserCodeWrapper()));

	// set user code
	new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader())
		.addOutputFormat(operatorID, (UserCodeWrapper<? extends OutputFormat<?>>) node.getProgramOperator().getUserCodeWrapper())
		.addParameters(operatorID, node.getProgramOperator().getParameters())
		.write(config);

	return vertex;
}
 
Example 3
Source Project: flink   Source File: TaskTestBase.java    License: Apache License 2.0
public void registerFileInputTask(
	AbstractInvokable inTask,
	Class<? extends DelimitedInputFormat<Record>> stubClass,
	String inPath,
	String delimiter) {

	DelimitedInputFormat<Record> format;
	try {
		format = stubClass.newInstance();
	}
	catch (Throwable t) {
		throw new RuntimeException("Could not instantiate test input format.", t);
	}

	format.setFilePath(inPath);
	format.setDelimiter(delimiter);

	OperatorID operatorID = new OperatorID();
	new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader())
		.addInputFormat(operatorID, format)
		.write(new TaskConfig(this.mockEnv.getTaskConfiguration()));

	this.inputSplitProvider.addInputSplits(inPath, 5);
}
 
Example 4
Source Project: flink   Source File: InputOutputFormatContainerTest.java    License: Apache License 2.0
@Test
public void testOnlyInputFormat() {
	InputOutputFormatContainer formatContainer = new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader());

	OperatorID operatorID = new OperatorID();
	formatContainer.addInputFormat(operatorID, new TestInputFormat("test input format"));
	formatContainer.addParameters(operatorID, "parameter1", "abc123");

	TaskConfig taskConfig = new TaskConfig(new Configuration());
	formatContainer.write(taskConfig);

	InputOutputFormatContainer loadedFormatContainer = new InputOutputFormatContainer(taskConfig, getClass().getClassLoader());

	Map<OperatorID, UserCodeWrapper<? extends InputFormat<?, ?>>> inputFormats = loadedFormatContainer.getInputFormats();
	assertEquals(1, inputFormats.size());
	assertEquals(0, loadedFormatContainer.getOutputFormats().size());

	TestInputFormat inputFormat = (TestInputFormat) inputFormats.get(operatorID).getUserCodeObject();
	assertEquals("test input format", inputFormat.getName());

	Configuration parameters = loadedFormatContainer.getParameters(operatorID);
	assertEquals(1, parameters.keySet().size());
	assertEquals("abc123", parameters.getString("parameter1", null));
}
 
Example 5
Source Project: Flink-CEPplus   Source File: CoGroupRawDriver.java    License: Apache License 2.0
@Override
public void prepare() throws Exception {
	final TaskConfig config = this.taskContext.getTaskConfig();
	if (config.getDriverStrategy() != DriverStrategy.CO_GROUP_RAW) {
		throw new Exception("Unrecognized driver strategy for CoGoup Python driver: " + config.getDriverStrategy().name());
	}

	final MutableObjectIterator<IT1> in1 = this.taskContext.getInput(0);
	final MutableObjectIterator<IT2> in2 = this.taskContext.getInput(1);

	IT1 reuse1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer().createInstance();
	IT2 reuse2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer().createInstance();

	this.coGroupIterator1 = new SimpleIterable<IT1>(reuse1, in1);
	this.coGroupIterator2 = new SimpleIterable<IT2>(reuse2, in2);

	if (LOG.isDebugEnabled()) {
		LOG.debug(this.taskContext.formatLogString("CoGroup task iterator ready."));
	}
}
 
Example 6
Source Project: Flink-CEPplus   Source File: ReduceDriver.java    License: Apache License 2.0
@Override
public void prepare() throws Exception {
	TaskConfig config = this.taskContext.getTaskConfig();
	if (config.getDriverStrategy() != DriverStrategy.SORTED_REDUCE) {
		throw new Exception("Unrecognized driver strategy for Reduce driver: " + config.getDriverStrategy().name());
	}
	this.serializer = this.taskContext.<T>getInputSerializer(0).getSerializer();
	this.comparator = this.taskContext.getDriverComparator(0);
	this.input = this.taskContext.getInput(0);

	ExecutionConfig executionConfig = taskContext.getExecutionConfig();
	this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

	if (LOG.isDebugEnabled()) {
		LOG.debug("ReduceDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
	}
}
 
Example 7
Source Project: flink   Source File: DriverTestBase.java    License: Apache License 2.0
protected DriverTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters, long perSortMemory) {
	if (memory < 0 || maxNumSorters < 0 || perSortMemory < 0) {
		throw new IllegalArgumentException();
	}
	
	final long totalMem = Math.max(memory, 0) + (Math.max(maxNumSorters, 0) * perSortMemory);
	
	this.perSortMem = perSortMemory;
	this.perSortFractionMem = (double) perSortMemory / totalMem;
	this.ioManager = new IOManagerAsync();
	this.memManager = totalMem > 0 ? new MemoryManager(totalMem, 1) : null;

	this.inputs = new ArrayList<MutableObjectIterator<Record>>();
	this.comparators = new ArrayList<TypeComparator<Record>>();
	this.sorters = new ArrayList<UnilateralSortMerger<Record>>();
	
	this.owner = new DummyInvokable();
	this.taskConfig = new TaskConfig(new Configuration());
	this.executionConfig = executionConfig;
	this.taskManageInfo = new TestingTaskManagerRuntimeInfo();
}
 
Example 8
Source Project: flink   Source File: AllReduceDriver.java    License: Apache License 2.0
@Override
public void prepare() throws Exception {
	final TaskConfig config = this.taskContext.getTaskConfig();
	if (config.getDriverStrategy() != DriverStrategy.ALL_REDUCE) {
		throw new Exception("Unrecognized driver strategy for AllReduce driver: " + config.getDriverStrategy().name());
	}
	
	TypeSerializerFactory<T> serializerFactory = this.taskContext.getInputSerializer(0);
	this.serializer = serializerFactory.getSerializer();
	this.input = this.taskContext.getInput(0);

	ExecutionConfig executionConfig = taskContext.getExecutionConfig();
	this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

	if (LOG.isDebugEnabled()) {
		LOG.debug("AllReduceDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
	}
}
 
Example 9
Source Project: flink   Source File: IterationHeadTask.java    License: Apache License 2.0
@Override
protected void initOutputs() throws Exception {
	// initialize the regular outputs first (the ones into the step function).
	super.initOutputs();

	// at this time, the outputs to the step function are created
	// add the outputs for the final solution
	List<RecordWriter<?>> finalOutputWriters = new ArrayList<RecordWriter<?>>();
	final TaskConfig finalOutConfig = this.config.getIterationHeadFinalOutputConfig();
	final ClassLoader userCodeClassLoader = getUserCodeClassLoader();
	this.finalOutputCollector = BatchTask.getOutputCollector(this, finalOutConfig,
			userCodeClassLoader, finalOutputWriters, config.getNumOutputs(), finalOutConfig.getNumOutputs());

	// sanity check the setup
	final int writersIntoStepFunction = this.eventualOutputs.size();
	final int writersIntoFinalResult = finalOutputWriters.size();
	final int syncGateIndex = this.config.getIterationHeadIndexOfSyncOutput();

	if (writersIntoStepFunction + writersIntoFinalResult != syncGateIndex) {
		throw new Exception("Error: Inconsistent head task setup - wrong mapping of output gates.");
	}
	// now, we can instantiate the sync gate
	this.toSync = new RecordWriterBuilder<>().build(getEnvironment().getWriter(syncGateIndex));
	this.toSyncPartitionId = getEnvironment().getWriter(syncGateIndex).getPartitionId();
}
 
Example 10
Source Project: Flink-CEPplus   Source File: BinaryOperatorTestBase.java    License: Apache License 2.0
protected BinaryOperatorTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters, long perSortMemory) {
	if (memory < 0 || maxNumSorters < 0 || perSortMemory < 0) {
		throw new IllegalArgumentException();
	}
	
	final long totalMem = Math.max(memory, 0) + (Math.max(maxNumSorters, 0) * perSortMemory);
	
	this.perSortMem = perSortMemory;
	this.perSortFractionMem = (double) perSortMemory / totalMem;
	this.ioManager = new IOManagerAsync();
	this.memManager = totalMem > 0 ? new MemoryManager(totalMem, 1) : null;
	
	this.inputs = new ArrayList<>();
	this.comparators = new ArrayList<>();
	this.sorters = new ArrayList<>();
	
	this.owner = new DummyInvokable();
	this.taskConfig = new TaskConfig(new Configuration());
	this.executionConfig = executionConfig;
	this.taskManageInfo = new TestingTaskManagerRuntimeInfo();
}
 
Example 11
Source Project: flink   Source File: JobGraphGenerator.java    License: Apache License 2.0
private JobVertex createDataSourceVertex(SourcePlanNode node) throws CompilerException {
	final InputOutputFormatVertex vertex = new InputOutputFormatVertex(node.getNodeName());
	final TaskConfig config = new TaskConfig(vertex.getConfiguration());

	final OperatorID operatorID = new OperatorID();

	vertex.setResources(node.getMinResources(), node.getPreferredResources());
	vertex.setInvokableClass(DataSourceTask.class);
	vertex.setFormatDescription(operatorID, getDescriptionForUserCode(node.getProgramOperator().getUserCodeWrapper()));

	// set user code
	new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader())
		.addInputFormat(operatorID, (UserCodeWrapper<? extends InputFormat<?, ?>>) node.getProgramOperator().getUserCodeWrapper())
		.addParameters(operatorID, node.getProgramOperator().getParameters())
		.write(config);

	config.setOutputSerializer(node.getSerializer());
	return vertex;
}
 
Example 12
Source Project: flink   Source File: InputOutputFormatContainerTest.java    License: Apache License 2.0
@Test
public void testOnlyOutputFormat() {
	InputOutputFormatContainer formatContainer = new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader());

	OperatorID operatorID = new OperatorID();
	formatContainer.addOutputFormat(operatorID, new DiscardingOutputFormat<>());

	Configuration parameters = new Configuration();
	parameters.setString("parameter1", "bcd234");
	formatContainer.addParameters(operatorID, parameters);

	TaskConfig taskConfig = new TaskConfig(new Configuration());
	formatContainer.write(taskConfig);

	InputOutputFormatContainer loadedFormatContainer = new InputOutputFormatContainer(taskConfig, getClass().getClassLoader());

	Map<OperatorID, UserCodeWrapper<? extends OutputFormat<?>>> outputFormats = loadedFormatContainer.getOutputFormats();
	assertEquals(1, outputFormats.size());
	assertEquals(0, loadedFormatContainer.getInputFormats().size());

	assertTrue(outputFormats.get(operatorID).getUserCodeObject() instanceof DiscardingOutputFormat);

	Configuration loadedParameters = loadedFormatContainer.getParameters(operatorID);
	assertEquals(1, loadedParameters.keySet().size());
	assertEquals("bcd234", loadedParameters.getString("parameter1", null));
}
 
Example 13
Source Project: flink   Source File: TaskTestBase.java    License: Apache License 2.0
public void registerTask(
		@SuppressWarnings("rawtypes") Class<? extends Driver> driver,
		Class<? extends RichFunction> stubClass) {

	final TaskConfig config = new TaskConfig(this.mockEnv.getTaskConfiguration());
	config.setDriver(driver);
	config.setStubWrapper(new UserCodeClassWrapper<>(stubClass));
}
 
Example 14
Source Project: flink   Source File: JobGraphGenerator.java    License: Apache License 2.0
private JobVertex createDualInputVertex(DualInputPlanNode node) throws CompilerException {
	final String taskName = node.getNodeName();
	final DriverStrategy ds = node.getDriverStrategy();
	final JobVertex vertex = new JobVertex(taskName);
	final TaskConfig config = new TaskConfig(vertex.getConfiguration());
	vertex.setResources(node.getMinResources(), node.getPreferredResources());
	vertex.setInvokableClass( (this.currentIteration != null && node.isOnDynamicPath()) ? IterationIntermediateTask.class : BatchTask.class);
	
	// set user code
	config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
	config.setStubParameters(node.getProgramOperator().getParameters());
	
	// set the driver strategy
	config.setDriver(ds.getDriverClass());
	config.setDriverStrategy(ds);
	if (node.getComparator1() != null) {
		config.setDriverComparator(node.getComparator1(), 0);
	}
	if (node.getComparator2() != null) {
		config.setDriverComparator(node.getComparator2(), 1);
	}
	if (node.getPairComparator() != null) {
		config.setDriverPairComparator(node.getPairComparator());
	}
	
	// assign memory, file-handles, etc.
	assignDriverResources(node, config);
	return vertex;
}
 
Example 15
Source Project: Flink-CEPplus   Source File: BatchTask.java    License: Apache License 2.0
/**
 * Instantiates a user code class from its definition in the task configuration.
 * The class is instantiated without arguments using the null-ary constructor. Instantiation
 * will fail if this constructor does not exist or is not public.
 * 
 * @param <T> The generic type of the user code class.
 * @param config The task configuration containing the class description.
 * @param cl The class loader to be used to load the class.
 * @param superClass The super class that the user code class extends or implements, for type checking.
 * 
 * @return An instance of the user code class.
 */
public static <T> T instantiateUserCode(TaskConfig config, ClassLoader cl, Class<? super T> superClass) {
	try {
		T stub = config.<T>getStubWrapper(cl).getUserCodeObject(superClass, cl);
		// check if the class is a subclass, if the check is required
		if (superClass != null && !superClass.isAssignableFrom(stub.getClass())) {
			throw new RuntimeException("The class '" + stub.getClass().getName() + "' is not a subclass of '" + 
					superClass.getName() + "' as is required.");
		}
		return stub;
	}
	catch (ClassCastException ccex) {
		throw new RuntimeException("The UDF class is not a proper subclass of " + superClass.getName(), ccex);
	}
}
 
Example 16
Source Project: Flink-CEPplus   Source File: ChainedDriver.java    License: Apache License 2.0
public void setup(TaskConfig config, String taskName, Collector<OT> outputCollector,
		AbstractInvokable parent, ClassLoader userCodeClassLoader, ExecutionConfig executionConfig,
		Map<String, Accumulator<?,?>> accumulatorMap)
{
	this.config = config;
	this.taskName = taskName;
	this.userCodeClassLoader = userCodeClassLoader;
	this.metrics = parent.getEnvironment().getMetricGroup().getOrAddOperator(taskName);
	this.numRecordsIn = this.metrics.getIOMetricGroup().getNumRecordsInCounter();
	this.numRecordsOut = this.metrics.getIOMetricGroup().getNumRecordsOutCounter();
	this.outputCollector = new CountingCollector<>(outputCollector, numRecordsOut);

	Environment env = parent.getEnvironment();

	if (parent instanceof BatchTask) {
		this.udfContext = ((BatchTask<?, ?>) parent).createRuntimeContext(metrics);
	} else {
		this.udfContext = new DistributedRuntimeUDFContext(env.getTaskInfo(), userCodeClassLoader,
				parent.getExecutionConfig(), env.getDistributedCacheEntries(), accumulatorMap, metrics
		);
	}

	this.executionConfig = executionConfig;
	this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

	setup(parent);
}
 
Example 17
Source Project: flink   Source File: ChainTaskTest.java    License: Apache License 2.0
@Test
public void testDataSourceTaskOutputInCloseMethod() throws IOException {
	final int numChainedTasks = 10;
	final int keyCnt = 100;
	final int valCnt = 10;
	final File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());
	DataSourceTaskTest.InputFilePreparator.prepareInputFile(
		new UniformRecordGenerator(keyCnt, valCnt, false), tempTestFile, true);
	initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
	addOutput(outList);
	final DataSourceTask<Record> testTask = new DataSourceTask<>(mockEnv);
	registerFileInputTask(
		testTask, DataSourceTaskTest.MockInputFormat.class, tempTestFile.toURI().toString(), "\n");
	for (int i = 0; i < numChainedTasks; i++) {
		final TaskConfig taskConfig = new TaskConfig(new Configuration());
		taskConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
		taskConfig.setOutputSerializer(serFact);
		taskConfig.setStubWrapper(
			new UserCodeClassWrapper<>(ChainTaskTest.MockDuplicateLastValueMapFunction.class));
		getTaskConfig().addChainedTask(
			ChainedFlatMapDriver.class, taskConfig, "chained-" + i);
	}
	try {
		testTask.invoke();
		Assert.assertEquals(keyCnt * valCnt + numChainedTasks, outList.size());
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("Invoke method caused exception.");
	}
}
 
Example 18
Source Project: Flink-CEPplus   Source File: DataSourceTask.java    License: Apache License 2.0
/**
 * Initializes the InputFormat implementation and configuration.
 * 
 * @throws RuntimeException
 *         Thrown if an instance of the InputFormat implementation cannot be
 *         obtained.
 */
private void initInputFormat() {
	ClassLoader userCodeClassLoader = getUserCodeClassLoader();
	// obtain task configuration (including stub parameters)
	Configuration taskConf = getTaskConfiguration();
	this.config = new TaskConfig(taskConf);

	try {
		this.format = config.<InputFormat<OT, InputSplit>>getStubWrapper(userCodeClassLoader)
				.getUserCodeObject(InputFormat.class, userCodeClassLoader);

		// check if the class is a subclass, if the check is required
		if (!InputFormat.class.isAssignableFrom(this.format.getClass())) {
			throw new RuntimeException("The class '" + this.format.getClass().getName() + "' is not a subclass of '" +
					InputFormat.class.getName() + "' as is required.");
		}
	}
	catch (ClassCastException ccex) {
		throw new RuntimeException("The stub class is not a proper subclass of " + InputFormat.class.getName(),
				ccex);
	}

	Thread thread = Thread.currentThread();
	ClassLoader original = thread.getContextClassLoader();
	// configure the stub. catch exceptions here extra, to report them as originating from the user code
	try {
		thread.setContextClassLoader(userCodeClassLoader);
		this.format.configure(this.config.getStubParameters());
	}
	catch (Throwable t) {
		throw new RuntimeException("The user defined 'configure()' method caused an error: " + t.getMessage(), t);
	}
	finally {
		thread.setContextClassLoader(original);
	}

	// get the factory for the type serializer
	this.serializerFactory = this.config.getOutputSerializer(userCodeClassLoader);
}
 
Example 19
Source Project: flink   Source File: JobTaskVertexTest.java    License: Apache License 2.0
@Test
public void testInputFormat() {
	try {
		final InputOutputFormatVertex vertex = new InputOutputFormatVertex("Name");

		OperatorID operatorID = new OperatorID();
		Configuration parameters = new Configuration();
		parameters.setString("test_key", "test_value");
		new InputOutputFormatContainer(Thread.currentThread().getContextClassLoader())
			.addInputFormat(operatorID, new TestInputFormat(parameters))
			.addParameters(operatorID, "test_key", "test_value")
			.write(new TaskConfig(vertex.getConfiguration()));

		final ClassLoader cl = new TestClassLoader();

		vertex.initializeOnMaster(cl);
		InputSplit[] splits = vertex.getInputSplitSource().createInputSplits(77);

		assertNotNull(splits);
		assertEquals(1, splits.length);
		assertEquals(TestSplit.class, splits[0].getClass());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 20
Source Project: flink   Source File: TaskTestBase.java    License: Apache License 2.0
public IteratorWrappingTestSingleInputGate<Record> addInput(MutableObjectIterator<Record> input, int groupId, boolean read) {
	final IteratorWrappingTestSingleInputGate<Record> reader = this.mockEnv.addInput(input);
	TaskConfig conf = new TaskConfig(this.mockEnv.getTaskConfiguration());
	conf.addInputToGroup(groupId);
	conf.setInputSerializer(RecordSerializerFactory.get(), groupId);

	if (read) {
		reader.notifyNonEmpty();
	}

	return reader;
}
 
Example 21
Source Project: Flink-CEPplus   Source File: TaskTestBase.java    License: Apache License 2.0
public void registerTask(
		@SuppressWarnings("rawtypes") Class<? extends Driver> driver,
		Class<? extends RichFunction> stubClass) {

	final TaskConfig config = new TaskConfig(this.mockEnv.getTaskConfiguration());
	config.setDriver(driver);
	config.setStubWrapper(new UserCodeClassWrapper<>(stubClass));
}
 
Example 22
Source Project: Flink-CEPplus   Source File: TaskTestBase.java    License: Apache License 2.0
public void registerFileOutputTask(FileOutputFormat<Record> outputFormat, String outPath) {
	TaskConfig dsConfig = new TaskConfig(this.mockEnv.getTaskConfiguration());

	outputFormat.setOutputFilePath(new Path(outPath));
	outputFormat.setWriteMode(WriteMode.OVERWRITE);

	dsConfig.setStubWrapper(new UserCodeObjectWrapper<>(outputFormat));
}
 
Example 23
Source Project: flink   Source File: JobGraphGenerator.java    License: Apache License 2.0
public void setHeadTask(JobVertex headTask, TaskConfig headConfig) {
	this.headTask = headTask;
	this.headFinalResultConfig = new TaskConfig(new Configuration());
	
	// check if we already had a configuration, for example if the solution set was 
	if (this.headConfig != null) {
		headConfig.getConfiguration().addAll(this.headConfig.getConfiguration());
	}
	
	this.headConfig = headConfig;
}
 
Example 24
Source Project: Flink-CEPplus   Source File: JobGraphGenerator.java    License: Apache License 2.0
private void assignLocalStrategyResources(Channel c, TaskConfig config, int inputNum) {
	if (c.getRelativeMemoryLocalStrategy() > 0) {
		config.setRelativeMemoryInput(inputNum, c.getRelativeMemoryLocalStrategy());
		config.setFilehandlesInput(inputNum, this.defaultMaxFan);
		config.setSpillingThresholdInput(inputNum, this.defaultSortSpillingThreshold);
		config.setUseLargeRecordHandler(this.useLargeRecordHandler);
	}
}
 
Example 25
Source Project: flink   Source File: JobGraphGenerator.java    License: Apache License 2.0
private void assignDriverResources(PlanNode node, TaskConfig config) {
	final double relativeMem = node.getRelativeMemoryPerSubTask();
	if (relativeMem > 0) {
		config.setRelativeMemoryDriver(relativeMem);
		config.setFilehandlesDriver(this.defaultMaxFan);
		config.setSpillingThresholdDriver(this.defaultSortSpillingThreshold);
	}
}