org.apache.flink.metrics.groups.UnregisteredMetricsGroup Java Examples

The following examples show how to use org.apache.flink.metrics.groups.UnregisteredMetricsGroup. You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to the original project or source file. You may also check out the related API usage in the sidebar.
Example #1
Source Project: Flink-CEPplus   Author: ljygz   File: KafkaConsumerThreadTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a consumer thread for testing that is backed by the given mocked Kafka consumer.
 *
 * <p>Delegates to the superclass with fixed test settings: a mocked logger, empty
 * properties, the 0.9 consumer call-bridge, a fixed thread name, and two
 * unregistered (no-op) metric groups so no metrics are actually reported.
 * NOTE(review): the {@code 0} and {@code false} positional arguments presumably are
 * the poll timeout and a "use metrics" flag — confirm against the superclass signature.
 *
 * @param mockConsumer the mocked Kafka consumer the thread will drive
 * @param unassignedPartitionsQueue queue of partition states not yet assigned to the consumer
 * @param handover the handover buffer between this consumer thread and the fetcher
 */
public TestKafkaConsumerThread(
		KafkaConsumer<byte[], byte[]> mockConsumer,
		ClosableBlockingQueue<KafkaTopicPartitionState<TopicPartition>> unassignedPartitionsQueue,
		Handover handover) {

	super(
			mock(Logger.class),
			handover,
			new Properties(),
			unassignedPartitionsQueue,
			new KafkaConsumerCallBridge09(),
			"test-kafka-consumer-thread",
			0,
			false,
			new UnregisteredMetricsGroup(),
			new UnregisteredMetricsGroup(),
			null);

	// Keep a handle on the mock so tests can stub/verify consumer interactions.
	this.mockConsumer = mockConsumer;
}
 
Example #2
Source Project: Flink-CEPplus   Author: ljygz   File: AbstractFetcherTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a fetcher for testing, adding two latches that let the test coordinate
 * with the fetch loop and with the state-snapshot iteration.
 *
 * <p>Delegates to the superclass using this class's class loader and an
 * unregistered (no-op) metric group. NOTE(review): the trailing {@code false}
 * presumably disables consumer metrics — confirm against the superclass signature.
 *
 * @param sourceContext the context records and watermarks are emitted to
 * @param assignedPartitionsWithStartOffsets initial partitions with their start offsets
 * @param watermarksPeriodic optional serialized periodic watermark assigner
 * @param watermarksPunctuated optional serialized punctuated watermark assigner
 * @param processingTimeProvider provider for processing-time timers
 * @param autoWatermarkInterval interval for automatic watermark emission
 * @param fetchLoopWaitLatch latch the fetch loop waits on / triggers
 * @param stateIterationBlockLatch latch used to block during state iteration
 * @throws Exception if the superclass fails to initialize
 */
TestFetcher(
		SourceContext<T> sourceContext,
		Map<KafkaTopicPartition, Long> assignedPartitionsWithStartOffsets,
		SerializedValue<AssignerWithPeriodicWatermarks<T>> watermarksPeriodic,
		SerializedValue<AssignerWithPunctuatedWatermarks<T>> watermarksPunctuated,
		ProcessingTimeService processingTimeProvider,
		long autoWatermarkInterval,
		OneShotLatch fetchLoopWaitLatch,
		OneShotLatch stateIterationBlockLatch) throws Exception {

	super(
		sourceContext,
		assignedPartitionsWithStartOffsets,
		watermarksPeriodic,
		watermarksPunctuated,
		processingTimeProvider,
		autoWatermarkInterval,
		TestFetcher.class.getClassLoader(),
		new UnregisteredMetricsGroup(),
		false);

	this.fetchLoopWaitLatch = fetchLoopWaitLatch;
	this.stateIterationBlockLatch = stateIterationBlockLatch;
}
 
Example #3
Source Project: Flink-CEPplus   Author: ljygz   File: CollectionExecutor.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Executes a data source in collection mode and returns the produced elements.
 *
 * <p>A runtime context (with a no-op metric group) is built only when the source's
 * user code is a {@link RichInputFormat}; plain input formats receive a
 * {@code null} context.
 *
 * @param source the generic data source to execute
 * @param superStep the current superstep; 0 means "not inside an iteration"
 * @return the elements produced by the source
 * @throws Exception if executing the source fails
 */
private <OUT> List<OUT> executeDataSource(GenericDataSourceBase<?, ?> source, int superStep)
		throws Exception {
	@SuppressWarnings("unchecked")
	GenericDataSourceBase<OUT, ?> typedSource = (GenericDataSourceBase<OUT, ?>) source;

	// Single-subtask task info: parallelism 1, subtask index 0, attempt 0.
	TaskInfo taskInfo = new TaskInfo(typedSource.getName(), 1, 0, 1, 0);

	RuntimeUDFContext context = null;
	Class<?> userCodeClass = typedSource.getUserCodeWrapper().getUserCodeClass();

	if (RichInputFormat.class.isAssignableFrom(userCodeClass)) {
		// Rich input formats need a runtime context; metrics are discarded here.
		MetricGroup metricGroup = new UnregisteredMetricsGroup();
		if (superStep == 0) {
			context = new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		} else {
			context = new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		}
	}

	return typedSource.executeOnCollections(context, executionConfig);
}
 
Example #4
Source Project: flink   Author: flink-tpc-ds   File: StateBackendMigrationTestBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Restores a keyed state backend for the backend under test from the given state handles.
 *
 * <p>Uses a fresh {@code JobID}, a fixed operator name, the TTL default time provider,
 * and an unregistered (no-op) metric group, so restored state produces no metrics.
 *
 * @param keySerializer serializer for the backend's keys
 * @param numberOfKeyGroups total number of key groups
 * @param keyGroupRange the key-group range handled by this backend instance
 * @param state the state handles to restore from
 * @param env the task environment providing the KvState registry
 * @return the restored keyed state backend
 * @throws Exception if backend creation or restore fails
 */
private <K> AbstractKeyedStateBackend<K> restoreKeyedBackend(
	TypeSerializer<K> keySerializer,
	int numberOfKeyGroups,
	KeyGroupRange keyGroupRange,
	List<KeyedStateHandle> state,
	Environment env) throws Exception {
	AbstractKeyedStateBackend<K> backend = getStateBackend().createKeyedStateBackend(
		env,
		new JobID(),
		"test_op",
		keySerializer,
		numberOfKeyGroups,
		keyGroupRange,
		env.getTaskKvStateRegistry(),
		TtlTimeProvider.DEFAULT,
		new UnregisteredMetricsGroup(),
		state,
		new CloseableRegistry());
	return backend;
}
 
Example #5
Source Project: Flink-CEPplus   Author: ljygz   File: OuterJoinOperatorBaseTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Initializes the test fixtures: a mock rich join function, the outer-join
 * operator under test, an execution config, and a runtime context backed by an
 * unregistered (no-op) metric group.
 */
@SuppressWarnings({"rawtypes", "unchecked"})
@Before
public void setup() {
	joiner = new MockRichFlatJoinFunction();

	BinaryOperatorInformation operatorInfo = new BinaryOperatorInformation(
		BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
	baseOperator = new OuterJoinOperatorBase(joiner, operatorInfo, new int[0], new int[0], "TestJoiner", null);

	executionConfig = new ExecutionConfig();

	// Single-subtask task info: parallelism 1, subtask index 0, attempt 0.
	TaskInfo info = new TaskInfo("Test rich outer join function", 1, 0, 1, 0);
	HashMap<String, Accumulator<?, ?>> accumulators = new HashMap<>();
	HashMap<String, Future<Path>> cachedFiles = new HashMap<>();

	runtimeContext = new RuntimeUDFContext(info, null, executionConfig, cachedFiles, accumulators, new UnregisteredMetricsGroup());
}
 
Example #6
Source Project: flink   Author: flink-tpc-ds   File: ExecutionGraphPartitionReleaseTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds, starts, and schedules an execution graph over the given vertices,
 * wired to the supplied partition tracker.
 *
 * <p>Uses test doubles throughout: a slot provider that always fulfills requests
 * with a fresh testing logical slot, a no-restart strategy, an unregistered
 * (no-op) metric group, and a void blob writer. Scheduling is kicked off on the
 * main-thread executor before returning.
 *
 * @param partitionTracker tracker to be notified about partition lifecycle events
 * @param vertices the job vertices forming the test job
 * @return the started execution graph
 * @throws Exception if building the graph fails
 */
private ExecutionGraph createExecutionGraph(final PartitionTracker partitionTracker, final JobVertex... vertices) throws Exception {
	final ExecutionGraph executionGraph = ExecutionGraphBuilder.buildGraph(
		null,
		new JobGraph(new JobID(), "test job", vertices),
		new Configuration(),
		scheduledExecutorService,
		mainThreadExecutor.getMainThreadExecutor(),
		new TestingSlotProvider(ignored -> CompletableFuture.completedFuture(new TestingLogicalSlotBuilder().createTestingLogicalSlot())),
		ExecutionGraphPartitionReleaseTest.class.getClassLoader(),
		new StandaloneCheckpointRecoveryFactory(),
		AkkaUtils.getDefaultTimeout(),
		new NoRestartStrategy(),
		new UnregisteredMetricsGroup(),
		VoidBlobWriter.getInstance(),
		AkkaUtils.getDefaultTimeout(),
		log,
		NettyShuffleMaster.INSTANCE,
		partitionTracker);

	executionGraph.start(mainThreadExecutor.getMainThreadExecutor());
	// Trigger scheduling on the graph's main thread, as required by the scheduler.
	mainThreadExecutor.execute(executionGraph::scheduleForExecution);

	return executionGraph;
}
 
Example #7
Source Project: Flink-CEPplus   Author: ljygz   File: RuntimeUDFContextTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that re-setting a broadcast variable invalidates the value previously
 * materialized through an initializer, so the next read reflects the new data.
 *
 * <p>Fix: the original wrapped the body in {@code try/catch} and called
 * {@code fail(e.getMessage())}, which discards the stack trace; declaring
 * {@code throws Exception} lets the runner report the full failure.
 *
 * @throws Exception if creating the context or reading the broadcast variable fails
 */
@Test
public void testResetBroadcastVariableWithInitializer() throws Exception {
	RuntimeUDFContext ctx = new RuntimeUDFContext(
			taskInfo, getClass().getClassLoader(), new ExecutionConfig(),
			new HashMap<>(),
			new HashMap<>(),
			new UnregisteredMetricsGroup());

	ctx.setBroadcastVariable("name", Arrays.asList(1, 2, 3, 4));

	// access it the first time with an initializer
	List<Double> list = ctx.getBroadcastVariableWithInitializer("name", new ConvertingInitializer());
	assertEquals(Arrays.asList(1.0, 2.0, 3.0, 4.0), list);

	// set it again to something different
	ctx.setBroadcastVariable("name", Arrays.asList(2, 3, 4, 5));

	List<Double> list2 = ctx.getBroadcastVariableWithInitializer("name", new ConvertingInitializer());
	assertEquals(Arrays.asList(2.0, 3.0, 4.0, 5.0), list2);
}
 
Example #8
Source Project: flink   Author: flink-tpc-ds   File: RocksDBStateBackendConfigTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates an empty RocksDB keyed state backend for integer keys in the given environment.
 *
 * <p>Fixed test settings: a single key group covering range [0, 0], the TTL default
 * time provider, no restore state, and an unregistered (no-op) metric group.
 *
 * @param rocksDbBackend the RocksDB state backend factory under test
 * @param env the task environment providing job ID and KvState registry
 * @return the created keyed state backend
 * @throws Exception if backend creation fails
 */
static RocksDBKeyedStateBackend<Integer> createKeyedStateBackend(
		RocksDBStateBackend rocksDbBackend, Environment env) throws Exception {

	return (RocksDBKeyedStateBackend<Integer>) rocksDbBackend.createKeyedStateBackend(
		env,
		env.getJobID(),
		"test_op",
		IntSerializer.INSTANCE,
		1,
		new KeyGroupRange(0, 0),
		env.getTaskKvStateRegistry(),
		TtlTimeProvider.DEFAULT,
		new UnregisteredMetricsGroup(),
		Collections.emptyList(),
		new CloseableRegistry());
}
 
Example #9
Source Project: Flink-CEPplus   Author: ljygz   File: StateBackendTestBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Restores a keyed state backend for the backend under test from the given state handles.
 *
 * <p>Uses a fresh {@code JobID}, a fixed operator name, the TTL default time provider,
 * and an unregistered (no-op) metric group.
 *
 * @param keySerializer serializer for the backend's keys
 * @param numberOfKeyGroups total number of key groups
 * @param keyGroupRange the key-group range handled by this backend instance
 * @param state the state handles to restore from
 * @param env the task environment providing the KvState registry
 * @return the restored keyed state backend
 * @throws Exception if backend creation or restore fails
 */
protected <K> AbstractKeyedStateBackend<K> restoreKeyedBackend(
		TypeSerializer<K> keySerializer,
		int numberOfKeyGroups,
		KeyGroupRange keyGroupRange,
		List<KeyedStateHandle> state,
		Environment env) throws Exception {

	AbstractKeyedStateBackend<K> backend = getStateBackend().createKeyedStateBackend(
		env,
		new JobID(),
		"test_op",
		keySerializer,
		numberOfKeyGroups,
		keyGroupRange,
		env.getTaskKvStateRegistry(),
		TtlTimeProvider.DEFAULT,
		new UnregisteredMetricsGroup(),
		state,
		new CloseableRegistry());

	return backend;
}
 
Example #10
Source Project: Flink-CEPplus   Author: ljygz   File: StateBackendMigrationTestBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates an empty keyed state backend for the backend under test.
 *
 * <p>Uses a fresh {@code JobID}, a fixed operator name, the TTL default time provider,
 * no restore state, and an unregistered (no-op) metric group.
 *
 * @param keySerializer serializer for the backend's keys
 * @param numberOfKeyGroups total number of key groups
 * @param keyGroupRange the key-group range handled by this backend instance
 * @param env the task environment providing the KvState registry
 * @return the created keyed state backend
 * @throws Exception if backend creation fails
 */
private <K> AbstractKeyedStateBackend<K> createKeyedBackend(
	TypeSerializer<K> keySerializer,
	int numberOfKeyGroups,
	KeyGroupRange keyGroupRange,
	Environment env) throws Exception {
	AbstractKeyedStateBackend<K> backend = getStateBackend().createKeyedStateBackend(
		env,
		new JobID(),
		"test_op",
		keySerializer,
		numberOfKeyGroups,
		keyGroupRange,
		env.getTaskKvStateRegistry(),
		TtlTimeProvider.DEFAULT,
		new UnregisteredMetricsGroup(),
		Collections.emptyList(),
		new CloseableRegistry());
	return backend;
}
 
Example #11
Source Project: Flink-CEPplus   Author: ljygz   File: StateBackendMigrationTestBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Restores a keyed state backend for the backend under test from the given state handles.
 *
 * <p>Counterpart of {@code createKeyedBackend}: same fixed test settings (fresh
 * {@code JobID}, fixed operator name, TTL default time provider, unregistered
 * metric group), but initialized from existing {@code KeyedStateHandle}s.
 *
 * @param keySerializer serializer for the backend's keys
 * @param numberOfKeyGroups total number of key groups
 * @param keyGroupRange the key-group range handled by this backend instance
 * @param state the state handles to restore from
 * @param env the task environment providing the KvState registry
 * @return the restored keyed state backend
 * @throws Exception if backend creation or restore fails
 */
private <K> AbstractKeyedStateBackend<K> restoreKeyedBackend(
	TypeSerializer<K> keySerializer,
	int numberOfKeyGroups,
	KeyGroupRange keyGroupRange,
	List<KeyedStateHandle> state,
	Environment env) throws Exception {
	AbstractKeyedStateBackend<K> backend = getStateBackend().createKeyedStateBackend(
		env,
		new JobID(),
		"test_op",
		keySerializer,
		numberOfKeyGroups,
		keyGroupRange,
		env.getTaskKvStateRegistry(),
		TtlTimeProvider.DEFAULT,
		new UnregisteredMetricsGroup(),
		state,
		new CloseableRegistry());
	return backend;
}
 
Example #12
Source Project: Flink-CEPplus   Author: ljygz   File: HeapKeyedStateBackendAsyncByDefaultTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Asserts that the given state backend reports support for asynchronous snapshots.
 *
 * <p>The keyed backend is created against a dummy environment with an unregistered
 * (no-op) metric group. Fix: the original closed/disposed the backend only after
 * the assertion, leaking resources when the assertion failed; the cleanup now runs
 * in a {@code finally} block.
 *
 * @param backend the state backend under test
 * @throws Exception if creating the keyed backend fails
 */
private void validateSupportForAsyncSnapshots(StateBackend backend) throws Exception {

	AbstractKeyedStateBackend<Integer> keyedStateBackend = backend.createKeyedStateBackend(
		new DummyEnvironment("Test", 1, 0),
		new JobID(),
		"testOperator",
		IntSerializer.INSTANCE,
		1,
		new KeyGroupRange(0, 0),
		null,
		TtlTimeProvider.DEFAULT,
		new UnregisteredMetricsGroup(),
		Collections.emptyList(),
		new CloseableRegistry()
	);

	try {
		assertTrue(keyedStateBackend.supportsAsynchronousSnapshots());
	} finally {
		// Release the backend even if the assertion fails, so resources never leak.
		IOUtils.closeQuietly(keyedStateBackend);
		keyedStateBackend.dispose();
	}
}
 
Example #13
Source Project: Flink-CEPplus   Author: ljygz   File: ExecutionGraphSchedulingTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds an execution graph for scheduling tests.
 *
 * <p>Uses test settings throughout: the caller-supplied slot provider and timeout,
 * a no-restart strategy, an unregistered (no-op) metric group, a void blob writer,
 * and parallelism-related default {@code 1}.
 *
 * @param jobGraph the job graph to build from
 * @param slotProvider the slot provider used to allocate slots
 * @param timeout timeout used for both allocation and RPC interactions
 * @return the built (not yet started) execution graph
 * @throws Exception if building the graph fails
 */
private ExecutionGraph createExecutionGraph(JobGraph jobGraph, SlotProvider slotProvider, Time timeout) throws Exception {
	return ExecutionGraphBuilder.buildGraph(
		null,
		jobGraph,
		new Configuration(),
		executor,
		executor,
		slotProvider,
		getClass().getClassLoader(),
		new StandaloneCheckpointRecoveryFactory(),
		timeout,
		new NoRestartStrategy(),
		new UnregisteredMetricsGroup(),
		1,
		VoidBlobWriter.getInstance(),
		timeout,
		log);
}
 
Example #14
Source Project: Flink-CEPplus   Author: ljygz   File: PipelinedFailoverRegionBuildingTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds an execution graph configured to use the pipelined-region failover strategy.
 *
 * <p>All other collaborators are test doubles: default test executors, a mocked slot
 * provider, a no-restart strategy, an unregistered (no-op) metric group, and a void
 * blob writer. A 10-second timeout is used for both timeout parameters.
 *
 * @param jobGraph the job graph to build the execution graph from
 * @return the built execution graph with region failover configured
 * @throws JobException if the graph cannot be assembled
 * @throws JobExecutionException if building the execution graph fails
 */
private ExecutionGraph createExecutionGraph(JobGraph jobGraph) throws JobException, JobExecutionException {
	// configure the pipelined failover strategy
	final Configuration jobManagerConfig = new Configuration();
	jobManagerConfig.setString(
			JobManagerOptions.EXECUTION_FAILOVER_STRATEGY,
			FailoverStrategyLoader.PIPELINED_REGION_RESTART_STRATEGY_NAME);

	final Time timeout = Time.seconds(10L);
	return ExecutionGraphBuilder.buildGraph(
		null,
		jobGraph,
		jobManagerConfig,
		TestingUtils.defaultExecutor(),
		TestingUtils.defaultExecutor(),
		mock(SlotProvider.class),
		PipelinedFailoverRegionBuildingTest.class.getClassLoader(),
		new StandaloneCheckpointRecoveryFactory(),
		timeout,
		new NoRestartStrategy(),
		new UnregisteredMetricsGroup(),
		1000,
		VoidBlobWriter.getInstance(),
		timeout,
		log);
}
 
Example #15
Source Project: flink   Author: flink-tpc-ds   File: AbstractFetcherTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a fetcher for testing, adding two latches that let the test coordinate
 * with the fetch loop and with the state-snapshot iteration.
 *
 * <p>Delegates to the superclass using this class's class loader and an
 * unregistered (no-op) metric group. NOTE(review): the trailing {@code false}
 * presumably disables consumer metrics — confirm against the superclass signature.
 *
 * @param sourceContext the context records and watermarks are emitted to
 * @param assignedPartitionsWithStartOffsets initial partitions with their start offsets
 * @param watermarksPeriodic optional serialized periodic watermark assigner
 * @param watermarksPunctuated optional serialized punctuated watermark assigner
 * @param processingTimeProvider provider for processing-time timers
 * @param autoWatermarkInterval interval for automatic watermark emission
 * @param fetchLoopWaitLatch latch the fetch loop waits on / triggers
 * @param stateIterationBlockLatch latch used to block during state iteration
 * @throws Exception if the superclass fails to initialize
 */
TestFetcher(
		SourceContext<T> sourceContext,
		Map<KafkaTopicPartition, Long> assignedPartitionsWithStartOffsets,
		SerializedValue<AssignerWithPeriodicWatermarks<T>> watermarksPeriodic,
		SerializedValue<AssignerWithPunctuatedWatermarks<T>> watermarksPunctuated,
		ProcessingTimeService processingTimeProvider,
		long autoWatermarkInterval,
		OneShotLatch fetchLoopWaitLatch,
		OneShotLatch stateIterationBlockLatch) throws Exception {

	super(
		sourceContext,
		assignedPartitionsWithStartOffsets,
		watermarksPeriodic,
		watermarksPunctuated,
		processingTimeProvider,
		autoWatermarkInterval,
		TestFetcher.class.getClassLoader(),
		new UnregisteredMetricsGroup(),
		false);

	this.fetchLoopWaitLatch = fetchLoopWaitLatch;
	this.stateIterationBlockLatch = stateIterationBlockLatch;
}
 
Example #16
Source Project: flink   Author: flink-tpc-ds   File: ExecutionGraphSchedulingTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds an execution graph for scheduling tests.
 *
 * <p>Uses test settings throughout: the caller-supplied slot provider and timeout,
 * a no-restart strategy, an unregistered (no-op) metric group, a void blob writer,
 * the Netty shuffle master, and a no-op partition tracker.
 *
 * @param jobGraph the job graph to build from
 * @param slotProvider the slot provider used to allocate slots
 * @param timeout timeout used for both allocation and RPC interactions
 * @return the built (not yet started) execution graph
 * @throws Exception if building the graph fails
 */
private ExecutionGraph createExecutionGraph(JobGraph jobGraph, SlotProvider slotProvider, Time timeout) throws Exception {
	return ExecutionGraphBuilder.buildGraph(
		null,
		jobGraph,
		new Configuration(),
		executor,
		executor,
		slotProvider,
		getClass().getClassLoader(),
		new StandaloneCheckpointRecoveryFactory(),
		timeout,
		new NoRestartStrategy(),
		new UnregisteredMetricsGroup(),
		VoidBlobWriter.getInstance(),
		timeout,
		log,
		NettyShuffleMaster.INSTANCE,
		NoOpPartitionTracker.INSTANCE);
}
 
Example #17
Source Project: flink   Author: flink-tpc-ds   File: CollectionExecutor.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Executes a data source in collection mode and returns the produced elements.
 *
 * <p>A runtime context (with a no-op metric group) is built only when the source's
 * user code is a {@link RichInputFormat}; plain input formats receive a
 * {@code null} context.
 *
 * @param source the generic data source to execute
 * @param superStep the current superstep; 0 means "not inside an iteration"
 * @return the elements produced by the source
 * @throws Exception if executing the source fails
 */
private <OUT> List<OUT> executeDataSource(GenericDataSourceBase<?, ?> source, int superStep)
		throws Exception {
	@SuppressWarnings("unchecked")
	GenericDataSourceBase<OUT, ?> typedSource = (GenericDataSourceBase<OUT, ?>) source;

	// Single-subtask task info: parallelism 1, subtask index 0, attempt 0.
	TaskInfo taskInfo = new TaskInfo(typedSource.getName(), 1, 0, 1, 0);

	RuntimeUDFContext context = null;
	Class<?> userCodeClass = typedSource.getUserCodeWrapper().getUserCodeClass();

	if (RichInputFormat.class.isAssignableFrom(userCodeClass)) {
		// Rich input formats need a runtime context; metrics are discarded here.
		MetricGroup metricGroup = new UnregisteredMetricsGroup();
		if (superStep == 0) {
			context = new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		} else {
			context = new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		}
	}

	return typedSource.executeOnCollections(context, executionConfig);
}
 
Example #18
Source Project: flink   Author: flink-tpc-ds   File: StateBackendMigrationTestBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates an empty keyed state backend for the backend under test.
 *
 * <p>Uses a fresh {@code JobID}, a fixed operator name, the TTL default time provider,
 * no restore state, and an unregistered (no-op) metric group.
 *
 * @param keySerializer serializer for the backend's keys
 * @param numberOfKeyGroups total number of key groups
 * @param keyGroupRange the key-group range handled by this backend instance
 * @param env the task environment providing the KvState registry
 * @return the created keyed state backend
 * @throws Exception if backend creation fails
 */
private <K> AbstractKeyedStateBackend<K> createKeyedBackend(
	TypeSerializer<K> keySerializer,
	int numberOfKeyGroups,
	KeyGroupRange keyGroupRange,
	Environment env) throws Exception {
	AbstractKeyedStateBackend<K> backend = getStateBackend().createKeyedStateBackend(
		env,
		new JobID(),
		"test_op",
		keySerializer,
		numberOfKeyGroups,
		keyGroupRange,
		env.getTaskKvStateRegistry(),
		TtlTimeProvider.DEFAULT,
		new UnregisteredMetricsGroup(),
		Collections.emptyList(),
		new CloseableRegistry());
	return backend;
}
 
Example #19
Source Project: flink   Author: flink-tpc-ds   File: OuterJoinOperatorBaseTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Initializes the test fixtures: a mock rich join function, the outer-join
 * operator under test, an execution config, and a runtime context backed by an
 * unregistered (no-op) metric group.
 */
@SuppressWarnings({"rawtypes", "unchecked"})
@Before
public void setup() {
	joiner = new MockRichFlatJoinFunction();

	BinaryOperatorInformation operatorInfo = new BinaryOperatorInformation(
		BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
	baseOperator = new OuterJoinOperatorBase(joiner, operatorInfo, new int[0], new int[0], "TestJoiner", null);

	executionConfig = new ExecutionConfig();

	// Single-subtask task info: parallelism 1, subtask index 0, attempt 0.
	TaskInfo info = new TaskInfo("Test rich outer join function", 1, 0, 1, 0);
	HashMap<String, Accumulator<?, ?>> accumulators = new HashMap<>();
	HashMap<String, Future<Path>> cachedFiles = new HashMap<>();

	runtimeContext = new RuntimeUDFContext(info, null, executionConfig, cachedFiles, accumulators, new UnregisteredMetricsGroup());
}
 
Example #20
Source Project: flink   Author: flink-tpc-ds   File: RichInputFormatTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Checks that a rich input format exposes the runtime context it was given:
 * subtask index and parallelism must match the {@link TaskInfo} used to build it.
 *
 * <p>Fix: the original passed arguments to {@code assertEquals} in reversed order
 * (actual first); JUnit expects the expected value first, so failure messages were
 * misleading.
 */
@Test
public void testCheckRuntimeContextAccess() {
	final SerializedInputFormat<Value> inputFormat = new SerializedInputFormat<Value>();
	// Task "test name": parallelism 3, this is subtask index 1, attempt 0.
	final TaskInfo taskInfo = new TaskInfo("test name", 3, 1, 3, 0);
	inputFormat.setRuntimeContext(
			new RuntimeUDFContext(
					taskInfo, getClass().getClassLoader(), new ExecutionConfig(),
					new HashMap<String, Future<Path>>(),
					new HashMap<String, Accumulator<?, ?>>(),
					new UnregisteredMetricsGroup()));

	assertEquals(1, inputFormat.getRuntimeContext().getIndexOfThisSubtask());
	assertEquals(3, inputFormat.getRuntimeContext().getNumberOfParallelSubtasks());
}
 
Example #21
Source Project: Flink-CEPplus   Author: ljygz   File: FlinkKafkaConsumerBaseTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a mock fetcher that will return the given state snapshots, one per
 * snapshot call, in the order provided.
 *
 * <p>Delegates to the superclass with inert test collaborators: a test source
 * context, no watermark assigners, a test processing-time service, and an
 * unregistered (no-op) metric group.
 *
 * @param stateSnapshotsToReturn the partition-offset snapshots to hand out
 * @throws Exception if the superclass fails to initialize
 */
@SafeVarargs
private MockFetcher(HashMap<KafkaTopicPartition, Long>... stateSnapshotsToReturn) throws Exception {
	super(
			new TestSourceContext<>(),
			new HashMap<>(),
			null,
			null,
			new TestProcessingTimeService(),
			0,
			MockFetcher.class.getClassLoader(),
			new UnregisteredMetricsGroup(),
			false);

	this.stateSnapshotsToReturn.addAll(Arrays.asList(stateSnapshotsToReturn));
}
 
Example #22
Source Project: Flink-CEPplus   Author: ljygz   File: TestableKinesisDataFetcher.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds a Mockito-mocked {@link RuntimeContext} that reports the given parallelism
 * and subtask index, a fixed task name, the current thread's context class loader,
 * and an unregistered (no-op) metric group.
 *
 * @param fakeTotalCountOfSubtasks the parallelism the mock should report
 * @param fakeIndexOfThisSubtask the subtask index the mock should report
 * @return the configured mock runtime context
 */
private static RuntimeContext getMockedRuntimeContext(final int fakeTotalCountOfSubtasks, final int fakeIndexOfThisSubtask) {
	final RuntimeContext context = mock(RuntimeContext.class);

	Mockito.when(context.getNumberOfParallelSubtasks()).thenReturn(fakeTotalCountOfSubtasks);
	Mockito.when(context.getIndexOfThisSubtask()).thenReturn(fakeIndexOfThisSubtask);
	Mockito.when(context.getTaskName()).thenReturn("Fake Task");
	Mockito.when(context.getTaskNameWithSubtasks())
			.thenReturn("Fake Task (" + fakeIndexOfThisSubtask + "/" + fakeTotalCountOfSubtasks + ")");
	Mockito.when(context.getUserCodeClassLoader())
			.thenReturn(Thread.currentThread().getContextClassLoader());
	Mockito.when(context.getMetricGroup()).thenReturn(new UnregisteredMetricsGroup());

	return context;
}
 
Example #23
Source Project: flink   Author: flink-tpc-ds   File: StreamingRuntimeContextTest.java    License: Apache License 2.0 5 votes vote down vote up
@SuppressWarnings("unchecked")
private static AbstractStreamOperator<?> createListPlainMockOp() throws Exception {

	AbstractStreamOperator<?> operatorMock = mock(AbstractStreamOperator.class);
	ExecutionConfig config = new ExecutionConfig();

	KeyedStateBackend keyedStateBackend = mock(KeyedStateBackend.class);

	DefaultKeyedStateStore keyedStateStore = new DefaultKeyedStateStore(keyedStateBackend, config);

	when(operatorMock.getExecutionConfig()).thenReturn(config);

	doAnswer(new Answer<ListState<String>>() {

		@Override
		public ListState<String> answer(InvocationOnMock invocationOnMock) throws Throwable {
			ListStateDescriptor<String> descr =
					(ListStateDescriptor<String>) invocationOnMock.getArguments()[2];

			AbstractKeyedStateBackend<Integer> backend = new MemoryStateBackend().createKeyedStateBackend(
				new DummyEnvironment("test_task", 1, 0),
				new JobID(),
				"test_op",
				IntSerializer.INSTANCE,
				1,
				new KeyGroupRange(0, 0),
				new KvStateRegistry().createTaskRegistry(new JobID(), new JobVertexID()),
				TtlTimeProvider.DEFAULT,
				new UnregisteredMetricsGroup(),
				Collections.emptyList(),
				new CloseableRegistry());
			backend.setCurrentKey(0);
			return backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, descr);
		}
	}).when(keyedStateBackend).getPartitionedState(Matchers.any(), any(TypeSerializer.class), any(ListStateDescriptor.class));

	when(operatorMock.getKeyedStateStore()).thenReturn(keyedStateStore);
	when(operatorMock.getOperatorID()).thenReturn(new OperatorID());
	return operatorMock;
}
 
Example #24
Source Project: Flink-CEPplus   Author: ljygz   File: CollectionExecutor.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Executes a data sink in collection mode: first executes the sink's input, then
 * pushes the produced elements into the sink.
 *
 * <p>A runtime context (with a no-op metric group) is built only when the sink's
 * user code is a {@link RichOutputFormat}; plain output formats receive a
 * {@code null} context.
 *
 * @param sink the generic data sink to execute
 * @param superStep the current superstep; 0 means "not inside an iteration"
 * @throws Exception if executing the input or the sink fails
 */
private <IN> void executeDataSink(GenericDataSinkBase<?> sink, int superStep) throws Exception {
	Operator<?> inputOp = sink.getInput();
	if (inputOp == null) {
		throw new InvalidProgramException("The data sink " + sink.getName() + " has no input.");
	}

	@SuppressWarnings("unchecked")
	List<IN> input = (List<IN>) execute(inputOp);

	@SuppressWarnings("unchecked")
	GenericDataSinkBase<IN> typedSink = (GenericDataSinkBase<IN>) sink;

	// Single-subtask task info: parallelism 1, subtask index 0, attempt 0.
	TaskInfo taskInfo = new TaskInfo(typedSink.getName(), 1, 0, 1, 0);

	RuntimeUDFContext context = null;
	if (RichOutputFormat.class.isAssignableFrom(typedSink.getUserCodeWrapper().getUserCodeClass())) {
		// Rich output formats need a runtime context; metrics are discarded here.
		MetricGroup metricGroup = new UnregisteredMetricsGroup();
		if (superStep == 0) {
			context = new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		} else {
			context = new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		}
	}

	typedSink.executeOnCollections(input, context, executionConfig);
}
 
Example #25
Source Project: Flink-CEPplus   Author: ljygz   File: CollectionExecutor.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Executes a single-input operator in collection mode and returns its output.
 *
 * <p>For rich functions a runtime context (with a no-op metric group) is built and
 * all broadcast inputs are evaluated and registered on it; non-rich functions run
 * with a {@code null} context.
 *
 * @param operator the single-input operator to execute
 * @param superStep the current superstep; 0 means "not inside an iteration"
 * @return the operator's output elements
 * @throws Exception if executing the input, a broadcast input, or the operator fails
 */
private <IN, OUT> List<OUT> executeUnaryOperator(SingleInputOperator<?, ?, ?> operator, int superStep) throws Exception {
	Operator<?> inputOp = operator.getInput();
	if (inputOp == null) {
		throw new InvalidProgramException("The unary operation " + operator.getName() + " has no input.");
	}

	@SuppressWarnings("unchecked")
	List<IN> inputData = (List<IN>) execute(inputOp, superStep);

	@SuppressWarnings("unchecked")
	SingleInputOperator<IN, OUT, ?> typedOp = (SingleInputOperator<IN, OUT, ?>) operator;

	// Single-subtask task info: parallelism 1, subtask index 0, attempt 0.
	TaskInfo taskInfo = new TaskInfo(typedOp.getName(), 1, 0, 1, 0);

	RuntimeUDFContext context = null;
	if (RichFunction.class.isAssignableFrom(typedOp.getUserCodeWrapper().getUserCodeClass())) {
		// Rich functions need a runtime context; metrics are discarded here.
		MetricGroup metricGroup = new UnregisteredMetricsGroup();
		if (superStep == 0) {
			context = new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		} else {
			context = new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		}

		// Evaluate each broadcast input and make it available to the function.
		for (Map.Entry<String, Operator<?>> bcInputs : operator.getBroadcastInputs().entrySet()) {
			List<?> bcData = execute(bcInputs.getValue());
			context.setBroadcastVariable(bcInputs.getKey(), bcData);
		}
	}

	return typedOp.executeOnCollections(inputData, context, executionConfig);
}
 
Example #26
Source Project: Flink-CEPplus   Author: ljygz   File: CollectionExecutor.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Executes a dual-input operator in collection mode and returns its output.
 *
 * <p>Both inputs are executed first; for rich functions a runtime context (with a
 * no-op metric group) is built and all broadcast inputs are evaluated and
 * registered on it, while non-rich functions run with a {@code null} context.
 *
 * @param operator the dual-input operator to execute
 * @param superStep the current superstep; 0 means "not inside an iteration"
 * @return the operator's output elements
 * @throws Exception if executing an input, a broadcast input, or the operator fails
 */
private <IN1, IN2, OUT> List<OUT> executeBinaryOperator(DualInputOperator<?, ?, ?, ?> operator, int superStep) throws Exception {
	Operator<?> inputOp1 = operator.getFirstInput();
	Operator<?> inputOp2 = operator.getSecondInput();

	if (inputOp1 == null) {
		throw new InvalidProgramException("The binary operation " + operator.getName() + " has no first input.");
	}
	if (inputOp2 == null) {
		throw new InvalidProgramException("The binary operation " + operator.getName() + " has no second input.");
	}

	// compute inputs
	@SuppressWarnings("unchecked")
	List<IN1> inputData1 = (List<IN1>) execute(inputOp1, superStep);
	@SuppressWarnings("unchecked")
	List<IN2> inputData2 = (List<IN2>) execute(inputOp2, superStep);

	@SuppressWarnings("unchecked")
	DualInputOperator<IN1, IN2, OUT, ?> typedOp = (DualInputOperator<IN1, IN2, OUT, ?>) operator;

	// Single-subtask task info: parallelism 1, subtask index 0, attempt 0.
	TaskInfo taskInfo = new TaskInfo(typedOp.getName(), 1, 0, 1, 0);

	RuntimeUDFContext context = null;
	if (RichFunction.class.isAssignableFrom(typedOp.getUserCodeWrapper().getUserCodeClass())) {
		// Rich functions need a runtime context; metrics are discarded here.
		MetricGroup metricGroup = new UnregisteredMetricsGroup();
		if (superStep == 0) {
			context = new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		} else {
			context = new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metricGroup);
		}

		// Evaluate each broadcast input and make it available to the function.
		for (Map.Entry<String, Operator<?>> bcInputs : operator.getBroadcastInputs().entrySet()) {
			List<?> bcData = execute(bcInputs.getValue());
			context.setBroadcastVariable(bcInputs.getKey(), bcData);
		}
	}

	return typedOp.executeOnCollections(inputData1, inputData2, context, executionConfig);
}
 
Example #27
Source Project: flink   Author: flink-tpc-ds   File: SingleInputGateTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates {@code numberOfGates} input gates on the given shuffle environment, each
 * consuming the same set of {@code numberOfLocalChannels} channel descriptors, and
 * returns them indexed by their gate ID.
 *
 * <p>Metrics are routed to an unregistered (no-op) metric group via the shuffle
 * I/O owner context.
 *
 * @param network the shuffle environment to create the gates on
 * @param numberOfGates how many gates to create
 * @param numberOfLocalChannels how many channels each gate consumes
 * @return map from gate ID to the created gate
 */
private static Map<InputGateID, SingleInputGate> createInputGateWithLocalChannels(
		NettyShuffleEnvironment network,
		int numberOfGates,
		@SuppressWarnings("SameParameterValue") int numberOfLocalChannels) {
	// Every channel points at a fresh partition on a generated resource.
	final ShuffleDescriptor[] channelDescriptors = new NettyShuffleDescriptor[numberOfLocalChannels];
	for (int channel = 0; channel < numberOfLocalChannels; channel++) {
		channelDescriptors[channel] =
			createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), ResourceID.generate());
	}

	final InputGateDeploymentDescriptor[] gateDescriptors = new InputGateDeploymentDescriptor[numberOfGates];
	final IntermediateDataSetID[] dataSetIds = new IntermediateDataSetID[numberOfGates];
	for (int gate = 0; gate < numberOfGates; gate++) {
		dataSetIds[gate] = new IntermediateDataSetID();
		gateDescriptors[gate] = new InputGateDeploymentDescriptor(
			dataSetIds[gate],
			ResultPartitionType.PIPELINED,
			0,
			channelDescriptors);
	}

	final ExecutionAttemptID consumerID = new ExecutionAttemptID();
	final SingleInputGate[] gates = network.createInputGates(
		network.createShuffleIOOwnerContext("", consumerID, new UnregisteredMetricsGroup()),
		SingleInputGateBuilder.NO_OP_PRODUCER_CHECKER,
		Arrays.asList(gateDescriptors)).toArray(new SingleInputGate[] {});

	final Map<InputGateID, SingleInputGate> gatesById = new HashMap<>();
	for (int gate = 0; gate < numberOfGates; gate++) {
		gatesById.put(new InputGateID(dataSetIds[gate], consumerID), gates[gate]);
	}
	return gatesById;
}
 
Example #28
Source Project: Flink-CEPplus   Author: ljygz   File: RuntimeUDFContextTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Verifies basic broadcast-variable behavior: two variables can be registered,
 * reported as present, and read back repeatedly with unchanged contents.
 *
 * <p>Fix: the original wrapped the body in {@code try/catch} and called
 * {@code fail(e.getMessage())}, which discards the stack trace; declaring
 * {@code throws Exception} lets the runner report the full failure.
 *
 * @throws Exception if creating the context or reading a broadcast variable fails
 */
@Test
public void testBroadcastVariableSimple() throws Exception {
	RuntimeUDFContext ctx = new RuntimeUDFContext(
			taskInfo, getClass().getClassLoader(), new ExecutionConfig(),
			new HashMap<>(),
			new HashMap<>(),
			new UnregisteredMetricsGroup());

	ctx.setBroadcastVariable("name1", Arrays.asList(1, 2, 3, 4));
	ctx.setBroadcastVariable("name2", Arrays.asList(1.0, 2.0, 3.0, 4.0));

	assertTrue(ctx.hasBroadcastVariable("name1"));
	assertTrue(ctx.hasBroadcastVariable("name2"));

	List<Integer> list1 = ctx.getBroadcastVariable("name1");
	List<Double> list2 = ctx.getBroadcastVariable("name2");

	assertEquals(Arrays.asList(1, 2, 3, 4), list1);
	assertEquals(Arrays.asList(1.0, 2.0, 3.0, 4.0), list2);

	// access again
	List<Integer> list3 = ctx.getBroadcastVariable("name1");
	List<Double> list4 = ctx.getBroadcastVariable("name2");

	assertEquals(Arrays.asList(1, 2, 3, 4), list3);
	assertEquals(Arrays.asList(1.0, 2.0, 3.0, 4.0), list4);

	// and again ;-)
	List<Integer> list5 = ctx.getBroadcastVariable("name1");
	List<Double> list6 = ctx.getBroadcastVariable("name2");

	assertEquals(Arrays.asList(1, 2, 3, 4), list5);
	assertEquals(Arrays.asList(1.0, 2.0, 3.0, 4.0), list6);
}
 
Example #29
Source Project: Flink-CEPplus   Author: ljygz   File: RuntimeUDFContextTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that a broadcast variable read through an initializer is materialized
 * once and then cached: subsequent reads — with or without the initializer —
 * return the already-converted list.
 *
 * <p>Fix: the original wrapped the body in {@code try/catch} and called
 * {@code fail(e.getMessage())}, which discards the stack trace; declaring
 * {@code throws Exception} lets the runner report the full failure.
 *
 * @throws Exception if creating the context or reading the broadcast variable fails
 */
@Test
public void testBroadcastVariableWithInitializer() throws Exception {
	RuntimeUDFContext ctx = new RuntimeUDFContext(
			taskInfo, getClass().getClassLoader(), new ExecutionConfig(),
			new HashMap<>(),
			new HashMap<>(),
			new UnregisteredMetricsGroup());

	ctx.setBroadcastVariable("name", Arrays.asList(1, 2, 3, 4));

	// access it the first time with an initializer
	List<Double> list = ctx.getBroadcastVariableWithInitializer("name", new ConvertingInitializer());
	assertEquals(Arrays.asList(1.0, 2.0, 3.0, 4.0), list);

	// access it the second time with an initializer (which might not get executed)
	List<Double> list2 = ctx.getBroadcastVariableWithInitializer("name", new ConvertingInitializer());
	assertEquals(Arrays.asList(1.0, 2.0, 3.0, 4.0), list2);

	// access it the third time without an initializer (should work by "chance", because the result is a list)
	List<Double> list3 = ctx.getBroadcastVariable("name");
	assertEquals(Arrays.asList(1.0, 2.0, 3.0, 4.0), list3);
}
 
Example #30
Source Project: flink   Author: flink-tpc-ds   File: ExecutionGraphDeploymentTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds an execution graph for an empty test job that has checkpointing enabled.
 *
 * <p>Checkpointing is configured with a 100 ms interval, a 10-minute timeout, no
 * minimum pause, at most one concurrent checkpoint, and no retention after
 * termination. The rest are test doubles: a programmed slot provider with
 * parallelism 1, a no-restart strategy, an unregistered (no-op) metric group,
 * the Netty shuffle master, and a no-op partition tracker.
 *
 * @param configuration the job-manager configuration to build with
 * @return the built (not yet started) execution graph
 * @throws Exception if building the graph fails
 */
private ExecutionGraph createExecutionGraph(Configuration configuration) throws Exception {
	final ScheduledExecutorService executor = TestingUtils.defaultExecutor();

	final JobID jobId = new JobID();
	final JobGraph jobGraph = new JobGraph(jobId, "test");
	jobGraph.setSnapshotSettings(
		new JobCheckpointingSettings(
			Collections.<JobVertexID>emptyList(),
			Collections.<JobVertexID>emptyList(),
			Collections.<JobVertexID>emptyList(),
			new CheckpointCoordinatorConfiguration(
				100,
				10 * 60 * 1000,
				0,
				1,
				CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION,
				false,
				false,
				0),
			null));

	final Time timeout = Time.seconds(10L);
	return ExecutionGraphBuilder.buildGraph(
		null,
		jobGraph,
		configuration,
		executor,
		executor,
		new ProgrammedSlotProvider(1),
		getClass().getClassLoader(),
		new StandaloneCheckpointRecoveryFactory(),
		timeout,
		new NoRestartStrategy(),
		new UnregisteredMetricsGroup(),
		blobWriter,
		timeout,
		LoggerFactory.getLogger(getClass()),
		NettyShuffleMaster.INSTANCE,
		NoOpPartitionTracker.INSTANCE);
}