org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer Java Examples

The following examples show how to use org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer. Each example is taken from an open source project; the source file, originating project, and license are noted above the code.
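StreamElementSerializer wraps the TypeSerializer of the record payload and multiplexes StreamRecords, watermarks, and other stream elements into a single wire format. The minimal sketch below is not taken from the projects listed here; it assumes Flink's DataOutputSerializer and DataInputDeserializer memory views and the standard LongSerializer, and shows how a record and a watermark can be round-tripped through the serializer:

import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

public class StreamElementSerializerRoundTrip {

	public static void main(String[] args) throws Exception {
		// Wrap the serializer of the payload type (here: Long).
		StreamElementSerializer<Long> serializer =
				new StreamElementSerializer<>(LongSerializer.INSTANCE);

		// Serialize a timestamped record and a watermark into the same buffer.
		DataOutputSerializer out = new DataOutputSerializer(64);
		serializer.serialize(new StreamRecord<>(42L, 1_000L), out);
		serializer.serialize(new Watermark(1_000L), out);

		// Deserialize them back; the tag written by the serializer restores the element kind.
		DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
		StreamRecord<Long> record = serializer.deserialize(in).asRecord();
		Watermark watermark = serializer.deserialize(in).asWatermark();

		System.out.println(record.getValue() + " @ " + record.getTimestamp());
		System.out.println("watermark @ " + watermark.getTimestamp());
	}
}
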
Example #1
Source File: RecordWriterOutput.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
public RecordWriterOutput(
		RecordWriter<SerializationDelegate<StreamRecord<OUT>>> recordWriter,
		TypeSerializer<OUT> outSerializer,
		OutputTag outputTag,
		StreamStatusProvider streamStatusProvider) {

	checkNotNull(recordWriter);
	this.outputTag = outputTag;
	// generic hack: cast the writer to generic Object type so we can use it
	// with multiplexed records and watermarks
	this.recordWriter = (RecordWriter<SerializationDelegate<StreamElement>>)
			(RecordWriter<?>) recordWriter;

	TypeSerializer<StreamElement> outRecordSerializer =
			new StreamElementSerializer<>(outSerializer);

	if (outSerializer != null) {
		serializationDelegate = new SerializationDelegate<StreamElement>(outRecordSerializer);
	}

	this.streamStatusProvider = checkNotNull(streamStatusProvider);
}
 
Example #2
Source File: AvroSerializerMigrationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void compositeSerializerFromFlink_1_6_WithNestedAvroSerializer() throws IOException {
	String streamElementSerializerBase64 = "AAAAAQAAAq2s7QAFc3IAR29yZy5hcGFjaGUuZmxpbmsuc3RyZWFtaW5nLnJ1bnRpbWUuc3RyZWFtcmVj\n" +
		"b3JkLlN0cmVhbUVsZW1lbnRTZXJpYWxpemVyAAAAAAAAAAECAAFMAA50eXBlU2VyaWFsaXplcnQANkxv\n" +
		"cmcvYXBhY2hlL2ZsaW5rL2FwaS9jb21tb24vdHlwZXV0aWxzL1R5cGVTZXJpYWxpemVyO3hyADRvcmcu\n" +
		"YXBhY2hlLmZsaW5rLmFwaS5jb21tb24udHlwZXV0aWxzLlR5cGVTZXJpYWxpemVyAAAAAAAAAAECAAB4\n" +
		"cHNyADZvcmcuYXBhY2hlLmZsaW5rLmZvcm1hdHMuYXZyby50eXBldXRpbHMuQXZyb1NlcmlhbGl6ZXIA\n" +
		"AAAAAAAAAQIAAkwADHNjaGVtYVN0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO0wABHR5cGV0ABFMamF2\n" +
		"YS9sYW5nL0NsYXNzO3hxAH4AAnQBAXsidHlwZSI6InJlY29yZCIsIm5hbWUiOiJBZGRyZXNzIiwibmFt\n" +
		"ZXNwYWNlIjoib3JnLmFwYWNoZS5mbGluay5mb3JtYXRzLmF2cm8uZ2VuZXJhdGVkIiwiZmllbGRzIjpb\n" +
		"eyJuYW1lIjoibnVtIiwidHlwZSI6ImludCJ9LHsibmFtZSI6InN0cmVldCIsInR5cGUiOiJzdHJpbmci\n" +
		"fSx7Im5hbWUiOiJjaXR5IiwidHlwZSI6InN0cmluZyJ9LHsibmFtZSI6InN0YXRlIiwidHlwZSI6InN0\n" +
		"cmluZyJ9LHsibmFtZSI6InppcCIsInR5cGUiOiJzdHJpbmcifV19dnIAJW9yZy5hcGFjaGUuYXZyby5n\n" +
		"ZW5lcmljLkdlbmVyaWNSZWNvcmQAAAAAAAAAAAAAAHhw";

	StreamElementSerializer<?> ser = (StreamElementSerializer<?>) javaDeserialize(streamElementSerializerBase64);
	TypeSerializer<?> containedTypeSerializer = ser.getContainedTypeSerializer();

	assertThat(containedTypeSerializer, instanceOf(AvroSerializer.class));

	AvroSerializer<?> avroSerializer = (AvroSerializer<?>) containedTypeSerializer;
	assertSame(avroSerializer.getType(), GenericRecord.class);
	assertThat(avroSerializer.getAvroSchema(), is(Address.SCHEMA$));
}
 
Example #3
Source File: StreamTaskTestHarness.java    From Flink-CEPplus with Apache License 2.0
public StreamTaskTestHarness(
	Function<Environment, ? extends StreamTask<OUT, ?>> taskFactory,
	TypeInformation<OUT> outputType,
	LocalRecoveryConfig localRecoveryConfig) {
	this.taskFactory = checkNotNull(taskFactory);
	this.memorySize = DEFAULT_MEMORY_MANAGER_SIZE;
	this.bufferSize = DEFAULT_NETWORK_BUFFER_SIZE;

	this.jobConfig = new Configuration();
	this.taskConfig = new Configuration();
	this.executionConfig = new ExecutionConfig();

	streamConfig = new StreamConfig(taskConfig);

	outputSerializer = outputType.createSerializer(executionConfig);
	outputStreamRecordSerializer = new StreamElementSerializer<OUT>(outputSerializer);

	this.taskStateManager = new TestTaskStateManager(localRecoveryConfig);
}
 
Example #4
Source File: AsyncWaitOperator.java    From flink with Apache License 2.0
@Override
public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) {
	super.setup(containingTask, config, output);

	this.inStreamElementSerializer = new StreamElementSerializer<>(
		getOperatorConfig().<IN>getTypeSerializerIn1(getUserCodeClassloader()));

	switch (outputMode) {
		case ORDERED:
			queue = new OrderedStreamElementQueue<>(capacity);
			break;
		case UNORDERED:
			queue = new UnorderedStreamElementQueue<>(capacity);
			break;
		default:
			throw new IllegalStateException("Unknown async mode: " + outputMode + '.');
	}

	this.timestampedCollector = new TimestampedCollector<>(output);
}
 
Example #5
Source File: RecordWriterOutput.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
public RecordWriterOutput(
		RecordWriter<SerializationDelegate<StreamRecord<OUT>>> recordWriter,
		TypeSerializer<OUT> outSerializer,
		OutputTag outputTag,
		StreamStatusProvider streamStatusProvider) {

	checkNotNull(recordWriter);
	this.outputTag = outputTag;
	// generic hack: cast the writer to generic Object type so we can use it
	// with multiplexed records and watermarks
	this.recordWriter = (RecordWriter<SerializationDelegate<StreamElement>>)
			(RecordWriter<?>) recordWriter;

	TypeSerializer<StreamElement> outRecordSerializer =
			new StreamElementSerializer<>(outSerializer);

	if (outSerializer != null) {
		serializationDelegate = new SerializationDelegate<StreamElement>(outRecordSerializer);
	}

	this.streamStatusProvider = checkNotNull(streamStatusProvider);
}
 
Example #6
Source File: StreamTaskNetworkInput.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
public StreamTaskNetworkInput(
		CheckpointedInputGate checkpointedInputGate,
		TypeSerializer<?> inputSerializer,
		IOManager ioManager,
		StatusWatermarkValve statusWatermarkValve,
		int inputIndex) {
	this.checkpointedInputGate = checkpointedInputGate;
	this.deserializationDelegate = new NonReusingDeserializationDelegate<>(
		new StreamElementSerializer<>(inputSerializer));

	// Initialize one deserializer per input channel
	this.recordDeserializers = new SpillingAdaptiveSpanningRecordDeserializer[checkpointedInputGate.getNumberOfInputChannels()];
	for (int i = 0; i < recordDeserializers.length; i++) {
		recordDeserializers[i] = new SpillingAdaptiveSpanningRecordDeserializer<>(
			ioManager.getSpillingDirectoriesPaths());
	}

	this.statusWatermarkValve = checkNotNull(statusWatermarkValve);
	this.inputIndex = inputIndex;
	this.channelIndexes = getChannelIndexes(checkpointedInputGate);
}
 
Example #7
Source File: StreamTaskNetworkInput.java    From flink with Apache License 2.0
@VisibleForTesting
StreamTaskNetworkInput(
	CheckpointedInputGate checkpointedInputGate,
	TypeSerializer<?> inputSerializer,
	StatusWatermarkValve statusWatermarkValve,
	int inputIndex,
	RecordDeserializer<DeserializationDelegate<StreamElement>>[] recordDeserializers) {

	this.checkpointedInputGate = checkpointedInputGate;
	this.deserializationDelegate = new NonReusingDeserializationDelegate<>(
		new StreamElementSerializer<>(inputSerializer));
	this.recordDeserializers = recordDeserializers;
	this.statusWatermarkValve = statusWatermarkValve;
	this.inputIndex = inputIndex;
	this.channelIndexes = getChannelIndexes(checkpointedInputGate);
}
 
Example #8
Source File: StreamTaskTestHarness.java    From flink with Apache License 2.0
public StreamTaskTestHarness(
	Function<Environment, ? extends StreamTask<OUT, ?>> taskFactory,
	TypeInformation<OUT> outputType,
	LocalRecoveryConfig localRecoveryConfig) {
	this.taskFactory = checkNotNull(taskFactory);
	this.memorySize = DEFAULT_MEMORY_MANAGER_SIZE;
	this.bufferSize = DEFAULT_NETWORK_BUFFER_SIZE;

	this.jobConfig = new Configuration();
	this.taskConfig = new Configuration();
	this.executionConfig = new ExecutionConfig();

	streamConfig = new StreamConfig(taskConfig);

	outputSerializer = outputType.createSerializer(executionConfig);
	outputStreamRecordSerializer = new StreamElementSerializer<OUT>(outputSerializer);

	this.taskStateManager = new TestTaskStateManager(localRecoveryConfig);
}
 
Example #9
Source File: StreamTaskTestHarness.java    From flink with Apache License 2.0
public StreamTaskTestHarness(
	FunctionWithException<Environment, ? extends StreamTask<OUT, ?>, Exception> taskFactory,
	TypeInformation<OUT> outputType,
	LocalRecoveryConfig localRecoveryConfig) {
	this.taskFactory = checkNotNull(taskFactory);
	this.memorySize = DEFAULT_MEMORY_MANAGER_SIZE;
	this.bufferSize = DEFAULT_NETWORK_BUFFER_SIZE;

	this.jobConfig = new Configuration();
	this.taskConfig = new Configuration();
	this.executionConfig = new ExecutionConfig();

	streamConfig = new StreamConfig(taskConfig);
	streamConfig.setBufferTimeout(0);

	outputSerializer = outputType.createSerializer(executionConfig);
	outputStreamRecordSerializer = new StreamElementSerializer<>(outputSerializer);

	this.taskStateManager = new TestTaskStateManager(localRecoveryConfig);
}
 
Example #10
Source File: StreamTaskNetworkInput.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
public StreamTaskNetworkInput(
		CheckpointedInputGate checkpointedInputGate,
		TypeSerializer<?> inputSerializer,
		IOManager ioManager,
		int inputIndex) {
	this.checkpointedInputGate = checkpointedInputGate;
	this.deserializationDelegate = new NonReusingDeserializationDelegate<>(
		new StreamElementSerializer<>(inputSerializer));

	// Initialize one deserializer per input channel
	this.recordDeserializers = new SpillingAdaptiveSpanningRecordDeserializer[checkpointedInputGate.getNumberOfInputChannels()];
	for (int i = 0; i < recordDeserializers.length; i++) {
		recordDeserializers[i] = new SpillingAdaptiveSpanningRecordDeserializer<>(
			ioManager.getSpillingDirectoriesPaths());
	}

	this.inputIndex = inputIndex;
}
 
Example #11
Source File: RecordWriterOutput.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
public RecordWriterOutput(
		RecordWriter<SerializationDelegate<StreamRecord<OUT>>> recordWriter,
		TypeSerializer<OUT> outSerializer,
		OutputTag outputTag,
		StreamStatusProvider streamStatusProvider) {

	checkNotNull(recordWriter);
	this.outputTag = outputTag;
	// generic hack: cast the writer to generic Object type so we can use it
	// with multiplexed records and watermarks
	this.recordWriter = (RecordWriter<SerializationDelegate<StreamElement>>)
			(RecordWriter<?>) recordWriter;

	TypeSerializer<StreamElement> outRecordSerializer =
			new StreamElementSerializer<>(outSerializer);

	if (outSerializer != null) {
		serializationDelegate = new SerializationDelegate<StreamElement>(outRecordSerializer);
	}

	this.streamStatusProvider = checkNotNull(streamStatusProvider);
}
 
Example #12
Source File: AvroSerializerMigrationTest.java    From flink with Apache License 2.0
@Test
public void compositeSerializerFromFlink_1_6_WithNestedAvroSerializer() throws IOException {
	String streamElementSerializerBase64 = "AAAAAQAAAq2s7QAFc3IAR29yZy5hcGFjaGUuZmxpbmsuc3RyZWFtaW5nLnJ1bnRpbWUuc3RyZWFtcmVj\n" +
		"b3JkLlN0cmVhbUVsZW1lbnRTZXJpYWxpemVyAAAAAAAAAAECAAFMAA50eXBlU2VyaWFsaXplcnQANkxv\n" +
		"cmcvYXBhY2hlL2ZsaW5rL2FwaS9jb21tb24vdHlwZXV0aWxzL1R5cGVTZXJpYWxpemVyO3hyADRvcmcu\n" +
		"YXBhY2hlLmZsaW5rLmFwaS5jb21tb24udHlwZXV0aWxzLlR5cGVTZXJpYWxpemVyAAAAAAAAAAECAAB4\n" +
		"cHNyADZvcmcuYXBhY2hlLmZsaW5rLmZvcm1hdHMuYXZyby50eXBldXRpbHMuQXZyb1NlcmlhbGl6ZXIA\n" +
		"AAAAAAAAAQIAAkwADHNjaGVtYVN0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO0wABHR5cGV0ABFMamF2\n" +
		"YS9sYW5nL0NsYXNzO3hxAH4AAnQBAXsidHlwZSI6InJlY29yZCIsIm5hbWUiOiJBZGRyZXNzIiwibmFt\n" +
		"ZXNwYWNlIjoib3JnLmFwYWNoZS5mbGluay5mb3JtYXRzLmF2cm8uZ2VuZXJhdGVkIiwiZmllbGRzIjpb\n" +
		"eyJuYW1lIjoibnVtIiwidHlwZSI6ImludCJ9LHsibmFtZSI6InN0cmVldCIsInR5cGUiOiJzdHJpbmci\n" +
		"fSx7Im5hbWUiOiJjaXR5IiwidHlwZSI6InN0cmluZyJ9LHsibmFtZSI6InN0YXRlIiwidHlwZSI6InN0\n" +
		"cmluZyJ9LHsibmFtZSI6InppcCIsInR5cGUiOiJzdHJpbmcifV19dnIAJW9yZy5hcGFjaGUuYXZyby5n\n" +
		"ZW5lcmljLkdlbmVyaWNSZWNvcmQAAAAAAAAAAAAAAHhw";

	StreamElementSerializer<?> ser = (StreamElementSerializer<?>) javaDeserialize(streamElementSerializerBase64);
	TypeSerializer<?> containedTypeSerializer = ser.getContainedTypeSerializer();

	assertThat(containedTypeSerializer, instanceOf(AvroSerializer.class));

	AvroSerializer<?> avroSerializer = (AvroSerializer<?>) containedTypeSerializer;
	assertSame(avroSerializer.getType(), GenericRecord.class);
	assertThat(avroSerializer.getAvroSchema(), is(Address.SCHEMA$));
}
 
Example #13
Source File: StreamTaskNetworkInputTest.java    From flink with Apache License 2.0
private void serializeRecord(long value, BufferBuilder bufferBuilder) throws IOException {
	RecordSerializer<SerializationDelegate<StreamElement>> serializer = new SpanningRecordSerializer<>();
	SerializationDelegate<StreamElement> serializationDelegate =
		new SerializationDelegate<>(
			new StreamElementSerializer<>(LongSerializer.INSTANCE));
	serializationDelegate.setInstance(new StreamRecord<>(value));
	serializer.serializeRecord(serializationDelegate);

	assertFalse(serializer.copyToBufferBuilder(bufferBuilder).isFullBuffer());
}
 
Example #14
Source File: EvictingWindowOperatorContractTest.java    From flink with Apache License 2.0
protected <W extends Window, OUT> KeyedOneInputStreamOperatorTestHarness<Integer, Integer, OUT> createWindowOperator(
		WindowAssigner<Integer, W> assigner,
		Trigger<Integer, W> trigger,
		long allowedLateness,
		InternalWindowFunction<Iterable<Integer>, OUT, Integer, W> windowFunction,
		OutputTag<Integer> lateOutputTag) throws Exception {

	KeySelector<Integer, Integer> keySelector = new KeySelector<Integer, Integer>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Integer getKey(Integer value) throws Exception {
			return value;
		}
	};

	ListStateDescriptor<StreamRecord<Integer>> intListDescriptor =
			new ListStateDescriptor<>(
					"int-list",
					(TypeSerializer<StreamRecord<Integer>>) new StreamElementSerializer(IntSerializer.INSTANCE));

	@SuppressWarnings("unchecked")
	EvictingWindowOperator<Integer, Integer, OUT, W> operator = new EvictingWindowOperator<>(
			assigner,
			assigner.getWindowSerializer(new ExecutionConfig()),
			keySelector,
			IntSerializer.INSTANCE,
			intListDescriptor,
			windowFunction,
			trigger,
			CountEvictor.<W>of(100),
			allowedLateness,
			lateOutputTag);

	return new KeyedOneInputStreamOperatorTestHarness<>(
			operator,
			keySelector,
			BasicTypeInfo.INT_TYPE_INFO);
}
 
Example #15
Source File: AbstractSiddhiOperator.java    From flink-siddhi with Apache License 2.0
protected StreamElementSerializer<IN> getStreamRecordSerializer(String streamId) {
    if (streamRecordSerializers.containsKey(streamId)) {
        return streamRecordSerializers.get(streamId);
    } else {
        throw new UndefinedStreamException("Stream " + streamId + " not defined");
    }
}
 
Example #16
Source File: AbstractSiddhiOperator.java    From bahir-flink with Apache License 2.0
protected StreamElementSerializer<IN> getStreamRecordSerializer(String streamId) {
    if (streamRecordSerializers.containsKey(streamId)) {
        return streamRecordSerializers.get(streamId);
    } else {
        throw new UndefinedStreamException("Stream " + streamId + " not defined");
    }
}
 
Example #17
Source File: StreamTaskMailboxTestHarnessBuilder.java    From flink with Apache License 2.0
public StreamTaskMailboxTestHarness<OUT> build() throws Exception {
	streamConfig.setBufferTimeout(bufferTimeout);

	TestTaskStateManager taskStateManager = new TestTaskStateManager(localRecoveryConfig);
	if (taskStateSnapshots != null) {
		taskStateManager.setReportedCheckpointId(taskStateSnapshots.keySet().iterator().next());
		taskStateManager.setJobManagerTaskStateSnapshotsByCheckpointId(taskStateSnapshots);
	}

	StreamMockEnvironment streamMockEnvironment = new StreamMockEnvironment(
		jobConfig,
		taskConfig,
		executionConfig,
		memorySize,
		new MockInputSplitProvider(),
		bufferSize,
		taskStateManager);

	streamMockEnvironment.setCheckpointResponder(taskStateManager.getCheckpointResponder());
	initializeInputs(streamMockEnvironment);

	checkState(inputGates != null, "InputGates hasn't been initialised");

	StreamElementSerializer<OUT> outputStreamRecordSerializer = new StreamElementSerializer<>(outputSerializer);

	Queue<Object> outputList = new ArrayDeque<>();
	streamMockEnvironment.addOutput(outputList, outputStreamRecordSerializer);
	streamMockEnvironment.setTaskMetricGroup(taskMetricGroup);

	StreamTask<OUT, ?> task = taskFactory.apply(streamMockEnvironment);
	task.beforeInvoke();

	return new StreamTaskMailboxTestHarness<>(
		task,
		outputList,
		inputGates,
		streamMockEnvironment);
}
 
Example #18
Source File: EvictingWindowOperatorContractTest.java    From Flink-CEPplus with Apache License 2.0
protected <W extends Window, OUT> KeyedOneInputStreamOperatorTestHarness<Integer, Integer, OUT> createWindowOperator(
		WindowAssigner<Integer, W> assigner,
		Trigger<Integer, W> trigger,
		long allowedLateness,
		InternalWindowFunction<Iterable<Integer>, OUT, Integer, W> windowFunction,
		OutputTag<Integer> lateOutputTag) throws Exception {

	KeySelector<Integer, Integer> keySelector = new KeySelector<Integer, Integer>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Integer getKey(Integer value) throws Exception {
			return value;
		}
	};

	ListStateDescriptor<StreamRecord<Integer>> intListDescriptor =
			new ListStateDescriptor<>(
					"int-list",
					(TypeSerializer<StreamRecord<Integer>>) new StreamElementSerializer(IntSerializer.INSTANCE));

	@SuppressWarnings("unchecked")
	EvictingWindowOperator<Integer, Integer, OUT, W> operator = new EvictingWindowOperator<>(
			assigner,
			assigner.getWindowSerializer(new ExecutionConfig()),
			keySelector,
			IntSerializer.INSTANCE,
			intListDescriptor,
			windowFunction,
			trigger,
			CountEvictor.<W>of(100),
			allowedLateness,
			lateOutputTag);

	return new KeyedOneInputStreamOperatorTestHarness<>(
			operator,
			keySelector,
			BasicTypeInfo.INT_TYPE_INFO);
}
 
Example #19
Source File: StreamTaskNetworkInputTest.java    From flink with Apache License 2.0
private void serializeRecord(long value, BufferBuilder bufferBuilder) throws IOException {
	RecordSerializer<SerializationDelegate<StreamElement>> serializer = new SpanningRecordSerializer<>();
	SerializationDelegate<StreamElement> serializationDelegate =
		new SerializationDelegate<>(
			new StreamElementSerializer<>(LongSerializer.INSTANCE));
	serializationDelegate.setInstance(new StreamRecord<>(value));
	serializer.serializeRecord(serializationDelegate);

	assertFalse(serializer.copyToBufferBuilder(bufferBuilder).isFullBuffer());
}
 
Example #20
Source File: EvictingWindowOperatorContractTest.java    From flink with Apache License 2.0
protected <W extends Window, OUT> KeyedOneInputStreamOperatorTestHarness<Integer, Integer, OUT> createWindowOperator(
		WindowAssigner<Integer, W> assigner,
		Trigger<Integer, W> trigger,
		long allowedLateness,
		InternalWindowFunction<Iterable<Integer>, OUT, Integer, W> windowFunction,
		OutputTag<Integer> lateOutputTag) throws Exception {

	KeySelector<Integer, Integer> keySelector = new KeySelector<Integer, Integer>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Integer getKey(Integer value) throws Exception {
			return value;
		}
	};

	ListStateDescriptor<StreamRecord<Integer>> intListDescriptor =
			new ListStateDescriptor<>(
					"int-list",
					(TypeSerializer<StreamRecord<Integer>>) new StreamElementSerializer(IntSerializer.INSTANCE));

	@SuppressWarnings("unchecked")
	EvictingWindowOperator<Integer, Integer, OUT, W> operator = new EvictingWindowOperator<>(
			assigner,
			assigner.getWindowSerializer(new ExecutionConfig()),
			keySelector,
			IntSerializer.INSTANCE,
			intListDescriptor,
			windowFunction,
			trigger,
			CountEvictor.<W>of(100),
			allowedLateness,
			lateOutputTag);

	return new KeyedOneInputStreamOperatorTestHarness<>(
			operator,
			keySelector,
			BasicTypeInfo.INT_TYPE_INFO);
}
 
Example #21
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testCountTriggerWithApply() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);

	final int windowSize = 4;
	final int windowSlide = 2;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
			(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
			new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
		GlobalWindows.create(),
		new GlobalWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
		CountTrigger.of(windowSlide),
		CountEvictor.of(windowSize),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// The global window actually ignores these timestamps...

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();

	Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
 
Example #22
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testTumblingWindowWithApply() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);

	final int windowSize = 4;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
			(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
			new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new EvictingWindowOperator<>(
		TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
		new TimeWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>(closeCalled)),
		EventTimeTrigger.create(),
		CountEvictor.of(windowSize),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 100));

	testHarness.processWatermark(new Watermark(1999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 1997));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 1998));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 2310)); // not late but more than 4
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 2310));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2310));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2310));

	testHarness.processWatermark(new Watermark(3999)); // the windows fire here; the evictor caps each window at windowSize elements

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	expectedOutput.add(new Watermark(1999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), 3999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 3999));
	expectedOutput.add(new Watermark(3999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(),
		new EvictingWindowOperatorTest.ResultSortComparator());
	testHarness.close();
}
 
Example #23
Source File: SiddhiStreamOperator.java    From bahir-flink with Apache License 2.0
@Override
protected StreamElementSerializer<Tuple2<String, IN>> createStreamRecordSerializer(StreamSchema streamSchema, ExecutionConfig executionConfig) {
    TypeInformation<Tuple2<String, IN>> tuple2TypeInformation = SiddhiTypeFactory.getStreamTupleTypeInformation((TypeInformation<IN>) streamSchema.getTypeInfo());
    return new StreamElementSerializer<>(tuple2TypeInformation.createSerializer(executionConfig));
}
 
Example #24
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testCountTriggerWithApply() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);

	final int windowSize = 4;
	final int windowSlide = 2;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
			(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
			new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
		GlobalWindows.create(),
		new GlobalWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
		CountTrigger.of(windowSlide),
		CountEvictor.of(windowSize),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// The global window actually ignores these timestamps...

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();

	Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
 
Example #25
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testCountTrigger() throws Exception {

	final int windowSize = 4;
	final int windowSlide = 2;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
			(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
			new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
			GlobalWindows.create(),
			new GlobalWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(
					new ReduceApplyWindowFunction<>(
							new SumReducer(),
							// on some versions of Java we seem to need the explicit type
							new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>())),
			CountTrigger.of(windowSlide),
			CountEvictor.of(windowSize),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// The global window actually ignores these timestamps...

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();
}
 
Example #26
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
/**
 * Tests DeltaEvictor, evictAfter behavior.
 */
@Test
public void testDeltaEvictorEvictAfter() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);
	final int triggerCount = 2;
	final boolean evictAfter = true;
	final int threshold = 2;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
		(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
		new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
		GlobalWindows.create(),
		new GlobalWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
		CountTrigger.of(triggerCount),
		DeltaEvictor.of(threshold, new DeltaFunction<Tuple2<String, Integer>>() {
			@Override
			public double getDelta(Tuple2<String, Integer> oldDataPoint, Tuple2<String, Integer> newDataPoint) {
				return newDataPoint.f1 - oldDataPoint.f1;
			}
		}, evictAfter),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), initialTime + 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 5), initialTime + 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), initialTime + 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), initialTime + 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 5), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 15), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 9), initialTime + 10999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 16), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 22), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();

	Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
 
Example #27
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
/**
 * Tests DeltaEvictor, evictBefore behavior.
 */
@Test
public void testDeltaEvictorEvictBefore() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);
	final int triggerCount = 2;
	final boolean evictAfter = false;
	final int threshold = 2;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
		(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
		new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
		GlobalWindows.create(),
		new GlobalWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
		CountTrigger.of(triggerCount),
		DeltaEvictor.of(threshold, new DeltaFunction<Tuple2<String, Integer>>() {
			@Override
			public double getDelta(Tuple2<String, Integer> oldDataPoint, Tuple2<String, Integer> newDataPoint) {
				return newDataPoint.f1 - oldDataPoint.f1;
			}
		}, evictAfter),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), initialTime + 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 5), initialTime + 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), initialTime + 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), initialTime + 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 11), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), initialTime + 10999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), initialTime + 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 8), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 10), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();

	Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
 
Example #28
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
/**
 * Tests the time evictor when the StreamRecords carry no timestamp information.
 * In that case no element is evicted from the window.
 */
@Test
public void testTimeEvictorNoTimestamp() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);
	final int triggerCount = 2;
	final boolean evictAfter = true;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
		(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
		new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
		GlobalWindows.create(),
		new GlobalWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
		CountTrigger.of(triggerCount),
		TimeEvictor.of(Time.seconds(2), evictAfter),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();

	Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
 
Example #29
Source File: StreamTestSingleInputGate.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
private void setupInputChannels() {

	for (int i = 0; i < numInputChannels; i++) {
		final int channelIndex = i;
		final RecordSerializer<SerializationDelegate<Object>> recordSerializer = new SpanningRecordSerializer<SerializationDelegate<Object>>();
		final SerializationDelegate<Object> delegate = (SerializationDelegate<Object>) (SerializationDelegate<?>)
			new SerializationDelegate<>(new StreamElementSerializer<T>(serializer));

		inputQueues[channelIndex] = new ConcurrentLinkedQueue<>();
		inputChannels[channelIndex] = new TestInputChannel(inputGate, i);

		final BufferAndAvailabilityProvider answer = () -> {
			ConcurrentLinkedQueue<InputValue<Object>> inputQueue = inputQueues[channelIndex];
			InputValue<Object> input;
			boolean moreAvailable;
			synchronized (inputQueue) {
				input = inputQueue.poll();
				moreAvailable = !inputQueue.isEmpty();
			}
			if (input != null && input.isStreamEnd()) {
				inputChannels[channelIndex].setReleased();
				return Optional.of(new BufferAndAvailability(EventSerializer.toBuffer(EndOfPartitionEvent.INSTANCE), moreAvailable, 0));
			} else if (input != null && input.isStreamRecord()) {
				Object inputElement = input.getStreamRecord();

				delegate.setInstance(inputElement);
				recordSerializer.serializeRecord(delegate);
				BufferBuilder bufferBuilder = createBufferBuilder(bufferSize);
				BufferConsumer bufferConsumer = bufferBuilder.createBufferConsumer();
				recordSerializer.copyToBufferBuilder(bufferBuilder);
				bufferBuilder.finish();

				// bufferConsumer.build() returns the written buffer with its size set
				return Optional.of(new BufferAndAvailability(bufferConsumer.build(), moreAvailable, 0));
			} else if (input != null && input.isEvent()) {
				AbstractEvent event = input.getEvent();
				if (event instanceof EndOfPartitionEvent) {
					inputChannels[channelIndex].setReleased();
				}

				return Optional.of(new BufferAndAvailability(EventSerializer.toBuffer(event), moreAvailable, 0));
			} else {
				return Optional.empty();
			}
		};

		inputChannels[channelIndex].addBufferAndAvailability(answer);
	}
	inputGate.setInputChannels(inputChannels);
}
 
Example #30
Source File: EvictingWindowOperatorTest.java    From flink with Apache License 2.0
/**
 * Tests TimeEvictor evictBefore behavior.
 */
@Test
public void testTimeEvictorEvictBefore() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);
	final int triggerCount = 2;
	final int windowSize = 4;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
		(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
		new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new EvictingWindowOperator<>(
		TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
		new TimeWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>(closeCalled)),
		CountTrigger.of(triggerCount),
		TimeEvictor.of(Time.seconds(2)),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 5999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2001));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1001));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 3999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 6500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1002));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 7999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();

	Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}