Java Code Examples for org.apache.flink.api.common.typeinfo.BasicTypeInfo#INT_TYPE_INFO

The following examples show how to use org.apache.flink.api.common.typeinfo.BasicTypeInfo#INT_TYPE_INFO. Each example notes the source file, the project it comes from, and its license, so you can look up the full context in the original project.
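Before the examples, here is a minimal, self-contained sketch of what this constant is (the class and variable names below are ours, not from any of the listed projects): INT_TYPE_INFO is Flink's predefined TypeInformation for Integer, from which a runtime TypeSerializer can be derived.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;

public class IntTypeInfoSketch {

	public static void main(String[] args) {
		// INT_TYPE_INFO is the predefined TypeInformation for java.lang.Integer.
		TypeInformation<Integer> intInfo = BasicTypeInfo.INT_TYPE_INFO;

		// It is the same type information that extraction yields for Integer.class.
		System.out.println(intInfo.equals(TypeInformation.of(Integer.class))); // true

		// Flink derives the runtime serializer for the type from this object,
		// as several of the examples below do via createSerializer(...).
		TypeSerializer<Integer> serializer = intInfo.createSerializer(new ExecutionConfig());
		System.out.println(serializer.getClass().getSimpleName()); // IntSerializer
	}
}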
Example 1
Source File: JavaTableEnvironmentITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testAsFromAndToTuple() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, config());

	Table table = tableEnv
		.fromDataSet(CollectionDataSets.get3TupleDataSet(env), "a, b, c")
		.select("a, b, c");

	TypeInformation<?> ti = new TupleTypeInfo<Tuple3<Integer, Long, String>>(
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.LONG_TYPE_INFO,
		BasicTypeInfo.STRING_TYPE_INFO);

	DataSet<?> ds = tableEnv.toDataSet(table, ti);
	List<?> results = ds.collect();
	String expected = "(1,1,Hi)\n" + "(2,2,Hello)\n" + "(3,2,Hello world)\n" +
		"(4,3,Hello world, how are you?)\n" + "(5,3,I am fine.)\n" + "(6,3,Luke Skywalker)\n" +
		"(7,4,Comment#1)\n" + "(8,4,Comment#2)\n" + "(9,4,Comment#3)\n" + "(10,4,Comment#4)\n" +
		"(11,5,Comment#5)\n" + "(12,5,Comment#6)\n" + "(13,5,Comment#7)\n" +
		"(14,5,Comment#8)\n" + "(15,5,Comment#9)\n" + "(16,6,Comment#10)\n" +
		"(17,6,Comment#11)\n" + "(18,6,Comment#12)\n" + "(19,6,Comment#13)\n" +
		"(20,6,Comment#14)\n" + "(21,6,Comment#15)\n";
	compareResultAsText(results, expected);
}
 
Example 2
Source File: CollectionDataSets.java    From Flink-CEPplus with Apache License 2.0
public static DataSet<Tuple3<Tuple2<Integer, Integer>, String, Integer>> getGroupSortedNestedTupleDataSet2(ExecutionEnvironment env) {

		List<Tuple3<Tuple2<Integer, Integer>, String, Integer>> data = new ArrayList<>();
		data.add(new Tuple3<>(new Tuple2<>(1, 3), "a", 2));
		data.add(new Tuple3<>(new Tuple2<>(1, 2), "a", 1));
		data.add(new Tuple3<>(new Tuple2<>(2, 1), "a", 3));
		data.add(new Tuple3<>(new Tuple2<>(2, 2), "b", 4));
		data.add(new Tuple3<>(new Tuple2<>(3, 3), "c", 5));
		data.add(new Tuple3<>(new Tuple2<>(3, 6), "c", 6));
		data.add(new Tuple3<>(new Tuple2<>(4, 9), "c", 7));

		TupleTypeInfo<Tuple3<Tuple2<Integer, Integer>, String, Integer>> type = new TupleTypeInfo<>(
				new TupleTypeInfo<Tuple2<Integer, Integer>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO),
				BasicTypeInfo.STRING_TYPE_INFO,
				BasicTypeInfo.INT_TYPE_INFO
		);

		return env.fromCollection(data, type);
	}
 
Example 3
Source File: TypeExtractorTest.java    From flink with Apache License 2.0
@Test
public void testEitherHierarchy() {
	MapFunction<?, ?> function = new EitherMapper<Boolean>();
	TypeInformation<?> ti = TypeExtractor.getMapReturnTypes((MapFunction) function, BasicTypeInfo.BOOLEAN_TYPE_INFO);
	TypeInformation<?> expected = new EitherTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO);
	Assert.assertEquals(expected, ti);

	function = new EitherMapper2();
	ti = TypeExtractor.getMapReturnTypes((MapFunction) function, BasicTypeInfo.STRING_TYPE_INFO);
	expected = new EitherTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, new TupleTypeInfo(BasicTypeInfo.INT_TYPE_INFO));
	Assert.assertEquals(expected, ti);

	function = new EitherMapper3();
	ti = TypeExtractor.getMapReturnTypes((MapFunction) function, expected);
	Assert.assertEquals(expected, ti);

	Either<String, Tuple1<Integer>> either = new Either2();
	ti = TypeExtractor.getForObject(either);
	Assert.assertEquals(expected, ti);
}
 
Example 4
Source File: LegacyKeyedProcessOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testNullOutputTagRefusal() throws Exception {
	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
		new LegacyKeyedProcessOperator<>(new NullOutputTagEmittingProcessFunction());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(
			operator, new IdentityKeySelector<>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(17);
	try {
		expectedException.expect(IllegalArgumentException.class);
		testHarness.processElement(new StreamRecord<>(5));
	} finally {
		testHarness.close();
	}
}
 
Example 5
Source File: RowSerializerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testLargeRowSerializer() {
	TypeInformation<Row> typeInfo = new RowTypeInfo(
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.STRING_TYPE_INFO);

	Row row = new Row(13);
	row.setField(0, 2);
	row.setField(1, null);
	row.setField(3, null);
	row.setField(4, null);
	row.setField(5, null);
	row.setField(6, null);
	row.setField(7, null);
	row.setField(8, null);
	row.setField(9, null);
	row.setField(10, null);
	row.setField(11, null);
	row.setField(12, "Test");

	TypeSerializer<Row> serializer = typeInfo.createSerializer(new ExecutionConfig());
	RowSerializerTestInstance testInstance = new RowSerializerTestInstance(serializer, row);
	testInstance.testAll();
}
 
Example 6
Source File: RowTypeInfoTest.java    From flink with Apache License 2.0
@Override
protected RowTypeInfo[] getTestData() {
	return new RowTypeInfo[] {
		new RowTypeInfo(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO),
		new RowTypeInfo(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO),
		new RowTypeInfo(typeList),
		new RowTypeInfo(
			new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO},
			new String[]{"int", "int2"})
	};
}
 
Example 7
Source File: CollectionDataSets.java    From flink with Apache License 2.0
public static DataSet<Tuple2<Tuple2<Integer, Integer>, String>> getSmallNestedTupleDataSet(ExecutionEnvironment env) {

		List<Tuple2<Tuple2<Integer, Integer>, String>> data = new ArrayList<>();
		data.add(new Tuple2<>(new Tuple2<>(1, 1), "one"));
		data.add(new Tuple2<>(new Tuple2<>(2, 2), "two"));
		data.add(new Tuple2<>(new Tuple2<>(3, 3), "three"));

		TupleTypeInfo<Tuple2<Tuple2<Integer, Integer>, String>> type = new TupleTypeInfo<>(
				new TupleTypeInfo<Tuple2<Integer, Integer>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO),
				BasicTypeInfo.STRING_TYPE_INFO
		);

		return env.fromCollection(data, type);
	}
 
Example 8
Source File: LegacyKeyedProcessOperatorTest.java    From flink with Apache License 2.0
/**
 * Verifies that we don't have leakage between different keys.
 */
@Test
public void testEventTimeTimerWithState() throws Exception {

	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
			new LegacyKeyedProcessOperator<>(new TriggeringStatefulFlatMapFunction(TimeDomain.EVENT_TIME));

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processWatermark(new Watermark(1));
	testHarness.processElement(new StreamRecord<>(17, 0L)); // should set timer for 6

	testHarness.processWatermark(new Watermark(2));
	testHarness.processElement(new StreamRecord<>(42, 1L)); // should set timer for 7

	testHarness.processWatermark(new Watermark(6));
	testHarness.processWatermark(new Watermark(7));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new Watermark(1L));
	expectedOutput.add(new StreamRecord<>("INPUT:17", 0L));
	expectedOutput.add(new Watermark(2L));
	expectedOutput.add(new StreamRecord<>("INPUT:42", 1L));
	expectedOutput.add(new StreamRecord<>("STATE:17", 6L));
	expectedOutput.add(new Watermark(6L));
	expectedOutput.add(new StreamRecord<>("STATE:42", 7L));
	expectedOutput.add(new Watermark(7L));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 9
Source File: RegularWindowOperatorContractTest.java    From flink with Apache License 2.0
@Override
protected <W extends Window, OUT> KeyedOneInputStreamOperatorTestHarness<Integer, Integer, OUT> createWindowOperator(
		WindowAssigner<Integer, W> assigner,
		Trigger<Integer, W> trigger,
		long allowedLateness,
		InternalWindowFunction<Iterable<Integer>, OUT, Integer, W> windowFunction,
		OutputTag<Integer> lateOutputTag) throws Exception {

	KeySelector<Integer, Integer> keySelector = new KeySelector<Integer, Integer>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Integer getKey(Integer value) throws Exception {
			return value;
		}
	};

	ListStateDescriptor<Integer> intListDescriptor =
			new ListStateDescriptor<>("int-list", IntSerializer.INSTANCE);

	@SuppressWarnings("unchecked")
	WindowOperator<Integer, Integer, Iterable<Integer>, OUT, W> operator = new WindowOperator<>(
			assigner,
			assigner.getWindowSerializer(new ExecutionConfig()),
			keySelector,
			IntSerializer.INSTANCE,
			intListDescriptor,
			windowFunction,
			trigger,
			allowedLateness,
			lateOutputTag);

	return new KeyedOneInputStreamOperatorTestHarness<>(
			operator,
			keySelector,
			BasicTypeInfo.INT_TYPE_INFO);
}
 
Example 10
Source File: StreamGroupedReduceTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testGroupedReduce() throws Exception {

	KeySelector<Integer, Integer> keySelector = new IntegerKeySelector();

	StreamGroupedReduce<Integer> operator = new StreamGroupedReduce<>(new MyReducer(), IntSerializer.INSTANCE);

	OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, BasicTypeInfo.INT_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
	testHarness.processElement(new StreamRecord<>(1, initialTime + 2));
	testHarness.processWatermark(new Watermark(initialTime + 2));
	testHarness.processElement(new StreamRecord<>(2, initialTime + 3));
	testHarness.processElement(new StreamRecord<>(2, initialTime + 4));
	testHarness.processElement(new StreamRecord<>(3, initialTime + 5));

	expectedOutput.add(new StreamRecord<>(1, initialTime + 1));
	expectedOutput.add(new StreamRecord<>(2, initialTime + 2));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<>(2, initialTime + 3));
	expectedOutput.add(new StreamRecord<>(4, initialTime + 4));
	expectedOutput.add(new StreamRecord<>(3, initialTime + 5));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example 11
Source File: MapTypeInfoTest.java    From flink with Apache License 2.0
@Override
protected MapTypeInfo<?, ?>[] getTestData() {
	return new MapTypeInfo<?, ?>[] {
		new MapTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO),
		new MapTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO),
		new MapTypeInfo<>(String.class, Boolean.class)
	};
}
 
Example 12
Source File: StreamIterationHeadTest.java    From flink with Apache License 2.0
@Test
public void testIterationHeadWatermarkEmission() throws Exception {
	StreamTaskTestHarness<Integer> harness = new StreamTaskTestHarness<>(
			StreamIterationHead::new,
			BasicTypeInfo.INT_TYPE_INFO);
	harness.setupOutputForSingletonOperatorChain();
	harness.getStreamConfig().setIterationId("1");
	harness.getStreamConfig().setIterationWaitTime(1);

	harness.invoke();
	harness.waitForTaskCompletion();

	assertEquals(1, harness.getOutput().size());
	assertEquals(new Watermark(Long.MAX_VALUE), harness.getOutput().peek());
}
 
Example 13
Source File: TwoInputStreamTaskTest.java    From flink with Apache License 2.0
/**
 * This test verifies that checkpoint barriers and barrier buffers work correctly with
 * concurrent checkpoint barriers where one checkpoint "overtakes" another, i.e.
 * some inputs first receive barriers from an earlier checkpoint (and block),
 * and then all inputs receive barriers from a later checkpoint.
 */
@Test
public void testOvertakingCheckpointBarriers() throws Exception {

	final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness =
			new TwoInputStreamTaskTestHarness<>(
					TwoInputStreamTask::new,
					2, 2, new int[] {1, 2},
					BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	CoStreamMap<String, Integer, String> coMapOperator = new CoStreamMap<>(new IdentityMap());
	streamConfig.setStreamOperator(coMapOperator);
	streamConfig.setOperatorID(new OperatorID());

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	long initialTime = 0L;

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

	// These elements should be forwarded, since we did not yet receive a checkpoint barrier
	// on that input. Only add elements to the same input; otherwise we would not know the
	// ordering of the output, since the task might read its inputs in any order.
	testHarness.processElement(new StreamRecord<>(42, initialTime), 1, 1);
	testHarness.processElement(new StreamRecord<>(1337, initialTime), 1, 1);
	expectedOutput.add(new StreamRecord<>("42", initialTime));
	expectedOutput.add(new StreamRecord<>("1337", initialTime));

	testHarness.waitForInputProcessing();
	// we should not yet see the barrier, only the two elements from non-blocked input
	TestHarnessUtil.assertOutputEquals("Output was not correct.",
			expectedOutput,
			testHarness.getOutput());

	// Now give a later barrier to all inputs, this should unblock the first channel
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	expectedOutput.add(new CancelCheckpointMarker(0));
	expectedOutput.add(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()));

	testHarness.waitForInputProcessing();

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
			expectedOutput,
			testHarness.getOutput());

	// Then give the earlier barrier, these should be ignored
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
			expectedOutput,
			testHarness.getOutput());
}
 
Example 14
Source File: JoinITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public TypeInformation[] getKeyTypes() {
	return new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO};
}
 
Example 15
Source File: KafkaConsumerTestBase.java    From flink with Apache License 2.0
/**
 * Tests proper consumption when there are fewer Flink sources than Kafka partitions,
 * so that one Flink source reads multiple Kafka partitions.
 */
public void runOneSourceMultiplePartitionsExactlyOnceTest() throws Exception {
	final String topic = "oneToManyTopic";
	final int numPartitions = 5;
	final int numElementsPerPartition = 1000;
	final int totalElements = numPartitions * numElementsPerPartition;
	final int failAfterElements = numElementsPerPartition / 3;

	final int parallelism = 2;

	createTestTopic(topic, numPartitions, 1);

	DataGenerators.generateRandomizedIntegerSequence(
			StreamExecutionEnvironment.getExecutionEnvironment(),
			kafkaServer,
			topic,
			numPartitions,
			numElementsPerPartition,
			true);

	// run the topology that fails and recovers

	DeserializationSchema<Integer> schema =
			new TypeInformationSerializationSchema<>(BasicTypeInfo.INT_TYPE_INFO, new ExecutionConfig());

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.enableCheckpointing(500);
	env.setParallelism(parallelism);
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 0));

	Properties props = new Properties();
	props.putAll(standardProps);
	props.putAll(secureProps);
	FlinkKafkaConsumerBase<Integer> kafkaSource = kafkaServer.getConsumer(topic, schema, props);

	env
			.addSource(kafkaSource)
			.map(new PartitionValidatingMapper(numPartitions, 3))
			.map(new FailingIdentityMapper<Integer>(failAfterElements))
			.addSink(new ValidatingExactlyOnceSink(totalElements)).setParallelism(1);

	FailingIdentityMapper.failedBefore = false;
	tryExecute(env, "One-source-multi-partitions exactly once test");

	deleteTestTopic(topic);
}
 
Example 16
Source File: CEPMigrationTest.java    From flink with Apache License 2.0
@Test
public void testSinglePatternAfterMigration() throws Exception {

	KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
		private static final long serialVersionUID = -4873366487571254798L;

		@Override
		public Integer getKey(Event value) throws Exception {
			return value.getId();
		}
	};

	final Event startEvent1 = new Event(42, "start", 1.0);

	OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
			new KeyedOneInputStreamOperatorTestHarness<>(
					getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
					keySelector,
					BasicTypeInfo.INT_TYPE_INFO);

	try {
		harness.setup();

		harness.initializeState(
			OperatorSnapshotUtil.getResourceFilename(
				"cep-migration-single-pattern-afterwards-flink" + migrateVersion + "-snapshot"));

		harness.open();

		harness.processElement(new StreamRecord<>(startEvent1, 5));

		harness.processWatermark(new Watermark(20));

		ConcurrentLinkedQueue<Object> result = harness.getOutput();

		// watermark and the result
		assertEquals(2, result.size());

		Object resultObject = result.poll();
		assertTrue(resultObject instanceof StreamRecord);
		StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject;
		assertTrue(resultRecord.getValue() instanceof Map);

		@SuppressWarnings("unchecked")
		Map<String, List<Event>> patternMap =
			(Map<String, List<Event>>) resultRecord.getValue();

		assertEquals(startEvent1, patternMap.get("start").get(0));
	} finally {
		harness.close();
	}
}
 
Example 17
Source File: CustomInputSplitProgram.java    From flink with Apache License 2.0
@Override
public TypeInformation<Integer> getProducedType() {
	return BasicTypeInfo.INT_TYPE_INFO;
}
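A getProducedType() implementation like the one above typically satisfies Flink's ResultTypeQueryable interface, which lets a source or input format declare its output type explicitly instead of relying on reflective type extraction. A minimal sketch, with a hypothetical class name:

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.ResultTypeQueryable;

// Hypothetical component that declares it produces Integer records.
public class IntProducingFormat implements ResultTypeQueryable<Integer> {

	@Override
	public TypeInformation<Integer> getProducedType() {
		// Tell Flink the produced type directly; no reflection needed.
		return BasicTypeInfo.INT_TYPE_INFO;
	}
}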
 
Example 18
Source File: RowCsvInputFormatTest.java    From flink with Apache License 2.0
@Test
public void testIntegerFields() throws Exception {
	String fileContent = "111|222|333|444|555\n666|777|888|999|000|\n";

	FileInputSplit split = createTempFile(fileContent);

	TypeInformation[] fieldTypes = new TypeInformation[]{
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO,
		BasicTypeInfo.INT_TYPE_INFO};

	RowCsvInputFormat format = new RowCsvInputFormat(PATH, fieldTypes, "\n", "|");

	format.setFieldDelimiter("|");
	format.configure(new Configuration());
	format.open(split);

	Row result = new Row(5);

	result = format.nextRecord(result);
	assertNotNull(result);
	assertEquals(111, result.getField(0));
	assertEquals(222, result.getField(1));
	assertEquals(333, result.getField(2));
	assertEquals(444, result.getField(3));
	assertEquals(555, result.getField(4));

	result = format.nextRecord(result);
	assertNotNull(result);
	assertEquals(666, result.getField(0));
	assertEquals(777, result.getField(1));
	assertEquals(888, result.getField(2));
	assertEquals(999, result.getField(3));
	assertEquals(0, result.getField(4));

	result = format.nextRecord(result);
	assertNull(result);
	assertTrue(format.reachedEnd());
}
 
Example 19
Source File: AbstractStreamOperatorTest.java    From flink with Apache License 2.0
/**
 * Verify that firing processing-time timers see the state of the key that was active
 * when the timer was set.
 */
@Test
public void testProcessingTimeTimersDontInterfere() throws Exception {
	TestOperator testOperator = new TestOperator();

	KeyedOneInputStreamOperatorTestHarness<Integer, Tuple2<Integer, String>, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(testOperator, new TestKeySelector(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.open();

	testHarness.setProcessingTime(0L);

	testHarness.processElement(new Tuple2<>(1, "SET_PROC_TIME_TIMER:20"), 0);

	testHarness.processElement(new Tuple2<>(0, "SET_STATE:HELLO"), 0);
	testHarness.processElement(new Tuple2<>(1, "SET_STATE:CIAO"), 0);

	testHarness.processElement(new Tuple2<>(0, "SET_PROC_TIME_TIMER:10"), 0);

	testHarness.setProcessingTime(10L);

	assertThat(
			extractResult(testHarness),
			contains("ON_PROC_TIME:HELLO"));

	testHarness.setProcessingTime(20L);

	assertThat(
			extractResult(testHarness),
			contains("ON_PROC_TIME:CIAO"));
}