Java Code Examples for org.apache.flink.api.common.typeinfo.TypeInformation#of()

The following examples show how to use org.apache.flink.api.common.typeinfo.TypeInformation#of(). Each example notes the original project and source file it was taken from, along with its license.
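As a quick orientation, here is a minimal sketch of the two overloads that appear throughout the examples (class and variable names are illustrative, not taken from any of the listed projects). TypeInformation.of(Class) is sufficient for non-generic classes; for generic types, whose type arguments are erased at runtime, TypeInformation.of(TypeHint) captures the full generic signature via an anonymous TypeHint subclass.

import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;

public class TypeInformationOfSketch {

	public static void main(String[] args) {
		// Non-generic type: the Class overload is sufficient.
		TypeInformation<String> stringInfo = TypeInformation.of(String.class);

		// Generic type: Tuple2's type arguments are erased at runtime, so an
		// anonymous TypeHint subclass is used to capture the full signature.
		TypeInformation<Tuple2<String, Integer>> tupleInfo =
				TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {});

		System.out.println(stringInfo);
		System.out.println(tupleInfo);
	}
}
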
Example 1
Source File: EventTimeJoinExercise.java    From flink-training-exercises with Apache License 2.0
@Override
public void open(Configuration config) {
	MapStateDescriptor<Long, Trade> tDescriptor = new MapStateDescriptor<>(
			"tradeBuffer",
			TypeInformation.of(Long.class),
			TypeInformation.of(Trade.class)
	);
	tradeMap = getRuntimeContext().getMapState(tDescriptor);

	MapStateDescriptor<Long, Customer> cDescriptor = new MapStateDescriptor<>(
			"customerBuffer",
			TypeInformation.of(Long.class),
			TypeInformation.of(Customer.class)
	);
	customerMap = getRuntimeContext().getMapState(cDescriptor);
}
 
Example 2
Source File: IntervalJoinOperatorTest.java    From flink with Apache License 2.0
private TestHarness createTestHarness(long lowerBound,
	boolean lowerBoundInclusive,
	long upperBound,
	boolean upperBoundInclusive) throws Exception {

	IntervalJoinOperator<String, TestElem, TestElem, Tuple2<TestElem, TestElem>> operator =
		new IntervalJoinOperator<>(
			lowerBound,
			upperBound,
			lowerBoundInclusive,
			upperBoundInclusive,
			TestElem.serializer(),
			TestElem.serializer(),
			new PassthroughFunction()
		);

	return new TestHarness(
		operator,
		(elem) -> elem.key, // key
		(elem) -> elem.key, // key
		TypeInformation.of(String.class)
	);
}
 
Example 3
Source File: IntervalJoinOperatorTest.java    From flink with Apache License 2.0
private JoinTestBuilder setupHarness(long lowerBound,
	boolean lowerBoundInclusive,
	long upperBound,
	boolean upperBoundInclusive) throws Exception {

	IntervalJoinOperator<String, TestElem, TestElem, Tuple2<TestElem, TestElem>> operator =
		new IntervalJoinOperator<>(
			lowerBound,
			upperBound,
			lowerBoundInclusive,
			upperBoundInclusive,
			TestElem.serializer(),
			TestElem.serializer(),
			new PassthroughFunction()
		);

	TestHarness t = new TestHarness(
		operator,
		(elem) -> elem.key, // key
		(elem) -> elem.key, // key
		TypeInformation.of(String.class)
	);

	return new JoinTestBuilder(t, operator);
}
 
Example 4
Source File: FieldAccessorTest.java    From flink with Apache License 2.0
@Test
public void testPojoInPojo() {
	Outer o = new Outer(10, new Inner(4L), (short) 12);
	PojoTypeInfo<Outer> tpeInfo = (PojoTypeInfo<Outer>) TypeInformation.of(Outer.class);

	FieldAccessor<Outer, Long> fix = FieldAccessorFactory.getAccessor(tpeInfo, "i.x", null);
	assertEquals(4L, (long) fix.get(o));
	assertEquals(4L, o.i.x);
	o = fix.set(o, 22L);
	assertEquals(22L, (long) fix.get(o));
	assertEquals(22L, o.i.x);

	FieldAccessor<Outer, Inner> fi = FieldAccessorFactory.getAccessor(tpeInfo, "i", null);
	assertEquals(22L, fi.get(o).x);
	assertEquals(22L, (long) fix.get(o));
	assertEquals(22L, o.i.x);
	o = fi.set(o, new Inner(30L));
	assertEquals(30L, fi.get(o).x);
	assertEquals(30L, (long) fix.get(o));
	assertEquals(30L, o.i.x);
}
 
Example 5
Source File: AbstractDeserializationSchemaTest.java    From flink with Apache License 2.0
@Test
public void testTypeExtractionGenericAnonymous() {
	TypeInformation<JSONPObject> type = new AbstractDeserializationSchema<JSONPObject>() {
		@Override
		public JSONPObject deserialize(byte[] message) throws IOException {
			throw new UnsupportedOperationException();
		}
	}.getProducedType();

	TypeInformation<JSONPObject> expected = TypeInformation.of(new TypeHint<JSONPObject>(){});
	assertEquals(expected, type);
}
 
Example 6
Source File: StaticallyRegisteredTypes.java    From flink-statefun with Apache License 2.0
private TypeInformation<?> typeInformation(Class<?> valueType) {
  if (Message.class.isAssignableFrom(valueType)) {
    Class<Message> message = (Class<Message>) valueType;
    return new ProtobufTypeInformation<>(message);
  }
  if (org.apache.flink.statefun.flink.core.message.Message.class.isAssignableFrom(valueType)) {
    return new MessageTypeInformation(messageFactoryType);
  }
  // TODO: we may want to restrict the allowed typeInfo here to those that respect schema evolution.
  return TypeInformation.of(valueType);
}
 
Example 7
Source File: IntervalJoinOperatorTest.java    From flink with Apache License 2.0
@Test
public void testReturnsCorrectTimestamp() throws Exception {
	IntervalJoinOperator<String, TestElem, TestElem, Tuple2<TestElem, TestElem>> op =
		new IntervalJoinOperator<>(
			-1,
			1,
			true,
			true,
			TestElem.serializer(),
			TestElem.serializer(),
			new ProcessJoinFunction<TestElem, TestElem, Tuple2<TestElem, TestElem>>() {

				private static final long serialVersionUID = 1L;

				@Override
				public void processElement(
					TestElem left,
					TestElem right,
					Context ctx,
					Collector<Tuple2<TestElem, TestElem>> out) throws Exception {
					Assert.assertEquals(Math.max(left.ts, right.ts), ctx.getTimestamp());
				}
			}
		);

	try (TestHarness testHarness = new TestHarness(
		op,
		(elem) -> elem.key,
		(elem) -> elem.key,
		TypeInformation.of(String.class)
	)) {

		testHarness.setup();
		testHarness.open();

		processElementsAndWatermarks(testHarness);
	}
}
 
Example 8
Source File: IntervalJoinOperatorTest.java    From flink with Apache License 2.0
@Test
public void testContextCorrectRightTimestamp() throws Exception {

	IntervalJoinOperator<String, TestElem, TestElem, Tuple2<TestElem, TestElem>> op =
		new IntervalJoinOperator<>(
			-1,
			1,
			true,
			true,
			TestElem.serializer(),
			TestElem.serializer(),
			new ProcessJoinFunction<TestElem, TestElem, Tuple2<TestElem, TestElem>>() {
				@Override
				public void processElement(
					TestElem left,
					TestElem right,
					Context ctx,
					Collector<Tuple2<TestElem, TestElem>> out) throws Exception {
					Assert.assertEquals(right.ts, ctx.getRightTimestamp());
				}
			}
		);

	try (TestHarness testHarness = new TestHarness(
		op,
		(elem) -> elem.key,
		(elem) -> elem.key,
		TypeInformation.of(String.class)
	)) {

		testHarness.setup();
		testHarness.open();

		processElementsAndWatermarks(testHarness);
	}
}
 
Example 9
Source File: CoGroupOperatorCollectionTest.java    From Flink-CEPplus with Apache License 2.0
private CoGroupOperatorBase<Tuple2<String, Integer>, Tuple2<String, Integer>,
		Tuple2<String, Integer>, CoGroupFunction<Tuple2<String, Integer>, Tuple2<String, Integer>,
		Tuple2<String, Integer>>> getCoGroupOperator(
		RichCoGroupFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>> udf) {

	TypeInformation<Tuple2<String, Integer>> tuple2Info = TypeInformation.of(new TypeHint<Tuple2<String, Integer>>(){});

	return new CoGroupOperatorBase<>(
			udf,
			new BinaryOperatorInformation<>(tuple2Info, tuple2Info, tuple2Info),
			new int[]{0},
			new int[]{0},
			"coGroup on Collections"
	);
}
 
Example 10
Source File: InputFormatSourceFunctionTest.java    From flink with Apache License 2.0
private void testFormatLifecycle(final boolean midCancel) throws Exception {

	final int noOfSplits = 5;
	final int cancelAt = 2;

	final LifeCycleTestInputFormat format = new LifeCycleTestInputFormat();
	final InputFormatSourceFunction<Integer> reader = new InputFormatSourceFunction<>(format, TypeInformation.of(Integer.class));

	try (MockEnvironment environment =
			new MockEnvironmentBuilder()
				.setTaskName("no")
				.setManagedMemorySize(4 * MemoryManager.DEFAULT_PAGE_SIZE)
				.build()) {

		reader.setRuntimeContext(new MockRuntimeContext(format, noOfSplits, environment));

		Assert.assertFalse(format.isConfigured);
		Assert.assertFalse(format.isInputFormatOpen);
		Assert.assertFalse(format.isSplitOpen);

		reader.open(new Configuration());
		Assert.assertTrue(format.isConfigured);

		TestSourceContext ctx = new TestSourceContext(reader, format, midCancel, cancelAt);
		reader.run(ctx);

		int splitsSeen = ctx.getSplitsSeen();
		Assert.assertEquals(midCancel ? cancelAt : noOfSplits, splitsSeen);

		// we have exhausted the splits, so the format and splits should be closed by now
		Assert.assertFalse(format.isSplitOpen);
		Assert.assertFalse(format.isInputFormatOpen);
	}
}
 
Example 11
Source File: AbstractDeserializationSchemaTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testTypeExtractionTuple() {
	TypeInformation<Tuple2<byte[], byte[]>> type = new TupleSchema().getProducedType();
	TypeInformation<Tuple2<byte[], byte[]>> expected = TypeInformation.of(new TypeHint<Tuple2<byte[], byte[]>>(){});
	assertEquals(expected, type);
}
 
Example 12
Source File: LogSchema.java    From flink-learning with Apache License 2.0
@Override
public TypeInformation<LogEvent> getProducedType() {
    return TypeInformation.of(LogEvent.class);
}
 
Example 13
Source File: DynamoDBStreamsSchema.java    From flink with Apache License 2.0
@Override
public TypeInformation<Record> getProducedType() {
	return TypeInformation.of(Record.class);
}
 
Example 14
Source File: CEPOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCEPOperatorSideOutputLateElementsEventTime() throws Exception {

	Event startEvent = new Event(41, "c", 1.0);
	Event middle1Event1 = new Event(41, "a", 2.0);
	Event middle1Event2 = new Event(41, "a", 3.0);
	Event middle1Event3 = new Event(41, "a", 4.0);

	OutputTag<Event> lateDataTag = new OutputTag<Event>("late-data", TypeInformation.of(Event.class));

	CepOperator<Event, Integer, Map<String, List<Event>>> operator = CepOperatorTestUtilities.getKeyedCepOpearator(
		false,
		new ComplexNFAFactory(),
		null,
		lateDataTag);
	try (OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
			CepOperatorTestUtilities.getCepTestHarness(operator)) {

		harness.open();

		harness.processWatermark(new Watermark(Long.MIN_VALUE));
		harness.processElement(new StreamRecord<>(startEvent, 6));

		verifyWatermark(harness.getOutput().poll(), Long.MIN_VALUE);
		harness.processWatermark(new Watermark(6L));
		verifyWatermark(harness.getOutput().poll(), 6L);

		harness.processElement(new StreamRecord<>(middle1Event1, 4));
		harness.processElement(new StreamRecord<>(middle1Event2, 5));
		harness.processElement(new StreamRecord<>(middle1Event3, 7));

		List<Event> late = new ArrayList<>();

		while (!harness.getSideOutput(lateDataTag).isEmpty()) {
			StreamRecord<Event> eventStreamRecord = harness.getSideOutput(lateDataTag).poll();
			late.add(eventStreamRecord.getValue());
		}

		List<Event> expected = Lists.newArrayList(middle1Event1, middle1Event2);
		Assert.assertArrayEquals(expected.toArray(), late.toArray());
	}
}
 
Example 15
Source File: OriginalLogEventSchema.java    From flink-learning with Apache License 2.0
@Override
public TypeInformation<OriginalLogEvent> getProducedType() {
    return TypeInformation.of(OriginalLogEvent.class);
}
 
Example 16
Source File: KafkaConsumerTestBase.java    From flink with Apache License 2.0
protected String writeSequence(
		String baseTopicName,
		final int numElements,
		final int parallelism,
		final int replicationFactor) throws Exception {
	LOG.info("\n===================================\n" +
			"== Writing sequence of " + numElements + " into " + baseTopicName + " with p=" + parallelism + "\n" +
			"===================================");

	final TypeInformation<Tuple2<Integer, Integer>> resultType =
			TypeInformation.of(new TypeHint<Tuple2<Integer, Integer>>() {});

	final KeyedSerializationSchema<Tuple2<Integer, Integer>> serSchema =
			new KeyedSerializationSchemaWrapper<>(
					new TypeInformationSerializationSchema<>(resultType, new ExecutionConfig()));

	final KafkaDeserializationSchema<Tuple2<Integer, Integer>> deserSchema =
			new KafkaDeserializationSchemaWrapper<>(
					new TypeInformationSerializationSchema<>(resultType, new ExecutionConfig()));

	final int maxNumAttempts = 10;

	for (int attempt = 1; attempt <= maxNumAttempts; attempt++) {

		final String topicName = baseTopicName + '-' + attempt;

		LOG.info("Writing attempt #" + attempt);

		// -------- Write the Sequence --------

		createTestTopic(topicName, parallelism, replicationFactor);

		StreamExecutionEnvironment writeEnv = StreamExecutionEnvironment.getExecutionEnvironment();
		writeEnv.getConfig().setRestartStrategy(RestartStrategies.noRestart());
		writeEnv.getConfig().disableSysoutLogging();

		DataStream<Tuple2<Integer, Integer>> stream = writeEnv.addSource(new RichParallelSourceFunction<Tuple2<Integer, Integer>>() {

			private boolean running = true;

			@Override
			public void run(SourceContext<Tuple2<Integer, Integer>> ctx) throws Exception {
				int cnt = 0;
				int partition = getRuntimeContext().getIndexOfThisSubtask();

				while (running && cnt < numElements) {
					ctx.collect(new Tuple2<>(partition, cnt));
					cnt++;
				}
			}

			@Override
			public void cancel() {
				running = false;
			}
		}).setParallelism(parallelism);

		// the producer must not produce duplicates
		Properties producerProperties = FlinkKafkaProducerBase.getPropertiesFromBrokerList(brokerConnectionStrings);
		producerProperties.setProperty("retries", "0");
		producerProperties.putAll(secureProps);

		kafkaServer.produceIntoKafka(stream, topicName, serSchema, producerProperties, new Tuple2FlinkPartitioner(parallelism))
				.setParallelism(parallelism);

		try {
			writeEnv.execute("Write sequence");
		}
		catch (Exception e) {
			LOG.error("Write attempt failed, trying again", e);
			deleteTestTopic(topicName);
			waitUntilNoJobIsRunning(client);
			continue;
		}

		LOG.info("Finished writing sequence");

		// -------- Validate the Sequence --------

		// we need to validate the sequence, because kafka's producers are not exactly once
		LOG.info("Validating sequence");

		waitUntilNoJobIsRunning(client);

		if (validateSequence(topicName, parallelism, deserSchema, numElements)) {
			// everything is good!
			return topicName;
		}
		else {
			deleteTestTopic(topicName);
			// fall through the loop
		}
	}

	throw new Exception("Could not write a valid sequence to Kafka after " + maxNumAttempts + " attempts");
}
 
Example 17
Source File: MetricSchema.java    From flink-learning with Apache License 2.0
@Override
public TypeInformation<MetricEvent> getProducedType() {
    return TypeInformation.of(MetricEvent.class);
}
 
Example 18
Source File: AbstractDeserializationSchemaTest.java    From flink with Apache License 2.0
@Test
public void testTypeExtractionGeneric() {
	TypeInformation<JSONPObject> type = new JsonSchema().getProducedType();
	TypeInformation<JSONPObject> expected = TypeInformation.of(new TypeHint<JSONPObject>(){});
	assertEquals(expected, type);
}
 
Example 19
Source File: AbstractDeserializationSchema.java    From flink with Apache License 2.0
/**
 * Creates an AbstractDeserializationSchema that returns the TypeInformation
 * indicated by the given class. This constructor is only necessary when creating a generic
 * implementation, see {@link AbstractDeserializationSchema Generic Use}.
 *
 * <p>This constructor may fail if the class is generic. In that case, please
 * use the constructor that accepts a {@link #AbstractDeserializationSchema(TypeHint) TypeHint},
 * or a {@link #AbstractDeserializationSchema(TypeInformation) TypeInformation}.
 *
 * @param type The class of the produced type.
 */
protected AbstractDeserializationSchema(Class<T> type) {
	checkNotNull(type, "type");
	this.type = TypeInformation.of(type);
}