Java Code Examples for org.apache.flink.api.common.typeinfo.Types#ROW

The following examples show how to use org.apache.flink.api.common.typeinfo.Types#ROW. Each example notes the project and source file it comes from.
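Before the examples, here is a minimal sketch of how Types#ROW is typically used on its own; the class and variable names below are illustrative, and Types.ROW_NAMED is included only to show the related variant that also assigns field names.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.types.Row;

public class RowTypeSketch {
	public static void main(String[] args) {
		// Positional row type: fields are automatically named f0, f1, ...
		TypeInformation<Row> positional = Types.ROW(Types.STRING, Types.INT);

		// Named variant: field names are paired with the field types.
		TypeInformation<Row> named = Types.ROW_NAMED(
			new String[] {"word", "count"},
			Types.STRING, Types.INT);

		// A Row value that matches either of the two row types above.
		Row row = Row.of("flink", 42);

		System.out.println(positional + " | " + named + " | " + row);
	}
}
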
Example 1
Source File: CsvRowDeSerializationSchemaTest.java    From Flink-CEPplus with Apache License 2.0
private <T> void testField(
		TypeInformation<T> fieldInfo,
		String csvValue,
		T value,
		Consumer<CsvRowSerializationSchema.Builder> serializationConfig,
		Consumer<CsvRowDeserializationSchema.Builder> deserializationConfig,
		String fieldDelimiter) throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, fieldInfo, Types.STRING);
	final String expectedCsv = "BEGIN" + fieldDelimiter + csvValue + fieldDelimiter + "END\n";
	final Row expectedRow = Row.of("BEGIN", value, "END");

	// serialization
	final CsvRowSerializationSchema.Builder serSchemaBuilder = new CsvRowSerializationSchema.Builder(rowInfo);
	serializationConfig.accept(serSchemaBuilder);
	final byte[] serializedRow = serialize(serSchemaBuilder, expectedRow);
	assertEquals(expectedCsv, new String(serializedRow));

	// deserialization
	final CsvRowDeserializationSchema.Builder deserSchemaBuilder = new CsvRowDeserializationSchema.Builder(rowInfo);
	deserializationConfig.accept(deserSchemaBuilder);
	final Row deserializedRow = deserialize(deserSchemaBuilder, expectedCsv);
	assertEquals(expectedRow, deserializedRow);
}
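The serialize and deserialize helpers referenced in this test are not shown on this page. A simplified sketch of what they could look like, assuming the builders' build() methods are called directly (the actual test helpers may do more, such as round-tripping the schemas through Java serialization):

private static byte[] serialize(CsvRowSerializationSchema.Builder serSchemaBuilder, Row row) {
	// Build the serialization schema and turn the row into one CSV line (as bytes).
	return serSchemaBuilder.build().serialize(row);
}

private static Row deserialize(CsvRowDeserializationSchema.Builder deserSchemaBuilder, String csv) throws Exception {
	// Build the deserialization schema and parse the CSV bytes back into a Row.
	return deserSchemaBuilder.build().deserialize(csv.getBytes());
}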
 
Example 2
Source File: CsvRowDeSerializationSchemaTest.java    From Flink-CEPplus with Apache License 2.0
private <T> void testField(
		TypeInformation<T> fieldInfo,
		String csvValue,
		T value,
		Consumer<CsvRowDeserializationSchema.Builder> deserializationConfig,
		String fieldDelimiter) throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, fieldInfo, Types.STRING);
	final String csv = "BEGIN" + fieldDelimiter + csvValue + fieldDelimiter + "END\n";
	final Row expectedRow = Row.of("BEGIN", value, "END");

	// deserialization
	final CsvRowDeserializationSchema.Builder deserSchemaBuilder = new CsvRowDeserializationSchema.Builder(rowInfo);
	deserializationConfig.accept(deserSchemaBuilder);
	final Row deserializedRow = deserialize(deserSchemaBuilder, csv);
	assertEquals(expectedRow, deserializedRow);
}
 
Example 3
Source File: CsvRowDeSerializationSchemaTest.java    From flink with Apache License 2.0
private <T> void testField(
		TypeInformation<T> fieldInfo,
		String csvValue,
		T value,
		Consumer<CsvRowSerializationSchema.Builder> serializationConfig,
		Consumer<CsvRowDeserializationSchema.Builder> deserializationConfig,
		String fieldDelimiter) throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, fieldInfo, Types.STRING);
	final String expectedCsv = "BEGIN" + fieldDelimiter + csvValue + fieldDelimiter + "END\n";
	final Row expectedRow = Row.of("BEGIN", value, "END");

	// serialization
	final CsvRowSerializationSchema.Builder serSchemaBuilder = new CsvRowSerializationSchema.Builder(rowInfo);
	serializationConfig.accept(serSchemaBuilder);
	final byte[] serializedRow = serialize(serSchemaBuilder, expectedRow);
	assertEquals(expectedCsv, new String(serializedRow));

	// deserialization
	final CsvRowDeserializationSchema.Builder deserSchemaBuilder = new CsvRowDeserializationSchema.Builder(rowInfo);
	deserializationConfig.accept(deserSchemaBuilder);
	final Row deserializedRow = deserialize(deserSchemaBuilder, expectedCsv);
	assertEquals(expectedRow, deserializedRow);
}
 
Example 4
Source File: CsvRowDeSerializationSchemaTest.java    From flink with Apache License 2.0
@Test
public void testSerializationProperties() throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, Types.INT, Types.STRING);
	final CsvRowSerializationSchema.Builder serSchemaBuilder = new CsvRowSerializationSchema.Builder(rowInfo)
		.setLineDelimiter("\r");

	assertArrayEquals(
		"Test,12,Hello\r".getBytes(),
		serialize(serSchemaBuilder, Row.of("Test", 12, "Hello")));

	serSchemaBuilder.setQuoteCharacter('#');

	assertArrayEquals(
		"Test,12,#2019-12-26 12:12:12#\r".getBytes(),
		serialize(serSchemaBuilder, Row.of("Test", 12, "2019-12-26 12:12:12")));

	serSchemaBuilder.disableQuoteCharacter();

	assertArrayEquals(
		"Test,12,2019-12-26 12:12:12\r".getBytes(),
		serialize(serSchemaBuilder, Row.of("Test", 12, "2019-12-26 12:12:12")));
}
 
Example 5
Source File: CsvRowDeSerializationSchemaTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSerializationProperties() throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, Types.INT, Types.STRING);
	final CsvRowSerializationSchema.Builder serSchemaBuilder = new CsvRowSerializationSchema.Builder(rowInfo)
		.setLineDelimiter("\r");

	assertArrayEquals(
		"Test,12,Hello\r".getBytes(),
		serialize(serSchemaBuilder, Row.of("Test", 12, "Hello")));
}
 
Example 6
Source File: CsvRowDeSerializationSchemaTest.java    From Flink-CEPplus with Apache License 2.0
private Row testDeserialization(
		boolean allowParsingErrors,
		boolean allowComments,
		String string) throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, Types.INT, Types.STRING);
	final CsvRowDeserializationSchema.Builder deserSchemaBuilder = new CsvRowDeserializationSchema.Builder(rowInfo)
		.setIgnoreParseErrors(allowParsingErrors)
		.setAllowComments(allowComments);
	return deserialize(deserSchemaBuilder, string);
}
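For illustration only, a hedged usage sketch of this helper (the input line is made up and not taken from the original test):

// With the row type (STRING, INT, STRING) and the default ',' field delimiter,
// a well-formed line should come back as a three-field Row.
Row parsed = testDeserialization(true, true, "Test,12,Hello");
// parsed is expected to equal Row.of("Test", 12, "Hello")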
 
Example 7
Source File: JsonRowSchemaConverter.java    From flink with Apache License 2.0
private static TypeInformation<?> convertArray(String location, JsonNode node, JsonNode root) {
	// validate items
	if (!node.has(ITEMS)) {
		throw new IllegalArgumentException(
			"Arrays must specify an '" + ITEMS + "' property in node: " + location);
	}
	final JsonNode items = node.get(ITEMS);

	// list (translated to object array)
	if (items.isObject()) {
		final TypeInformation<?> elementType = convertType(
			location + '/' + ITEMS,
			items,
			root);
		// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
		return Types.OBJECT_ARRAY(elementType);
	}
	// tuple (translated to row)
	else if (items.isArray()) {
		final TypeInformation<?>[] types = convertTypes(location + '/' + ITEMS, items, root);

		// validate that array does not contain additional items
		if (node.has(ADDITIONAL_ITEMS) && node.get(ADDITIONAL_ITEMS).isBoolean() &&
				node.get(ADDITIONAL_ITEMS).asBoolean()) {
			throw new IllegalArgumentException(
				"An array tuple must not allow additional items in node: " + location);
		}

		return Types.ROW(types);
	}
	throw new IllegalArgumentException(
		"Invalid type for '" + ITEMS + "' property in node: " + location);
}
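To illustrate the tuple branch above: when the items property is itself an array, each item schema is converted to one field type, and the whole tuple is represented as a row. A hedged sketch of the outcome (the schema fragment and item types below are assumptions, not taken from a real schema):

// Hypothetical schema fragment:
//   { "type": "array", "items": [ { "type": "string" }, { "type": "string" } ] }
// convertTypes() would produce one TypeInformation per item schema ...
TypeInformation<?>[] itemTypes = new TypeInformation<?>[] { Types.STRING, Types.STRING };
// ... and the tuple array becomes a row with one field per item (f0, f1).
TypeInformation<?> tupleType = Types.ROW(itemTypes);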
 
Example 8
Source File: CsvRowDeSerializationSchemaTest.java    From flink with Apache License 2.0
@Test
public void testEmptyLineDelimiter() throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, Types.INT, Types.STRING);
	final CsvRowSerializationSchema.Builder serSchemaBuilder = new CsvRowSerializationSchema.Builder(rowInfo)
			.setLineDelimiter("");

	assertArrayEquals(
			"Test,12,Hello".getBytes(),
			serialize(serSchemaBuilder, Row.of("Test", 12, "Hello")));
}
 
Example 9
Source File: CsvRowDeSerializationSchemaTest.java    From flink with Apache License 2.0
@Test
public void testSerializationProperties() throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, Types.INT, Types.STRING);
	final CsvRowSerializationSchema.Builder serSchemaBuilder = new CsvRowSerializationSchema.Builder(rowInfo)
		.setLineDelimiter("\r");

	assertArrayEquals(
		"Test,12,Hello\r".getBytes(),
		serialize(serSchemaBuilder, Row.of("Test", 12, "Hello")));
}
 
Example 10
Source File: CsvRowDeSerializationSchemaTest.java    From flink with Apache License 2.0
private Row testDeserialization(
		boolean allowParsingErrors,
		boolean allowComments,
		String string) throws Exception {
	final TypeInformation<Row> rowInfo = Types.ROW(Types.STRING, Types.INT, Types.STRING);
	final CsvRowDeserializationSchema.Builder deserSchemaBuilder = new CsvRowDeserializationSchema.Builder(rowInfo)
		.setIgnoreParseErrors(allowParsingErrors)
		.setAllowComments(allowComments);
	return deserialize(deserSchemaBuilder, string);
}
 
Example 11
Source File: StreamSQLTestProgram.java    From Flink-CEPplus with Apache License 2.0
@Override
public TypeInformation getProducedType() {
	return Types.ROW(Types.INT, Types.SQL_TIMESTAMP);
}
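getProducedType comes from Flink's ResultTypeQueryable interface; it is how a source or deserialization schema advertises the record type it emits. A hedged, self-contained sketch of a source that declares a Row output type this way (the class below is illustrative and not part of StreamSQLTestProgram):

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.ResultTypeQueryable;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.types.Row;

// Illustrative source: emits (id, timestamp) rows once per second and advertises
// its record type so Flink does not fall back to generic (Kryo) serialization for Row.
public class TimestampedRowSource implements SourceFunction<Row>, ResultTypeQueryable<Row> {

	private volatile boolean running = true;

	@Override
	public void run(SourceContext<Row> ctx) throws Exception {
		int id = 0;
		while (running) {
			ctx.collect(Row.of(id++, new java.sql.Timestamp(System.currentTimeMillis())));
			Thread.sleep(1000L);
		}
	}

	@Override
	public void cancel() {
		running = false;
	}

	@Override
	public TypeInformation<Row> getProducedType() {
		return Types.ROW(Types.INT, Types.SQL_TIMESTAMP);
	}
}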
 
Example 12
Source File: StreamSQLTestProgram.java    From flink with Apache License 2.0
@Override
public TypeInformation getProducedType() {
	return Types.ROW(Types.INT, Types.SQL_TIMESTAMP);
}
 
Example 13
Source File: StreamSQLTestProgram.java    From flink with Apache License 2.0
@Override
public TypeInformation<Row> getProducedType() {
	return Types.ROW(Types.INT, Types.LONG, Types.STRING);
}
 
Example 14
Source File: StreamSQLTestProgram.java    From flink with Apache License 2.0
@Override
public TypeInformation<Row> getReturnType() {
	return Types.ROW(Types.INT, Types.LONG, Types.STRING);
}
 
Example 15
Source File: StreamSQLTestProgram.java    From Flink-CEPplus with Apache License 2.0
@Override
public TypeInformation<Row> getProducedType() {
	return Types.ROW(Types.INT, Types.LONG, Types.STRING);
}
 
Example 16
Source File: StreamSQLTestProgram.java    From Flink-CEPplus with Apache License 2.0
@Override
public TypeInformation<Row> getReturnType() {
	return Types.ROW(Types.INT, Types.LONG, Types.STRING);
}
 
Example 17
Source File: MaterializedCollectStreamResultTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testLimitedSnapshot() throws UnknownHostException {
	final TypeInformation<Row> type = Types.ROW(Types.STRING, Types.LONG);

	TestMaterializedCollectStreamResult<?> result = null;
	try {
		result = new TestMaterializedCollectStreamResult<>(
			type,
			new ExecutionConfig(),
			InetAddress.getLocalHost(),
			0,
			2,  // limit the materialized table to 2 rows
			3); // with 3 rows overcommitment

		result.isRetrieving = true;

		result.processRecord(Tuple2.of(true, Row.of("D", 1)));
		result.processRecord(Tuple2.of(true, Row.of("A", 1)));
		result.processRecord(Tuple2.of(true, Row.of("B", 1)));
		result.processRecord(Tuple2.of(true, Row.of("A", 1)));

		assertEquals(
			Arrays.asList(null, null, Row.of("B", 1), Row.of("A", 1)), // two over-committed rows
			result.getMaterializedTable());

		assertEquals(TypedResult.payload(2), result.snapshot(1));

		assertEquals(Collections.singletonList(Row.of("B", 1)), result.retrievePage(1));
		assertEquals(Collections.singletonList(Row.of("A", 1)), result.retrievePage(2));

		result.processRecord(Tuple2.of(true, Row.of("C", 1)));

		assertEquals(
			Arrays.asList(Row.of("A", 1), Row.of("C", 1)), // limit clean up has taken place
			result.getMaterializedTable());

		result.processRecord(Tuple2.of(false, Row.of("A", 1)));

		assertEquals(
			Collections.singletonList(Row.of("C", 1)), // regular clean up has taken place
			result.getMaterializedTable());
	} finally {
		if (result != null) {
			result.close();
		}
	}
}