Java Code Examples for org.apache.flink.api.java.typeutils.PojoTypeInfo

The following examples show how to use org.apache.flink.api.java.typeutils.PojoTypeInfo. They are extracted from open source projects; the originating project, source file, and license are noted above each example.
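Before the examples, a quick orientation: a PojoTypeInfo is what Flink's type extractor produces for a class that follows the POJO rules (public class, public no-argument constructor, fields that are public or reachable through getters and setters). The minimal sketch below, using a hypothetical WordCount class, shows where such a value comes from and the name-based field lookups that many of the examples rely on.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class PojoTypeInfoSketch {

	// Hypothetical POJO: public class, public no-arg constructor, public fields.
	public static class WordCount {
		public String word;
		public int count;

		public WordCount() {}
	}

	public static void main(String[] args) {
		TypeInformation<WordCount> typeInfo = TypeExtractor.createTypeInfo(WordCount.class);

		// When extraction succeeds, the result is a PojoTypeInfo whose fields
		// can be addressed by name, as the examples below do.
		if (typeInfo instanceof PojoTypeInfo) {
			PojoTypeInfo<WordCount> pojoTypeInfo = (PojoTypeInfo<WordCount>) typeInfo;
			int pos = pojoTypeInfo.getFieldIndex("word");     // -1 if no such field
			System.out.println(pojoTypeInfo.getTypeAt(pos));  // TypeInformation of "word"
			System.out.println(pojoTypeInfo.getArity());      // number of fields: 2
		}
	}
}
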
Example 1
Source Project: Flink-CEPplus   Source File: CassandraSink.java    License: Apache License 2.0
/**
 * Writes a DataStream into a Cassandra database.
 *
 * @param input input DataStream
 * @param <IN>  input type
 * @return CassandraSinkBuilder, to further configure the sink
 */
public static <IN> CassandraSinkBuilder<IN> addSink(DataStream<IN> input) {
	TypeInformation<IN> typeInfo = input.getType();
	if (typeInfo instanceof TupleTypeInfo) {
		DataStream<Tuple> tupleInput = (DataStream<Tuple>) input;
		return (CassandraSinkBuilder<IN>) new CassandraTupleSinkBuilder<>(tupleInput, tupleInput.getType(), tupleInput.getType().createSerializer(tupleInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof RowTypeInfo) {
		DataStream<Row> rowInput = (DataStream<Row>) input;
		return (CassandraSinkBuilder<IN>) new CassandraRowSinkBuilder(rowInput, rowInput.getType(), rowInput.getType().createSerializer(rowInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof PojoTypeInfo) {
		return new CassandraPojoSinkBuilder<>(input, input.getType(), input.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof CaseClassTypeInfo) {
		DataStream<Product> productInput = (DataStream<Product>) input;
		return (CassandraSinkBuilder<IN>) new CassandraScalaProductSinkBuilder<>(productInput, productInput.getType(), productInput.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	throw new IllegalArgumentException("No support for the type of the given DataStream: " + input.getType());
}
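
For context, this is how the POJO branch of the builder is typically driven. The snippet below is a sketch, not code from the project above: the Message class, keyspace, and contact point are made up, and the DataStax object-mapper annotations on the POJO are what the Cassandra POJO sink uses to generate the insert.

import com.datastax.driver.mapping.annotations.Column;
import com.datastax.driver.mapping.annotations.PartitionKey;
import com.datastax.driver.mapping.annotations.Table;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.cassandra.CassandraSink;

public class CassandraPojoSinkSketch {

	// Hypothetical mapper-annotated POJO; Flink also sees it as a PojoTypeInfo.
	@Table(keyspace = "example", name = "messages")
	public static class Message {
		@PartitionKey
		@Column(name = "body")
		private String body;

		public Message() {}

		public Message(String body) {
			this.body = body;
		}

		public String getBody() {
			return body;
		}

		public void setBody(String body) {
			this.body = body;
		}
	}

	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		DataStream<Message> messages = env.fromElements(new Message("hello"));

		// typeInfo instanceof PojoTypeInfo, so addSink picks the CassandraPojoSinkBuilder branch.
		CassandraSink.addSink(messages)
			.setHost("127.0.0.1")    // placeholder contact point
			.build();

		env.execute("cassandra-pojo-sink-sketch");
	}
}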
 
Example 2
@Test
public void testTypeExtraction() {
	try {
		InputFormat<MyAvroType, ?> format = new AvroInputFormat<MyAvroType>(new Path("file:///ignore/this/file"), MyAvroType.class);

		TypeInformation<?> typeInfoDirect = TypeExtractor.getInputFormatTypes(format);

		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<MyAvroType> input = env.createInput(format);
		TypeInformation<?> typeInfoDataSet = input.getType();

		Assert.assertTrue(typeInfoDirect instanceof PojoTypeInfo);
		Assert.assertTrue(typeInfoDataSet instanceof PojoTypeInfo);

		Assert.assertEquals(MyAvroType.class, typeInfoDirect.getTypeClass());
		Assert.assertEquals(MyAvroType.class, typeInfoDataSet.getTypeClass());
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
 
Example 3
Source Project: flink   Source File: ParquetPojoInputFormat.java    License: Apache License 2.0
/**
 * Extracts the {@link TypeInformation}s from the {@link PojoTypeInfo} according to the given field names.
 */
private static <E> TypeInformation<?>[] extractTypeInfos(PojoTypeInfo<E> pojoTypeInfo, String[] fieldNames) {
	Preconditions.checkNotNull(pojoTypeInfo);
	Preconditions.checkNotNull(fieldNames);
	Preconditions.checkArgument(pojoTypeInfo.getArity() >= fieldNames.length);
	TypeInformation<?>[] fieldTypes = new TypeInformation<?>[fieldNames.length];
	for (int i = 0; i < fieldNames.length; ++i) {
		String fieldName = fieldNames[i];
		Preconditions.checkNotNull(fieldName, "The field can't be null");
		int fieldPos = pojoTypeInfo.getFieldIndex(fieldName);
		Preconditions.checkArgument(fieldPos >= 0,
			String.format("Field %s is not a member of POJO type %s",
				fieldName, pojoTypeInfo.getTypeClass().getName()));
		fieldTypes[i] = pojoTypeInfo.getTypeAt(fieldPos);
	}

	return fieldTypes;
}
 
Example 4
Source Project: Flink-CEPplus   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoType() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,AAA,3.123,BBB\n");
	wrt.write("456,BBB,1.123,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo);

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	validatePojoItem(inputFormat);
}
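
PojoItem is defined elsewhere in CsvInputFormatTest. Judging from the input data and the assertions in these tests, it is shaped roughly like the sketch below (field names are the ones referenced; exact modifiers in the real test class may differ):

public static class PojoItem {
	public int field1;
	public String field2;
	public Double field3;
	public String field4;
}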
 
Example 5
Source Project: Flink-CEPplus   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoTypeWithMappingInformation() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,3.123,AAA,BBB\n");
	wrt.write("456,1.123,BBB,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo, new String[]{"field1", "field3", "field2", "field4"});

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	validatePojoItem(inputFormat);
}
 
Example 6
Source Project: Flink-CEPplus   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoTypeWithMappingInfoAndPartialField() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,3.123,AAA,BBB\n");
	wrt.write("456,1.123,BBB,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo, new String[]{"field1", "field4"}, new boolean[]{true, false, false, true});

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	PojoItem item = new PojoItem();
	inputFormat.nextRecord(item);

	assertEquals(123, item.field1);
	assertEquals("BBB", item.field4);
}
 
Example 7
Source Project: Flink-CEPplus   Source File: FieldAccessorTest.java    License: Apache License 2.0
@Test
public void testPojoInPojo() {
	Outer o = new Outer(10, new Inner(4L), (short) 12);
	PojoTypeInfo<Outer> tpeInfo = (PojoTypeInfo<Outer>) TypeInformation.of(Outer.class);

	FieldAccessor<Outer, Long> fix = FieldAccessorFactory.getAccessor(tpeInfo, "i.x", null);
	assertEquals(4L, (long) fix.get(o));
	assertEquals(4L, o.i.x);
	o = fix.set(o, 22L);
	assertEquals(22L, (long) fix.get(o));
	assertEquals(22L, o.i.x);

	FieldAccessor<Outer, Inner> fi = FieldAccessorFactory.getAccessor(tpeInfo, "i", null);
	assertEquals(22L, fi.get(o).x);
	assertEquals(22L, (long) fix.get(o));
	assertEquals(22L, o.i.x);
	o = fi.set(o, new Inner(30L));
	assertEquals(30L, fi.get(o).x);
	assertEquals(30L, (long) fix.get(o));
	assertEquals(30L, o.i.x);
}
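
Outer and Inner are helper POJOs defined elsewhere in FieldAccessorTest. From the constructor calls and the i.x accesses above, they look roughly like this sketch; only i and x are actually exercised by the test, so the other field names are placeholders:

public static class Inner {
	public long x;

	public Inner() {}

	public Inner(long x) {
		this.x = x;
	}
}

public static class Outer {
	public int a;       // placeholder name for the int field
	public Inner i;
	public short b;     // placeholder name for the short field

	public Outer() {}

	public Outer(int a, Inner i, short b) {
		this.a = a;
		this.i = i;
		this.b = b;
	}
}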
 
Example 8
Source Project: flink   Source File: CassandraSink.java    License: Apache License 2.0
/**
 * Writes a DataStream into a Cassandra database.
 *
 * @param input input DataStream
 * @param <IN>  input type
 * @return CassandraSinkBuilder, to further configure the sink
 */
public static <IN> CassandraSinkBuilder<IN> addSink(DataStream<IN> input) {
	TypeInformation<IN> typeInfo = input.getType();
	if (typeInfo instanceof TupleTypeInfo) {
		DataStream<Tuple> tupleInput = (DataStream<Tuple>) input;
		return (CassandraSinkBuilder<IN>) new CassandraTupleSinkBuilder<>(tupleInput, tupleInput.getType(), tupleInput.getType().createSerializer(tupleInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof RowTypeInfo) {
		DataStream<Row> rowInput = (DataStream<Row>) input;
		return (CassandraSinkBuilder<IN>) new CassandraRowSinkBuilder(rowInput, rowInput.getType(), rowInput.getType().createSerializer(rowInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof PojoTypeInfo) {
		return new CassandraPojoSinkBuilder<>(input, input.getType(), input.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof CaseClassTypeInfo) {
		DataStream<Product> productInput = (DataStream<Product>) input;
		return (CassandraSinkBuilder<IN>) new CassandraScalaProductSinkBuilder<>(productInput, productInput.getType(), productInput.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	throw new IllegalArgumentException("No support for the type of the given DataStream: " + input.getType());
}
 
Example 9
Source Project: flink   Source File: FieldInfoUtils.java    License: Apache License 2.0
private static <A> List<FieldInfo> extractFieldInformation(
	TypeInformation<A> inputType,
	Expression[] exprs) {
	final List<FieldInfo> fieldInfos;
	if (inputType instanceof GenericTypeInfo && inputType.getTypeClass() == Row.class) {
		throw new ValidationException(
			"An input of GenericTypeInfo<Row> cannot be converted to Table. " +
				"Please specify the type of the input with a RowTypeInfo.");
	} else if (inputType instanceof TupleTypeInfoBase) {
		fieldInfos = extractFieldInfosFromTupleType((TupleTypeInfoBase<?>) inputType, exprs);
	} else if (inputType instanceof PojoTypeInfo) {
		fieldInfos = extractFieldInfosByNameReference((CompositeType<?>) inputType, exprs);
	} else {
		fieldInfos = extractFieldInfoFromAtomicType(inputType, exprs);
	}
	return fieldInfos;
}
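
FieldInfoUtils is internal to the Table API; the user-facing path into this POJO branch is converting a POJO stream into a Table with named fields. A minimal sketch under that assumption (the Event class is hypothetical, and the string-based fromDataStream variant is the one from this Flink generation's Java Table API):

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;

public class PojoToTableSketch {

	// Hypothetical POJO input type.
	public static class Event {
		public int id;
		public String name;

		public Event() {}

		public Event(int id, String name) {
			this.id = id;
			this.name = name;
		}
	}

	public static void main(String[] args) {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

		DataStream<Event> events = env.fromElements(new Event(1, "a"));

		// Because the input is a PojoTypeInfo, fields are matched by name and
		// may be reordered or aliased, unlike position-based tuple fields.
		Table table = tableEnv.fromDataStream(events, "name, id as eventId");
	}
}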
 
Example 10
Source Project: flink   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoType() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,AAA,3.123,BBB\n");
	wrt.write("456,BBB,1.123,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo);

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	validatePojoItem(inputFormat);
}
 
Example 11
Source Project: flink   Source File: AvroInputFormatTypeExtractionTest.java    License: Apache License 2.0
@Test
public void testTypeExtraction() {
	try {
		InputFormat<MyAvroType, ?> format = new AvroInputFormat<MyAvroType>(new Path("file:///ignore/this/file"), MyAvroType.class);

		TypeInformation<?> typeInfoDirect = TypeExtractor.getInputFormatTypes(format);

		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<MyAvroType> input = env.createInput(format);
		TypeInformation<?> typeInfoDataSet = input.getType();

		Assert.assertTrue(typeInfoDirect instanceof PojoTypeInfo);
		Assert.assertTrue(typeInfoDataSet instanceof PojoTypeInfo);

		Assert.assertEquals(MyAvroType.class, typeInfoDirect.getTypeClass());
		Assert.assertEquals(MyAvroType.class, typeInfoDataSet.getTypeClass());
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
 
Example 12
Source Project: flink   Source File: ParquetPojoInputFormatTest.java    License: Apache License 2.0
@Test
public void testReadPojoFromSimpleRecord() throws IOException {
	Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> simple = TestUtil.getSimpleRecordTestData();
	MessageType messageType = SCHEMA_CONVERTER.convert(TestUtil.SIMPLE_SCHEMA);
	Path path = TestUtil.createTempParquetFile(tempRoot.getRoot(), TestUtil.SIMPLE_SCHEMA, Collections.singletonList(simple.f1));

	ParquetPojoInputFormat<PojoSimpleRecord> inputFormat = new ParquetPojoInputFormat<>(
		path, messageType, (PojoTypeInfo<PojoSimpleRecord>) Types.POJO(PojoSimpleRecord.class));
	inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext());

	FileInputSplit[] splits = inputFormat.createInputSplits(1);
	assertEquals(1, splits.length);
	inputFormat.open(splits[0]);

	PojoSimpleRecord simpleRecord = inputFormat.nextRecord(null);
	assertEquals(simple.f2.getField(0), simpleRecord.getFoo());
	assertEquals(simple.f2.getField(1), simpleRecord.getBar());
	assertArrayEquals((Long[]) simple.f2.getField(2), simpleRecord.getArr());
}
 
Example 13
Source Project: flink   Source File: ParquetPojoInputFormatTest.java    License: Apache License 2.0
@Test
public void testProjectedReadPojoFromSimpleRecord() throws IOException, NoSuchFieldError {
	Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> simple = TestUtil.getSimpleRecordTestData();
	MessageType messageType = SCHEMA_CONVERTER.convert(TestUtil.SIMPLE_SCHEMA);
	Path path = TestUtil.createTempParquetFile(tempRoot.getRoot(), TestUtil.SIMPLE_SCHEMA, Collections.singletonList(simple.f1));

	ParquetPojoInputFormat<PojoSimpleRecord> inputFormat = new ParquetPojoInputFormat<>(
		path, messageType, (PojoTypeInfo<PojoSimpleRecord>) Types.POJO(PojoSimpleRecord.class));
	inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext());

	FileInputSplit[] splits = inputFormat.createInputSplits(1);
	assertEquals(1, splits.length);

	inputFormat.selectFields(new String[]{"foo"});
	inputFormat.open(splits[0]);

	PojoSimpleRecord simpleRecord = inputFormat.nextRecord(null);
	assertEquals(simple.f2.getField(0), simpleRecord.getFoo());
	assertEquals("", simpleRecord.getBar());
	assertArrayEquals(new Long[0], simpleRecord.getArr());
}
 
Example 14
Source Project: bahir-flink   Source File: SiddhiExecutionPlanSchemaTest.java    License: Apache License 2.0
@Test
public void testStreamSchemaWithPojo() {
    TypeInformation<Event> typeInfo = TypeExtractor.createTypeInfo(Event.class);
    assertTrue("Type information should be PojoTypeInfo", typeInfo instanceof PojoTypeInfo);

    SiddhiStreamSchema<Event> schema = new SiddhiStreamSchema<>(typeInfo, "id", "timestamp", "name", "price");
    assertEquals(4, schema.getFieldIndexes().length);

    StreamDefinition streamDefinition = schema.getStreamDefinition("test_stream");
    assertArrayEquals(new String[]{"id", "timestamp", "name", "price"}, streamDefinition.getAttributeNameArray());

    assertEquals(Attribute.Type.INT, streamDefinition.getAttributeType("id"));
    assertEquals(Attribute.Type.LONG, streamDefinition.getAttributeType("timestamp"));
    assertEquals(Attribute.Type.STRING, streamDefinition.getAttributeType("name"));
    assertEquals(Attribute.Type.DOUBLE, streamDefinition.getAttributeType("price"));

    assertEquals("define stream test_stream (id int,timestamp long,name string,price double);", schema.getStreamDefinitionExpression("test_stream"));
}
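
The Event class is a bahir-flink test helper; the attribute-type assertions above imply a shape roughly like this sketch (the real class may use private fields with getters and setters, which Flink's POJO analysis also accepts):

public static class Event {
	public int id;
	public long timestamp;
	public String name;
	public double price;
}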
 
Example 15
Source Project: flink   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoTypeWithMappingInformation() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,3.123,AAA,BBB\n");
	wrt.write("456,1.123,BBB,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo, new String[]{"field1", "field3", "field2", "field4"});

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	validatePojoItem(inputFormat);
}
 
Example 16
Source Project: flink   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoTypeWithPartialFieldInCSV() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,NODATA,AAA,NODATA,3.123,BBB\n");
	wrt.write("456,NODATA,BBB,NODATA,1.123,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo, new boolean[]{true, false, true, false, true, true});

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	validatePojoItem(inputFormat);
}
 
Example 17
Source Project: flink   Source File: FieldAccessorTest.java    License: Apache License 2.0
@Test
public void testPojoInPojo() {
	Outer o = new Outer(10, new Inner(4L), (short) 12);
	PojoTypeInfo<Outer> tpeInfo = (PojoTypeInfo<Outer>) TypeInformation.of(Outer.class);

	FieldAccessor<Outer, Long> fix = FieldAccessorFactory.getAccessor(tpeInfo, "i.x", null);
	assertEquals(4L, (long) fix.get(o));
	assertEquals(4L, o.i.x);
	o = fix.set(o, 22L);
	assertEquals(22L, (long) fix.get(o));
	assertEquals(22L, o.i.x);

	FieldAccessor<Outer, Inner> fi = FieldAccessorFactory.getAccessor(tpeInfo, "i", null);
	assertEquals(22L, fi.get(o).x);
	assertEquals(22L, (long) fix.get(o));
	assertEquals(22L, o.i.x);
	o = fi.set(o, new Inner(30L));
	assertEquals(30L, fi.get(o).x);
	assertEquals(30L, (long) fix.get(o));
	assertEquals(30L, o.i.x);
}
 
Example 18
Source Project: bahir-flink   Source File: StreamOutputHandler.java    License: Apache License 2.0
@Override
public void receive(Event[] events) {
    StreamRecord<R> reusableRecord = new StreamRecord<>(null, 0L);
    for (Event event : events) {
        if (typeInfo == null || Map.class.isAssignableFrom(typeInfo.getTypeClass())) {
            reusableRecord.replace(toMap(event), event.getTimestamp());
            output.collect(reusableRecord);
        } else if (typeInfo.isTupleType()) {
            Tuple tuple = this.toTuple(event);
            reusableRecord.replace(tuple, event.getTimestamp());
            output.collect(reusableRecord);
        } else if (typeInfo instanceof PojoTypeInfo) {
            R obj;
            try {
                obj = objectMapper.convertValue(toMap(event), typeInfo.getTypeClass());
            } catch (IllegalArgumentException ex) {
                LOGGER.error("Failed to map event: " + event + " into type: " + typeInfo, ex);
                throw ex;
            }
            reusableRecord.replace(obj, event.getTimestamp());
            output.collect(reusableRecord);
        } else {
            throw new IllegalArgumentException("Unable to format " + event + " as type " + typeInfo);
        }
    }
}
 
Example 19
Source Project: flink   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoTypeWithMappingInfoAndPartialField() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,3.123,AAA,BBB\n");
	wrt.write("456,1.123,BBB,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
	CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo, new String[]{"field1", "field4"}, new boolean[]{true, false, false, true});

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	PojoItem item = new PojoItem();
	inputFormat.nextRecord(item);

	assertEquals(123, item.field1);
	assertEquals("BBB", item.field4);
}
 
Example 20
Source Project: Flink-CEPplus   Source File: PojoCsvInputFormat.java    License: Apache License 2.0
public PojoCsvInputFormat(Path filePath, String lineDelimiter, String fieldDelimiter, PojoTypeInfo<OUT> pojoTypeInfo, String[] fieldNames, int[] includedFieldsMask) {
	super(filePath);
	boolean[] mask = (includedFieldsMask == null)
			? createDefaultMask(fieldNames.length)
			: toBooleanMask(includedFieldsMask);
	configure(lineDelimiter, fieldDelimiter, pojoTypeInfo, fieldNames, mask);
}
 
Example 21
Source Project: Flink-CEPplus   Source File: PojoCsvInputFormat.java    License: Apache License 2.0
private void configure(String lineDelimiter, String fieldDelimiter, PojoTypeInfo<OUT> pojoTypeInfo, String[] fieldNames, boolean[] includedFieldsMask) {

	if (includedFieldsMask == null) {
		includedFieldsMask = createDefaultMask(fieldNames.length);
	}

	for (String name : fieldNames) {
		if (name == null) {
			throw new NullPointerException("Field name must not be null.");
		}
		if (pojoTypeInfo.getFieldIndex(name) < 0) {
			throw new IllegalArgumentException("Field \"" + name + "\" not part of POJO type " + pojoTypeInfo.getTypeClass().getCanonicalName());
		}
	}

	setDelimiter(lineDelimiter);
	setFieldDelimiter(fieldDelimiter);

	Class<?>[] classes = new Class<?>[fieldNames.length];

	for (int i = 0; i < fieldNames.length; i++) {
		try {
			classes[i] = pojoTypeInfo.getTypeAt(pojoTypeInfo.getFieldIndex(fieldNames[i])).getTypeClass();
		} catch (IndexOutOfBoundsException e) {
			throw new IllegalArgumentException("Invalid field name: " + fieldNames[i]);
		}
	}

	this.pojoTypeClass = pojoTypeInfo.getTypeClass();
	this.pojoTypeInfo = pojoTypeInfo;
	setFieldsGeneric(includedFieldsMask, classes);
	setOrderOfPOJOFields(fieldNames);
}
 
Example 22
Source Project: flink   Source File: StateDescriptorTest.java    License: Apache License 2.0
@Test
public void testSerializerLazyInitializeInParallel() throws Exception {
	final String name = "testSerializerLazyInitializeInParallel";
	// use PojoTypeInfo which will create a new serializer when createSerializer is invoked.
	final TestStateDescriptor<String> desc =
		new TestStateDescriptor<>(name, new PojoTypeInfo<>(String.class, new ArrayList<>()));
	final int threadNumber = 20;
	final ArrayList<CheckedThread> threads = new ArrayList<>(threadNumber);
	final ExecutionConfig executionConfig = new ExecutionConfig();
	final ConcurrentHashMap<Integer, TypeSerializer<String>> serializers = new ConcurrentHashMap<>();
	for (int i = 0; i < threadNumber; i++) {
		threads.add(new CheckedThread() {
			@Override
			public void go() {
				desc.initializeSerializerUnlessSet(executionConfig);
				TypeSerializer<String> serializer = desc.getOriginalSerializer();
				serializers.put(System.identityHashCode(serializer), serializer);
			}
		});
	}
	threads.forEach(Thread::start);
	for (CheckedThread t : threads) {
		t.sync();
	}
	assertEquals("Should use only one serializer but actually: " + serializers, 1, serializers.size());
	threads.clear();
}
 
Example 23
Source Project: flink   Source File: FieldInfoUtilsTest.java    License: Apache License 2.0
@Parameterized.Parameters(name = "{0}")
public static Collection<TypeInformation> parameters() throws Exception {
	return Arrays.asList(
		new RowTypeInfo(
			new TypeInformation[]{Types.INT, Types.LONG, Types.SQL_TIMESTAMP},
			new String[]{"f0", "f1", "f2"}),
		new PojoTypeInfo(MyPojo.class, Arrays.asList(
			new PojoField(MyPojo.class.getDeclaredField("f0"), Types.INT),
			new PojoField(MyPojo.class.getDeclaredField("f1"), Types.LONG),
			new PojoField(MyPojo.class.getDeclaredField("f2"), Types.SQL_TIMESTAMP))));
}
 
Example 24
Source Project: flink   Source File: CsvInputFormatTest.java    License: Apache License 2.0
@Test
public void testPojoTypeWithPrivateField() throws Exception {
	File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
	wrt.write("123,AAA,3.123,BBB\n");
	wrt.write("456,BBB,1.123,AAA\n");
	wrt.close();

	@SuppressWarnings("unchecked")
	PojoTypeInfo<PrivatePojoItem> typeInfo = (PojoTypeInfo<PrivatePojoItem>) TypeExtractor.createTypeInfo(PrivatePojoItem.class);
	CsvInputFormat<PrivatePojoItem> inputFormat = new PojoCsvInputFormat<PrivatePojoItem>(new Path(tempFile.toURI().toString()), typeInfo);

	inputFormat.configure(new Configuration());

	FileInputSplit[] splits = inputFormat.createInputSplits(1);
	inputFormat.open(splits[0]);

	PrivatePojoItem item = new PrivatePojoItem();
	inputFormat.nextRecord(item);

	assertEquals(123, item.field1);
	assertEquals("AAA", item.field2);
	assertEquals(Double.valueOf(3.123), item.field3);
	assertEquals("BBB", item.field4);

	inputFormat.nextRecord(item);

	assertEquals(456, item.field1);
	assertEquals("BBB", item.field2);
	assertEquals(Double.valueOf(1.123), item.field3);
	assertEquals("AAA", item.field4);
}