org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo Java Examples
The following examples show how to use org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo.
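Before the project examples, here is a minimal, self-contained sketch of the two calls most of them revolve around: ObjectArrayTypeInfo.getInfoFor(...) to describe an object array, and getComponentInfo() to recover the element type. The wrapper class name and printed output are illustrative assumptions only; the API usage follows the examples below.

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;

// Hypothetical demo class; only the ObjectArrayTypeInfo calls are taken from the examples below.
public class ObjectArrayTypeInfoDemo {

    public static void main(String[] args) {
        // Describe a String[] value as an object array whose elements are plain Strings.
        ObjectArrayTypeInfo<String[], String> arrayInfo =
            ObjectArrayTypeInfo.getInfoFor(String[].class, BasicTypeInfo.STRING_TYPE_INFO);

        // The component type can be unwrapped again, e.g. for recursive handling of nested types.
        TypeInformation<String> elementInfo = arrayInfo.getComponentInfo();

        System.out.println(arrayInfo);   // e.g. ObjectArrayTypeInfo<String>
        System.out.println(elementInfo); // e.g. String
    }
}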
Example #1
Source File: DataStreamTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testObjectArrayKeyRejection() {

    KeySelector<Tuple2<Integer[], String>, Object[]> keySelector =
        new KeySelector<Tuple2<Integer[], String>, Object[]>() {

            @Override
            public Object[] getKey(Tuple2<Integer[], String> value) throws Exception {
                Object[] ks = new Object[value.f0.length];
                for (int i = 0; i < ks.length; i++) {
                    ks[i] = new Object();
                }
                return ks;
            }
        };

    ObjectArrayTypeInfo<Object[], Object> keyTypeInfo = ObjectArrayTypeInfo.getInfoFor(
        Object[].class, new GenericTypeInfo<>(Object.class));

    assertArrayKeyRejection(keySelector, keyTypeInfo);
}
Example #2
Source File: ParquetMapInputFormat.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private void convert(List<Object> target, Object[] source, ObjectArrayTypeInfo objectArrayTypeInfo) {
    TypeInformation<?> itemType = objectArrayTypeInfo.getComponentInfo();
    for (Object field : source) {
        if (itemType instanceof RowTypeInfo) {
            Map<String, Object> nestedRow = new HashMap<>();
            convert(nestedRow, (Row) field,
                ((RowTypeInfo) itemType).getFieldTypes(), ((RowTypeInfo) itemType).getFieldNames());
            target.add(nestedRow);
        } else if (itemType instanceof MapTypeInfo) {
            Map<String, Object> nestedMap = new HashMap<>();
            MapTypeInfo mapTypeInfo = (MapTypeInfo) itemType;
            convert(nestedMap, (Map<String, Object>) field, mapTypeInfo);
            target.add(nestedMap);
        } else if (itemType instanceof ObjectArrayTypeInfo) {
            List<Object> nestedObjectList = new ArrayList<>();
            convert(nestedObjectList, (Row[]) field, (ObjectArrayTypeInfo) itemType);
            target.add(nestedObjectList);
        }
    }
}
Example #3
Source File: Serializers.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public static void recursivelyRegisterType(TypeInformation<?> typeInfo, ExecutionConfig config, Set<Class<?>> alreadySeen) {
    if (typeInfo instanceof GenericTypeInfo) {
        GenericTypeInfo<?> genericTypeInfo = (GenericTypeInfo<?>) typeInfo;
        Serializers.recursivelyRegisterType(genericTypeInfo.getTypeClass(), config, alreadySeen);
    }
    else if (typeInfo instanceof CompositeType) {
        List<GenericTypeInfo<?>> genericTypesInComposite = new ArrayList<>();
        getContainedGenericTypes((CompositeType<?>) typeInfo, genericTypesInComposite);
        for (GenericTypeInfo<?> gt : genericTypesInComposite) {
            Serializers.recursivelyRegisterType(gt.getTypeClass(), config, alreadySeen);
        }
    }
    else if (typeInfo instanceof ObjectArrayTypeInfo) {
        ObjectArrayTypeInfo<?, ?> objectArrayTypeInfo = (ObjectArrayTypeInfo<?, ?>) typeInfo;
        recursivelyRegisterType(objectArrayTypeInfo.getComponentInfo(), config, alreadySeen);
    }
}
Example #4
Source File: DataStreamTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Test
public void testObjectArrayKeyRejection() {

    KeySelector<Tuple2<Integer[], String>, Object[]> keySelector =
        new KeySelector<Tuple2<Integer[], String>, Object[]>() {

            @Override
            public Object[] getKey(Tuple2<Integer[], String> value) throws Exception {
                Object[] ks = new Object[value.f0.length];
                for (int i = 0; i < ks.length; i++) {
                    ks[i] = new Object();
                }
                return ks;
            }
        };

    ObjectArrayTypeInfo<Object[], Object> keyTypeInfo = ObjectArrayTypeInfo.getInfoFor(
        Object[].class, new GenericTypeInfo<>(Object.class));

    testKeyRejection(keySelector, keyTypeInfo);
}
Example #5
Source File: ParquetMapInputFormat.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private void convert(Map<String, Object> target, Map<String, Object> source, MapTypeInfo mapTypeInfo) {
    TypeInformation valueTypeInfo = mapTypeInfo.getValueTypeInfo();
    for (String key : source.keySet()) {
        if (valueTypeInfo instanceof RowTypeInfo) {
            Map<String, Object> nestedRow = new HashMap<>();
            convert(nestedRow, (Row) source.get(key),
                ((RowTypeInfo) valueTypeInfo).getFieldTypes(), ((RowTypeInfo) valueTypeInfo).getFieldNames());
            target.put(key, nestedRow);
        } else if (valueTypeInfo instanceof MapTypeInfo) {
            Map<String, Object> nestedMap = new HashMap<>();
            convert(nestedMap, (Map<String, Object>) source.get(key), (MapTypeInfo) valueTypeInfo);
            target.put(key, nestedMap);
        } else if (valueTypeInfo instanceof ObjectArrayTypeInfo) {
            List<Object> nestedObjectList = new ArrayList<>();
            convert(nestedObjectList, (Object[]) source.get(key), (ObjectArrayTypeInfo) valueTypeInfo);
            target.put(key, nestedObjectList);
        }
    }
}
Example #6
Source File: Serializers.java From flink with Apache License 2.0 | 6 votes |
public static void recursivelyRegisterType(TypeInformation<?> typeInfo, ExecutionConfig config, Set<Class<?>> alreadySeen) {
    if (typeInfo instanceof GenericTypeInfo) {
        GenericTypeInfo<?> genericTypeInfo = (GenericTypeInfo<?>) typeInfo;
        Serializers.recursivelyRegisterType(genericTypeInfo.getTypeClass(), config, alreadySeen);
    }
    else if (typeInfo instanceof CompositeType) {
        List<GenericTypeInfo<?>> genericTypesInComposite = new ArrayList<>();
        getContainedGenericTypes((CompositeType<?>) typeInfo, genericTypesInComposite);
        for (GenericTypeInfo<?> gt : genericTypesInComposite) {
            Serializers.recursivelyRegisterType(gt.getTypeClass(), config, alreadySeen);
        }
    }
    else if (typeInfo instanceof ObjectArrayTypeInfo) {
        ObjectArrayTypeInfo<?, ?> objectArrayTypeInfo = (ObjectArrayTypeInfo<?, ?>) typeInfo;
        recursivelyRegisterType(objectArrayTypeInfo.getComponentInfo(), config, alreadySeen);
    }
}
Example #7
Source File: DataStreamTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testObjectArrayKeyRejection() {

    KeySelector<Tuple2<Integer[], String>, Object[]> keySelector =
        new KeySelector<Tuple2<Integer[], String>, Object[]>() {

            @Override
            public Object[] getKey(Tuple2<Integer[], String> value) throws Exception {
                Object[] ks = new Object[value.f0.length];
                for (int i = 0; i < ks.length; i++) {
                    ks[i] = new Object();
                }
                return ks;
            }
        };

    ObjectArrayTypeInfo<Object[], Object> keyTypeInfo = ObjectArrayTypeInfo.getInfoFor(
        Object[].class, new GenericTypeInfo<>(Object.class));

    testKeyRejection(keySelector, keyTypeInfo);
}
Example #8
Source File: JsonRowDeserializationSchema.java From flink with Apache License 2.0 | 6 votes |
private Optional<DeserializationRuntimeConverter> createContainerConverter(TypeInformation<?> typeInfo) {
    if (typeInfo instanceof RowTypeInfo) {
        return Optional.of(createRowConverter((RowTypeInfo) typeInfo));
    } else if (typeInfo instanceof ObjectArrayTypeInfo) {
        return Optional.of(createObjectArrayConverter(((ObjectArrayTypeInfo) typeInfo).getComponentInfo()));
    } else if (typeInfo instanceof BasicArrayTypeInfo) {
        return Optional.of(createObjectArrayConverter(((BasicArrayTypeInfo) typeInfo).getComponentInfo()));
    } else if (isPrimitiveByteArray(typeInfo)) {
        return Optional.of(createByteArrayConverter());
    } else if (typeInfo instanceof MapTypeInfo) {
        MapTypeInfo<?, ?> mapTypeInfo = (MapTypeInfo<?, ?>) typeInfo;
        return Optional.of(createMapConverter(mapTypeInfo.getKeyTypeInfo(), mapTypeInfo.getValueTypeInfo()));
    } else {
        return Optional.empty();
    }
}
Example #9
Source File: ParquetMapInputFormat.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked")
private void convert(Map<String, Object> target, Map<String, Object> source, MapTypeInfo mapTypeInfo) {
    TypeInformation valueTypeInfo = mapTypeInfo.getValueTypeInfo();
    for (Map.Entry<String, Object> entry : source.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        if (valueTypeInfo instanceof RowTypeInfo) {
            Map<String, Object> nestedRow = new HashMap<>();
            convert(nestedRow, (Row) value,
                ((RowTypeInfo) valueTypeInfo).getFieldTypes(), ((RowTypeInfo) valueTypeInfo).getFieldNames());
            target.put(key, nestedRow);
        } else if (valueTypeInfo instanceof MapTypeInfo) {
            Map<String, Object> nestedMap = new HashMap<>();
            convert(nestedMap, (Map<String, Object>) value, (MapTypeInfo) valueTypeInfo);
            target.put(key, nestedMap);
        } else if (valueTypeInfo instanceof ObjectArrayTypeInfo) {
            List<Object> nestedObjectList = new ArrayList<>();
            convert(nestedObjectList, (Object[]) value, (ObjectArrayTypeInfo) valueTypeInfo);
            target.put(key, nestedObjectList);
        }
    }
}
Example #10
Source File: OrcRowInputFormatTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testProducedTypeWithProjection() throws IOException {
    rowOrcInputFormat = new OrcRowInputFormat(getPath(TEST_FILE_NESTED), TEST_SCHEMA_NESTED, new Configuration());
    rowOrcInputFormat.selectFields(9, 3, 7, 10);

    assertTrue(rowOrcInputFormat.getProducedType() instanceof RowTypeInfo);
    RowTypeInfo producedType = (RowTypeInfo) rowOrcInputFormat.getProducedType();

    assertArrayEquals(
        new TypeInformation[]{
            // struct
            Types.ROW_NAMED(
                new String[]{"list"},
                ObjectArrayTypeInfo.getInfoFor(
                    Types.ROW_NAMED(new String[]{"int1", "string1"}, Types.INT, Types.STRING))),
            // int
            Types.INT,
            // binary
            PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO,
            // list
            ObjectArrayTypeInfo.getInfoFor(
                Types.ROW_NAMED(new String[]{"int1", "string1"}, Types.INT, Types.STRING))
        },
        producedType.getFieldTypes());
    assertArrayEquals(
        new String[]{"middle", "int1", "bytes1", "list"},
        producedType.getFieldNames());
}
Example #11
Source File: CsvRowSchemaConverter.java From flink with Apache License 2.0 | 5 votes |
/**
 * Convert {@link TypeInformation} to {@link CsvSchema.ColumnType} based on Jackson's categories.
 */
private static CsvSchema.ColumnType convertType(String fieldName, TypeInformation<?> info) {
    if (STRING_TYPES.contains(info)) {
        return CsvSchema.ColumnType.STRING;
    } else if (NUMBER_TYPES.contains(info)) {
        return CsvSchema.ColumnType.NUMBER;
    } else if (BOOLEAN_TYPES.contains(info)) {
        return CsvSchema.ColumnType.BOOLEAN;
    } else if (info instanceof ObjectArrayTypeInfo) {
        validateNestedField(fieldName, ((ObjectArrayTypeInfo) info).getComponentInfo());
        return CsvSchema.ColumnType.ARRAY;
    } else if (info instanceof BasicArrayTypeInfo) {
        validateNestedField(fieldName, ((BasicArrayTypeInfo) info).getComponentInfo());
        return CsvSchema.ColumnType.ARRAY;
    } else if (info instanceof RowTypeInfo) {
        final TypeInformation<?>[] types = ((RowTypeInfo) info).getFieldTypes();
        for (TypeInformation<?> type : types) {
            validateNestedField(fieldName, type);
        }
        return CsvSchema.ColumnType.ARRAY;
    } else if (info instanceof PrimitiveArrayTypeInfo &&
            ((PrimitiveArrayTypeInfo) info).getComponentType() == Types.BYTE) {
        return CsvSchema.ColumnType.STRING;
    } else {
        throw new IllegalArgumentException(
            "Unsupported type information '" + info.toString() + "' for field '" + fieldName + "'.");
    }
}
Example #12
Source File: OrcTableSourceTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
private TypeInformation[] getNestedFieldTypes() {
    return new TypeInformation[]{
        Types.BOOLEAN, Types.BYTE, Types.SHORT, Types.INT, Types.LONG, Types.FLOAT, Types.DOUBLE,
        PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO, Types.STRING,
        Types.ROW_NAMED(
            new String[]{"list"},
            ObjectArrayTypeInfo.getInfoFor(
                Types.ROW_NAMED(
                    new String[]{"int1", "string1"},
                    Types.INT,
                    Types.STRING
                )
            )
        ),
        ObjectArrayTypeInfo.getInfoFor(
            Types.ROW_NAMED(
                new String[]{"int1", "string1"},
                Types.INT,
                Types.STRING
            )
        ),
        new MapTypeInfo<>(
            Types.STRING,
            Types.ROW_NAMED(
                new String[]{"int1", "string1"},
                Types.INT,
                Types.STRING
            )
        )
    };
}
Example #13
Source File: JsonRowSerializationSchema.java From flink with Apache License 2.0 | 5 votes |
private Optional<SerializationRuntimeConverter> createContainerConverter(TypeInformation<?> typeInfo) {
    if (typeInfo instanceof RowTypeInfo) {
        return Optional.of(createRowConverter((RowTypeInfo) typeInfo));
    } else if (typeInfo instanceof ObjectArrayTypeInfo) {
        return Optional.of(createObjectArrayConverter(((ObjectArrayTypeInfo) typeInfo).getComponentInfo()));
    } else if (typeInfo instanceof BasicArrayTypeInfo) {
        return Optional.of(createObjectArrayConverter(((BasicArrayTypeInfo) typeInfo).getComponentInfo()));
    } else if (isPrimitiveByteArray(typeInfo)) {
        return Optional.of((mapper, reuse, object) -> mapper.getNodeFactory().binaryNode((byte[]) object));
    } else {
        return Optional.empty();
    }
}
Example #14
Source File: JsonRowDeserializationSchema.java From flink with Apache License 2.0 | 5 votes |
private Optional<DeserializationRuntimeConverter> createContainerConverter(TypeInformation<?> typeInfo) {
    if (typeInfo instanceof RowTypeInfo) {
        return Optional.of(createRowConverter((RowTypeInfo) typeInfo));
    } else if (typeInfo instanceof ObjectArrayTypeInfo) {
        return Optional.of(createObjectArrayConverter(((ObjectArrayTypeInfo) typeInfo).getComponentInfo()));
    } else if (typeInfo instanceof BasicArrayTypeInfo) {
        return Optional.of(createObjectArrayConverter(((BasicArrayTypeInfo) typeInfo).getComponentInfo()));
    } else if (isPrimitiveByteArray(typeInfo)) {
        return Optional.of(createByteArrayConverter());
    } else {
        return Optional.empty();
    }
}
Example #15
Source File: ParquetMapInputFormat.java From flink with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked")
private void convert(Map<String, Object> map, Row row, TypeInformation<?>[] fieldTypes, String[] fieldNames) {
    for (int i = 0; i < fieldNames.length; i++) {
        if (row.getField(i) != null) {
            if (fieldTypes[i] instanceof BasicTypeInfo
                    || fieldTypes[i] instanceof PrimitiveArrayTypeInfo
                    || fieldTypes[i] instanceof BasicArrayTypeInfo) {
                map.put(fieldNames[i], row.getField(i));
            } else if (fieldTypes[i] instanceof RowTypeInfo) {
                Map<String, Object> nestedRow = new HashMap<>();
                RowTypeInfo nestedRowTypeInfo = (RowTypeInfo) fieldTypes[i];
                convert(nestedRow, (Row) row.getField(i),
                    nestedRowTypeInfo.getFieldTypes(), nestedRowTypeInfo.getFieldNames());
                map.put(fieldNames[i], nestedRow);
            } else if (fieldTypes[i] instanceof MapTypeInfo) {
                Map<String, Object> nestedMap = new HashMap<>();
                MapTypeInfo mapTypeInfo = (MapTypeInfo) fieldTypes[i];
                convert(nestedMap, (Map<String, Object>) row.getField(i), mapTypeInfo);
                map.put(fieldNames[i], nestedMap);
            } else if (fieldTypes[i] instanceof ObjectArrayTypeInfo) {
                List<Object> nestedObjectList = new ArrayList<>();
                ObjectArrayTypeInfo objectArrayTypeInfo = (ObjectArrayTypeInfo) fieldTypes[i];
                convert(nestedObjectList, (Row[]) row.getField(i), objectArrayTypeInfo);
                map.put(fieldNames[i], nestedObjectList);
            }
        }
    }
}
Example #16
Source File: OrcRowInputFormatTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test
public void testProducedType() throws IOException {
    rowOrcInputFormat = new OrcRowInputFormat(getPath(TEST_FILE_NESTED), TEST_SCHEMA_NESTED, new Configuration());

    assertTrue(rowOrcInputFormat.getProducedType() instanceof RowTypeInfo);
    RowTypeInfo producedType = (RowTypeInfo) rowOrcInputFormat.getProducedType();

    assertArrayEquals(
        new TypeInformation[]{
            // primitives
            Types.BOOLEAN, Types.BYTE, Types.SHORT, Types.INT, Types.LONG, Types.FLOAT, Types.DOUBLE,
            // binary
            PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO,
            // string
            Types.STRING,
            // struct
            Types.ROW_NAMED(
                new String[]{"list"},
                ObjectArrayTypeInfo.getInfoFor(
                    Types.ROW_NAMED(new String[]{"int1", "string1"}, Types.INT, Types.STRING))),
            // list
            ObjectArrayTypeInfo.getInfoFor(
                Types.ROW_NAMED(new String[]{"int1", "string1"}, Types.INT, Types.STRING)),
            // map
            new MapTypeInfo<>(Types.STRING, Types.ROW_NAMED(new String[]{"int1", "string1"}, Types.INT, Types.STRING))
        },
        producedType.getFieldTypes());
    assertArrayEquals(
        new String[]{"boolean1", "byte1", "short1", "int1", "long1", "float1", "double1", "bytes1", "string1", "middle", "list", "map"},
        producedType.getFieldNames());
}
Example #17
Source File: JDBCTypeUtil.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
static int typeInformationToSqlType(TypeInformation<?> type) {
    if (TYPE_MAPPING.containsKey(type)) {
        return TYPE_MAPPING.get(type);
    } else if (type instanceof ObjectArrayTypeInfo || type instanceof PrimitiveArrayTypeInfo) {
        return Types.ARRAY;
    } else {
        throw new IllegalArgumentException("Unsupported type: " + type);
    }
}
Example #18
Source File: ParquetSchemaConverter.java From flink with Apache License 2.0 | 5 votes |
private static TypeInformation<?> convertParquetPrimitiveListToFlinkArray(Type type) {
    // Backward compatibility: a list whose element group does not exist is also allowed.
    TypeInformation<?> flinkType = convertParquetTypeToTypeInfo(type);
    if (flinkType.isBasicType()) {
        return BasicArrayTypeInfo.getInfoFor(Array.newInstance(flinkType.getTypeClass(), 0).getClass());
    } else {
        // flinkType here can be either SqlTimeTypeInfo or BasicTypeInfo.BIG_DEC_TYPE_INFO,
        // so it should be converted to ObjectArrayTypeInfo.
        return ObjectArrayTypeInfo.getInfoFor(flinkType);
    }
}
Example #19
Source File: CsvRowSchemaConverter.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Convert {@link TypeInformation} to {@link CsvSchema.ColumnType} based on Jackson's categories.
 */
private static CsvSchema.ColumnType convertType(String fieldName, TypeInformation<?> info) {
    if (STRING_TYPES.contains(info)) {
        return CsvSchema.ColumnType.STRING;
    } else if (NUMBER_TYPES.contains(info)) {
        return CsvSchema.ColumnType.NUMBER;
    } else if (BOOLEAN_TYPES.contains(info)) {
        return CsvSchema.ColumnType.BOOLEAN;
    } else if (info instanceof ObjectArrayTypeInfo) {
        validateNestedField(fieldName, ((ObjectArrayTypeInfo) info).getComponentInfo());
        return CsvSchema.ColumnType.ARRAY;
    } else if (info instanceof BasicArrayTypeInfo) {
        validateNestedField(fieldName, ((BasicArrayTypeInfo) info).getComponentInfo());
        return CsvSchema.ColumnType.ARRAY;
    } else if (info instanceof RowTypeInfo) {
        final TypeInformation<?>[] types = ((RowTypeInfo) info).getFieldTypes();
        for (TypeInformation<?> type : types) {
            validateNestedField(fieldName, type);
        }
        return CsvSchema.ColumnType.ARRAY;
    } else if (info instanceof PrimitiveArrayTypeInfo &&
            ((PrimitiveArrayTypeInfo) info).getComponentType() == Types.BYTE) {
        return CsvSchema.ColumnType.STRING;
    } else {
        throw new IllegalArgumentException(
            "Unsupported type information '" + info.toString() + "' for field '" + fieldName + "'.");
    }
}
Example #20
Source File: Types.java From flink with Apache License 2.0 | 5 votes |
/**
 * Returns type information for Java arrays of object types (such as <code>String[]</code>,
 * <code>Integer[]</code>). The array itself must not be null. Null values for elements are supported.
 *
 * @param elementType element type of the array
 */
@SuppressWarnings("unchecked")
public static <E> TypeInformation<E[]> OBJECT_ARRAY(TypeInformation<E> elementType) {
    if (elementType == Types.STRING) {
        return (TypeInformation) BasicArrayTypeInfo.STRING_ARRAY_TYPE_INFO;
    }
    return ObjectArrayTypeInfo.getInfoFor(elementType);
}
Example #21
Source File: OrcRowInputFormatTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test
public void testProducedTypeWithProjection() throws IOException {
    rowOrcInputFormat = new OrcRowInputFormat(getPath(TEST_FILE_NESTED), TEST_SCHEMA_NESTED, new Configuration());
    rowOrcInputFormat.selectFields(9, 3, 7, 10);

    assertTrue(rowOrcInputFormat.getProducedType() instanceof RowTypeInfo);
    RowTypeInfo producedType = (RowTypeInfo) rowOrcInputFormat.getProducedType();

    assertArrayEquals(
        new TypeInformation[]{
            // struct
            Types.ROW_NAMED(
                new String[]{"list"},
                ObjectArrayTypeInfo.getInfoFor(
                    Types.ROW_NAMED(new String[]{"int1", "string1"}, Types.INT, Types.STRING))),
            // int
            Types.INT,
            // binary
            PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO,
            // list
            ObjectArrayTypeInfo.getInfoFor(
                Types.ROW_NAMED(new String[]{"int1", "string1"}, Types.INT, Types.STRING))
        },
        producedType.getFieldTypes());
    assertArrayEquals(
        new String[]{"middle", "int1", "bytes1", "list"},
        producedType.getFieldNames());
}
Example #22
Source File: JdbcTypeConverter.java From Alink with Apache License 2.0 | 5 votes |
/**
 * Get {@link java.sql.Types} (in integer form) from Flink TypeInformation.
 *
 * @param type flink TypeInformation.
 * @return Corresponding type integer in {@link java.sql.Types}.
 * @throws IllegalArgumentException when unsupported type encountered.
 */
public static int getIntegerSqlType(TypeInformation<?> type) {
    if (MAP_FLINK_TYPE_TO_INDEX.containsKey(type)) {
        return MAP_FLINK_TYPE_TO_INDEX.get(type);
    } else if (type instanceof ObjectArrayTypeInfo || type instanceof PrimitiveArrayTypeInfo) {
        return Types.ARRAY;
    } else {
        throw new IllegalArgumentException("Unsupported type: " + type);
    }
}
Example #23
Source File: JdbcTypeUtil.java From flink with Apache License 2.0 | 5 votes |
public static int typeInformationToSqlType(TypeInformation<?> type) {
    if (TYPE_MAPPING.containsKey(type)) {
        return TYPE_MAPPING.get(type);
    } else if (type instanceof ObjectArrayTypeInfo || type instanceof PrimitiveArrayTypeInfo) {
        return Types.ARRAY;
    } else {
        throw new IllegalArgumentException("Unsupported type: " + type);
    }
}
Example #24
Source File: LegacyTypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 5 votes |
public static DataType toDataType(TypeInformation<?> typeInfo) {
    // time indicators first as their hashCode/equals is shared with those of regular timestamps
    if (typeInfo instanceof TimeIndicatorTypeInfo) {
        return convertToTimeAttributeType((TimeIndicatorTypeInfo) typeInfo);
    }

    final DataType foundDataType = typeInfoDataTypeMap.get(typeInfo);
    if (foundDataType != null) {
        return foundDataType;
    }

    if (typeInfo instanceof RowTypeInfo) {
        return convertToRowType((RowTypeInfo) typeInfo);
    } else if (typeInfo instanceof ObjectArrayTypeInfo) {
        return convertToArrayType(
            typeInfo.getTypeClass(),
            ((ObjectArrayTypeInfo) typeInfo).getComponentInfo());
    } else if (typeInfo instanceof BasicArrayTypeInfo) {
        return createLegacyType(LogicalTypeRoot.ARRAY, typeInfo);
    } else if (typeInfo instanceof MultisetTypeInfo) {
        return convertToMultisetType(((MultisetTypeInfo) typeInfo).getElementTypeInfo());
    } else if (typeInfo instanceof MapTypeInfo) {
        return convertToMapType((MapTypeInfo) typeInfo);
    } else if (typeInfo instanceof CompositeType) {
        return createLegacyType(LogicalTypeRoot.STRUCTURED_TYPE, typeInfo);
    }

    return createLegacyType(LogicalTypeRoot.RAW, typeInfo);
}