Java Code Examples for org.apache.flink.table.api.DataTypes#ARRAY

The following examples show how to use org.apache.flink.table.api.DataTypes#ARRAY. Each example is taken from an open-source project; the source file and project are noted above each snippet.
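Before the examples, here is a minimal orientation sketch (written for this page, not taken from any of the projects below) showing how DataTypes.ARRAY composes with the other DataTypes constructors:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

// ARRAY takes the element type as its argument; constructors nest freely.
DataType intArray = DataTypes.ARRAY(DataTypes.INT());                   // ARRAY<INT>
DataType nested = DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.STRING())); // ARRAY<ARRAY<STRING>>
DataType rowArray = DataTypes.ARRAY(
	DataTypes.ROW(DataTypes.FIELD("id", DataTypes.BIGINT())));      // ARRAY<ROW<id BIGINT>>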
Example 1
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0
@Test
public void testComplexDataTypes() throws Exception {
	DataType[] types = new DataType[]{
		DataTypes.ARRAY(DataTypes.DOUBLE()),
		DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()),
		DataTypes.ROW(
			DataTypes.FIELD("0", DataTypes.BOOLEAN()),
			DataTypes.FIELD("1", DataTypes.BOOLEAN()),
			DataTypes.FIELD("2", DataTypes.DATE())),

		// nested complex types
		DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.INT())),
		DataTypes.MAP(DataTypes.STRING(), DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
		DataTypes.ROW(
			DataTypes.FIELD("3", DataTypes.ARRAY(DataTypes.DECIMAL(5, 3))),
			DataTypes.FIELD("4", DataTypes.MAP(DataTypes.TINYINT(), DataTypes.SMALLINT())),
			DataTypes.FIELD("5", DataTypes.ROW(DataTypes.FIELD("3", DataTypes.TIMESTAMP())))
		)
	};

	verifyDataTypes(types);
}
 
Example 2
Source File: HiveGenericUDTFTest.java    From flink with Apache License 2.0
@Test
public void testArray() throws Exception {
	Object[] constantArgs = new Object[] {
		null
	};

	DataType[] dataTypes = new DataType[] {
		DataTypes.ARRAY(DataTypes.INT())
	};

	HiveGenericUDTF udf = init(
		GenericUDTFPosExplode.class,
		constantArgs,
		dataTypes
	);

	udf.eval(new Integer[]{1, 2, 3});

	assertEquals(Arrays.asList(Row.of(0, 1), Row.of(1, 2), Row.of(2, 3)), collector.result);
}
 
Example 3
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0
@Test
public void testComplexDataTypes() throws Exception {
	DataType[] types = new DataType[]{
		DataTypes.ARRAY(DataTypes.DOUBLE()),
		DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()),
		DataTypes.ROW(
			DataTypes.FIELD("0", DataTypes.BOOLEAN()),
			DataTypes.FIELD("1", DataTypes.BOOLEAN()),
			DataTypes.FIELD("2", DataTypes.DATE())),

		// nested complex types
		DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.INT())),
		DataTypes.MAP(DataTypes.STRING(), DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
		DataTypes.ROW(
			DataTypes.FIELD("3", DataTypes.ARRAY(DataTypes.DECIMAL(5, 3))),
			DataTypes.FIELD("4", DataTypes.MAP(DataTypes.TINYINT(), DataTypes.SMALLINT())),
			DataTypes.FIELD("5", DataTypes.ROW(DataTypes.FIELD("3", DataTypes.TIMESTAMP(9))))
		)
	};

	verifyDataTypes(types);
}
 
Example 4
Source File: HiveTypeUtil.java    From flink with Apache License 2.0
/**
 * Converts a Hive data type to a Flink data type.
 *
 * @param hiveType a Hive data type
 * @return the corresponding Flink data type
 */
public static DataType toFlinkType(TypeInfo hiveType) {
	checkNotNull(hiveType, "hiveType cannot be null");

	switch (hiveType.getCategory()) {
		case PRIMITIVE:
			return toFlinkPrimitiveType((PrimitiveTypeInfo) hiveType);
		case LIST:
			ListTypeInfo listTypeInfo = (ListTypeInfo) hiveType;
			return DataTypes.ARRAY(toFlinkType(listTypeInfo.getListElementTypeInfo()));
		case MAP:
			MapTypeInfo mapTypeInfo = (MapTypeInfo) hiveType;
			return DataTypes.MAP(toFlinkType(mapTypeInfo.getMapKeyTypeInfo()), toFlinkType(mapTypeInfo.getMapValueTypeInfo()));
		case STRUCT:
			StructTypeInfo structTypeInfo = (StructTypeInfo) hiveType;

			List<String> names = structTypeInfo.getAllStructFieldNames();
			List<TypeInfo> typeInfos = structTypeInfo.getAllStructFieldTypeInfos();

			DataTypes.Field[] fields = new DataTypes.Field[names.size()];

			for (int i = 0; i < fields.length; i++) {
				fields[i] = DataTypes.FIELD(names.get(i), toFlinkType(typeInfos.get(i)));
			}

			return DataTypes.ROW(fields);
		default:
			throw new UnsupportedOperationException(
				String.format("Flink doesn't support Hive data type %s yet.", hiveType));
	}
}
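A hedged usage sketch of the LIST branch above (assuming Hive's TypeInfoFactory is available; this call is not part of the original file): converting Hive's array<int> yields Flink's ARRAY<INT>.

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Build Hive's array<int> and convert it; expected result: DataTypes.ARRAY(DataTypes.INT())
TypeInfo hiveArray = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.intTypeInfo);
DataType flinkArray = HiveTypeUtil.toFlinkType(hiveArray);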
 
Example 5
Source File: HiveGenericUDTFTest.java    From flink with Apache License 2.0
@Test
public void testStruct() throws Exception {
	Object[] constantArgs = new Object[] {
		null
	};

	DataType[] dataTypes = new DataType[] {
		DataTypes.ARRAY(
			DataTypes.ROW(
				DataTypes.FIELD("1", DataTypes.INT()),
				DataTypes.FIELD("2", DataTypes.DOUBLE())
			)
		)
	};

	HiveGenericUDTF udf = init(
		GenericUDTFInline.class,
		constantArgs,
		dataTypes
	);

	udf.eval(
		new Row[]{
			Row.of(1, 2.2d),
			Row.of(3, 4.4d)
		}
	);

	assertEquals(Arrays.asList(Row.of(1, 2.2), Row.of(3, 4.4)), collector.result);
}
 
Example 6
Source File: SchemaUtils.java    From pulsar-flink with Apache License 2.0
private static DataType avro2SqlType(Schema avroSchema, Set<String> existingRecordNames) throws IncompatibleSchemaException {
    LogicalType logicalType = avroSchema.getLogicalType();
    switch (avroSchema.getType()) {
        case INT:
            if (logicalType instanceof LogicalTypes.Date) {
                return DataTypes.DATE();
            } else {
                return DataTypes.INT();
            }

        case STRING:
        case ENUM:
            return DataTypes.STRING();

        case BOOLEAN:
            return DataTypes.BOOLEAN();

        case BYTES:
        case FIXED:
            // For FIXED type, if the precision requires more bytes than fixed size, the logical
            // type will be null, which is handled by Avro library.
            if (logicalType instanceof LogicalTypes.Decimal) {
                LogicalTypes.Decimal d = (LogicalTypes.Decimal) logicalType;
                return DataTypes.DECIMAL(d.getPrecision(), d.getScale());
            } else {
                return DataTypes.BYTES();
            }

        case DOUBLE:
            return DataTypes.DOUBLE();

        case FLOAT:
            return DataTypes.FLOAT();

        case LONG:
            if (logicalType instanceof LogicalTypes.TimestampMillis ||
                    logicalType instanceof LogicalTypes.TimestampMicros) {
                return DataTypes.TIMESTAMP(3).bridgedTo(java.sql.Timestamp.class);
            } else {
                return DataTypes.BIGINT();
            }

        case RECORD:
            if (existingRecordNames.contains(avroSchema.getFullName())) {
                throw new IncompatibleSchemaException(
                        String.format("Found recursive reference in Avro schema, which can not be processed by Flink: %s", avroSchema.toString(true)), null);
            }

            Set<String> newRecordName = ImmutableSet.<String>builder()
                    .addAll(existingRecordNames).add(avroSchema.getFullName()).build();
            List<DataTypes.Field> fields = new ArrayList<>();
            for (Schema.Field f : avroSchema.getFields()) {
                DataType fieldType = avro2SqlType(f.schema(), newRecordName);
                fields.add(DataTypes.FIELD(f.name(), fieldType));
            }
            return DataTypes.ROW(fields.toArray(new DataTypes.Field[0]));

        case ARRAY:
            DataType elementType = avro2SqlType(avroSchema.getElementType(), existingRecordNames);
            return DataTypes.ARRAY(elementType);

        case MAP:
            DataType valueType = avro2SqlType(avroSchema.getValueType(), existingRecordNames);
            return DataTypes.MAP(DataTypes.STRING(), valueType);

        case UNION:
            if (avroSchema.getTypes().stream().anyMatch(f -> f.getType() == Schema.Type.NULL)) {
                // In case of a union with null, eliminate it and make a recursive call
                List<Schema> remainingUnionTypes =
                        avroSchema.getTypes().stream().filter(f -> f.getType() != Schema.Type.NULL).collect(Collectors.toList());
                if (remainingUnionTypes.size() == 1) {
                    return avro2SqlType(remainingUnionTypes.get(0), existingRecordNames).nullable();
                } else {
                    return avro2SqlType(Schema.createUnion(remainingUnionTypes), existingRecordNames).nullable();
                }
            } else {
                List<Schema.Type> types = avroSchema.getTypes().stream().map(Schema::getType).collect(Collectors.toList());
                if (types.size() == 1) {
                    return avro2SqlType(avroSchema.getTypes().get(0), existingRecordNames);
                } else if (types.size() == 2 && types.contains(Schema.Type.INT) && types.contains(Schema.Type.LONG)) {
                    return DataTypes.BIGINT();
                } else if (types.size() == 2 && types.contains(Schema.Type.FLOAT) && types.contains(Schema.Type.DOUBLE)) {
                    return DataTypes.DOUBLE();
                } else {
                    // Convert complex unions to struct types where field names are member0, member1, etc.
                    // This is consistent with the behavior when converting between Avro and Parquet.
                    List<DataTypes.Field> memberFields = new ArrayList<>();
                    List<Schema> schemas = avroSchema.getTypes();
                    for (int i = 0; i < schemas.size(); i++) {
                        DataType memberType = avro2SqlType(schemas.get(i), existingRecordNames);
                        memberFields.add(DataTypes.FIELD("member" + i, memberType));
                    }
                    return DataTypes.ROW(memberFields.toArray(new DataTypes.Field[0]));
                }
            }

        default:
            throw new IncompatibleSchemaException(String.format("Unsupported type %s", avroSchema.toString(true)), null);
    }
}
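To illustrate the ARRAY branch above, a small hedged sketch (using Avro's SchemaBuilder; the builder call is an assumption about the reader's setup, not part of SchemaUtils):

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

// An Avro array<int> schema; per the ARRAY case above it maps to
// DataTypes.ARRAY(DataTypes.INT()), made nullable only via a union with null.
Schema avroArray = SchemaBuilder.array().items().intType();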
 
Example 7
Source File: PostgresCatalog.java    From flink with Apache License 2.0
/**
 * Converts a Postgres type to a Flink {@link DataType}.
 *
 * @see org.postgresql.jdbc.TypeInfoCache
 */
private DataType fromJDBCType(ResultSetMetaData metadata, int colIndex) throws SQLException {
	String pgType = metadata.getColumnTypeName(colIndex);

	int precision = metadata.getPrecision(colIndex);
	int scale = metadata.getScale(colIndex);

	switch (pgType) {
		case PG_BOOLEAN:
			return DataTypes.BOOLEAN();
		case PG_BOOLEAN_ARRAY:
			return DataTypes.ARRAY(DataTypes.BOOLEAN());
		case PG_BYTEA:
			return DataTypes.BYTES();
		case PG_BYTEA_ARRAY:
			return DataTypes.ARRAY(DataTypes.BYTES());
		case PG_SMALLINT:
			return DataTypes.SMALLINT();
		case PG_SMALLINT_ARRAY:
			return DataTypes.ARRAY(DataTypes.SMALLINT());
		case PG_INTEGER:
		case PG_SERIAL:
			return DataTypes.INT();
		case PG_INTEGER_ARRAY:
			return DataTypes.ARRAY(DataTypes.INT());
		case PG_BIGINT:
		case PG_BIGSERIAL:
			return DataTypes.BIGINT();
		case PG_BIGINT_ARRAY:
			return DataTypes.ARRAY(DataTypes.BIGINT());
		case PG_REAL:
			return DataTypes.FLOAT();
		case PG_REAL_ARRAY:
			return DataTypes.ARRAY(DataTypes.FLOAT());
		case PG_DOUBLE_PRECISION:
			return DataTypes.DOUBLE();
		case PG_DOUBLE_PRECISION_ARRAY:
			return DataTypes.ARRAY(DataTypes.DOUBLE());
		case PG_NUMERIC:
			// see SPARK-26538: handle numeric without explicit precision and scale.
			if (precision > 0) {
				return DataTypes.DECIMAL(precision, metadata.getScale(colIndex));
			}
			return DataTypes.DECIMAL(DecimalType.MAX_PRECISION, 18);
		case PG_NUMERIC_ARRAY:
			// see SPARK-26538: handle numeric without explicit precision and scale.
			if (precision > 0) {
				return DataTypes.ARRAY(DataTypes.DECIMAL(precision, metadata.getScale(colIndex)));
			}
			return DataTypes.ARRAY(DataTypes.DECIMAL(DecimalType.MAX_PRECISION, 18));
		case PG_CHAR:
		case PG_CHARACTER:
			return DataTypes.CHAR(precision);
		case PG_CHAR_ARRAY:
		case PG_CHARACTER_ARRAY:
			return DataTypes.ARRAY(DataTypes.CHAR(precision));
		case PG_CHARACTER_VARYING:
			return DataTypes.VARCHAR(precision);
		case PG_CHARACTER_VARYING_ARRAY:
			return DataTypes.ARRAY(DataTypes.VARCHAR(precision));
		case PG_TEXT:
			return DataTypes.STRING();
		case PG_TEXT_ARRAY:
			return DataTypes.ARRAY(DataTypes.STRING());
		case PG_TIMESTAMP:
			return DataTypes.TIMESTAMP(scale);
		case PG_TIMESTAMP_ARRAY:
			return DataTypes.ARRAY(DataTypes.TIMESTAMP(scale));
		case PG_TIMESTAMPTZ:
			return DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(scale);
		case PG_TIMESTAMPTZ_ARRAY:
			return DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(scale));
		case PG_TIME:
			return DataTypes.TIME(scale);
		case PG_TIME_ARRAY:
			return DataTypes.ARRAY(DataTypes.TIME(scale));
		case PG_DATE:
			return DataTypes.DATE();
		case PG_DATE_ARRAY:
			return DataTypes.ARRAY(DataTypes.DATE());
		default:
			throw new UnsupportedOperationException(
				String.format("Doesn't support Postgres type '%s' yet", pgType));
	}
}
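As a worked illustration of the numeric-array branch (a sketch of the expected results, not part of the catalog code): a column declared numeric(10, 2)[] reports precision 10 and scale 2, while a bare numeric[] reports precision 0 and falls through to the maximum-precision default.

// numeric(10, 2)[] -> precision = 10, scale = 2:
DataType bounded = DataTypes.ARRAY(DataTypes.DECIMAL(10, 2));
// numeric[] with no explicit precision -> precision = 0:
DataType fallback = DataTypes.ARRAY(DataTypes.DECIMAL(DecimalType.MAX_PRECISION, 18));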
 
Example 8
Source File: DataTypeExtractor.java    From flink with Apache License 2.0
private @Nullable DataType extractArrayType(
		DataTypeTemplate template,
		List<Type> typeHierarchy,
		Type type) {
	// prefer BYTES over ARRAY<TINYINT> for byte[]
	if (type == byte[].class) {
		return DataTypes.BYTES();
	}
	// for T[]
	else if (type instanceof GenericArrayType) {
		final GenericArrayType genericArray = (GenericArrayType) type;
		return DataTypes.ARRAY(
			extractDataTypeOrRaw(template, typeHierarchy, genericArray.getGenericComponentType()));
	}

	final Class<?> clazz = toClass(type);
	if (clazz == null) {
		return null;
	}

	// for my.custom.Pojo[][]
	if (clazz.isArray()) {
		return DataTypes.ARRAY(
			extractDataTypeOrRaw(template, typeHierarchy, clazz.getComponentType()));
	}

	// for List<T>
	// we only allow List here (not a subclass) because we cannot guarantee more specific
	// data structures after conversion
	if (clazz != List.class) {
		return null;
	}
	if (!(type instanceof ParameterizedType)) {
		throw extractionError(
			"The class '%s' needs generic parameters for an array type.",
			List.class.getName());
	}
	final ParameterizedType parameterizedType = (ParameterizedType) type;
	final DataType element = extractDataTypeOrRaw(
		template,
		typeHierarchy,
		parameterizedType.getActualTypeArguments()[0]);
	return DataTypes.ARRAY(element).bridgedTo(List.class);
}
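Summarizing the three extraction paths above with an illustrative sketch of the expected results (assumed behavior, not actual extractor invocations):

// byte[]        -> BYTES (preferred over ARRAY<TINYINT>)
DataType fromBytes = DataTypes.BYTES();
// Integer[]     -> ARRAY<INT>
DataType fromArray = DataTypes.ARRAY(DataTypes.INT());
// List<Integer> -> ARRAY<INT> bridged to java.util.List
DataType fromList = DataTypes.ARRAY(DataTypes.INT()).bridgedTo(List.class);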