Java Code Examples for org.apache.flink.api.common.typeinfo.Types#OBJECT_ARRAY

The following examples show how to use org.apache.flink.api.common.typeinfo.Types#OBJECT_ARRAY. They are taken from open-source projects; the line above each example names the original project and source file it comes from.
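Before the project examples, here is a minimal self-contained sketch of what Types.OBJECT_ARRAY does (the class and variable names are ours, not taken from any project below): it wraps an element TypeInformation into an array type, yielding BasicArrayTypeInfo for String elements and ObjectArrayTypeInfo otherwise.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.types.Row;

public class ObjectArrayTypesSketch {
    public static void main(String[] args) {
        // String elements: OBJECT_ARRAY returns the basic String-array type info.
        TypeInformation<String[]> stringArray = Types.OBJECT_ARRAY(Types.STRING);

        // Non-basic elements (here: rows): OBJECT_ARRAY returns an ObjectArrayTypeInfo.
        TypeInformation<Row[]> rowArray = Types.OBJECT_ARRAY(Types.ROW(Types.INT, Types.STRING));

        System.out.println(stringArray); // e.g. BasicArrayTypeInfo<String>
        System.out.println(rowArray);    // e.g. ObjectArrayTypeInfo<Row(f0: Integer, f1: String)>
    }
}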
Example 1
Source File: StreamSqlUtil.java    From sylph with Apache License 2.0
private static TypeInformation<?> getFlinkType(Type type)
{
    if (type instanceof ParameterizedType && ((ParameterizedType) type).getRawType() == Map.class) {
        // Map<K, V>: recursively convert the value type and wrap key and value in a MapTypeInfo
        Type[] arguments = ((ParameterizedType) type).getActualTypeArguments();
        Type valueType = arguments[1];
        TypeInformation<?> valueInfo = getFlinkType(valueType);
        return new MapTypeInfo<>(TypeExtractor.createTypeInfo(arguments[0]), valueInfo);
    }
    else if (type instanceof ParameterizedType && ((ParameterizedType) type).getRawType() == List.class) {
        TypeInformation<?> typeInformation = getFlinkType(((ParameterizedType) type).getActualTypeArguments()[0]);
        if (typeInformation.isBasicType() && typeInformation != Types.STRING) {
            // basic, non-String elements (e.g. Long) become primitive arrays such as long[]
            return Types.PRIMITIVE_ARRAY(typeInformation);
        }
        else {
            // String and complex elements become object arrays
            return Types.OBJECT_ARRAY(typeInformation);
        }
    }
    else {
        // anything else is handled by Flink's regular type extraction
        return TypeExtractor.createTypeInfo(type);
    }
}
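The helper takes a java.lang.reflect.Type, typically a field's generic type obtained by reflection. A short hedged sketch of such inputs (the Event class is hypothetical, not part of sylph), with comments noting what the branches above would map each field to:

import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;

public class ReflectedTypesSketch {
    // Hypothetical POJO whose generic field types would be fed into a helper like getFlinkType(Type).
    public static class Event {
        public Map<String, Long> counters; // -> MapTypeInfo<String, Long>
        public List<Long> samples;         // -> Types.PRIMITIVE_ARRAY(Types.LONG), i.e. long[]
        public List<String> tags;          // -> Types.OBJECT_ARRAY(Types.STRING), i.e. BasicArrayTypeInfo
    }

    public static void main(String[] args) throws Exception {
        Type counters = Event.class.getField("counters").getGenericType();
        Type samples = Event.class.getField("samples").getGenericType();
        Type tags = Event.class.getField("tags").getGenericType();
        System.out.println(counters + " | " + samples + " | " + tags);
    }
}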
 
Example 2
Source File: JsonRowSchemaConverter.java    From Flink-CEPplus with Apache License 2.0
private static TypeInformation<?> convertArray(String location, JsonNode node, JsonNode root) {
	// validate items
	if (!node.has(ITEMS)) {
		throw new IllegalArgumentException(
			"Arrays must specify an '" + ITEMS + "' property in node: " + location);
	}
	final JsonNode items = node.get(ITEMS);

	// list (translated to object array)
	if (items.isObject()) {
		final TypeInformation<?> elementType = convertType(
			location + '/' + ITEMS,
			items,
			root);
		// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
		return Types.OBJECT_ARRAY(elementType);
	}
	// tuple (translated to row)
	else if (items.isArray()) {
		final TypeInformation<?>[] types = convertTypes(location + '/' + ITEMS, items, root);

		// validate that array does not contain additional items
		if (node.has(ADDITIONAL_ITEMS) && node.get(ADDITIONAL_ITEMS).isBoolean() &&
				node.get(ADDITIONAL_ITEMS).asBoolean()) {
			throw new IllegalArgumentException(
				"An array tuple must not allow additional items in node: " + location);
		}

		return Types.ROW(types);
	}
	throw new IllegalArgumentException(
		"Invalid type for '" + ITEMS + "' property in node: " + location);
}
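This converter handles two flavors of JSON schema arrays: object-typed items (a homogeneous list, mapped to Types.OBJECT_ARRAY) and array-typed items (a fixed tuple, mapped to Types.ROW). A hedged end-to-end sketch, assuming the class's public JsonRowSchemaConverter.convert(String) entry point from flink-json; the property names tags and pair are made up:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.formats.json.JsonRowSchemaConverter;

public class JsonSchemaArraySketch {
    public static void main(String[] args) {
        // An object schema with a list-style array property and a tuple-style array property.
        String schema =
            "{"
            + "  \"type\": \"object\","
            + "  \"properties\": {"
            + "    \"tags\": { \"type\": \"array\", \"items\": { \"type\": \"string\" } },"
            + "    \"pair\": { \"type\": \"array\", \"items\": [ { \"type\": \"boolean\" }, { \"type\": \"string\" } ] }"
            + "  }"
            + "}";
        TypeInformation<?> typeInfo = JsonRowSchemaConverter.convert(schema);
        // Expected, roughly: Row(tags: BasicArrayTypeInfo<String>, pair: Row(f0: Boolean, f1: String))
        System.out.println(typeInfo);
    }
}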
 
Example 3
Source File: JsonRowSchemaConverter.java    From flink with Apache License 2.0
private static TypeInformation<?> convertArray(String location, JsonNode node, JsonNode root) {
	// validate items
	if (!node.has(ITEMS)) {
		throw new IllegalArgumentException(
			"Arrays must specify an '" + ITEMS + "' property in node: " + location);
	}
	final JsonNode items = node.get(ITEMS);

	// list (translated to object array)
	if (items.isObject()) {
		final TypeInformation<?> elementType = convertType(
			location + '/' + ITEMS,
			items,
			root);
		// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
		return Types.OBJECT_ARRAY(elementType);
	}
	// tuple (translated to row)
	else if (items.isArray()) {
		final TypeInformation<?>[] types = convertTypes(location + '/' + ITEMS, items, root);

		// validate that array does not contain additional items
		if (node.has(ADDITIONAL_ITEMS) && node.get(ADDITIONAL_ITEMS).isBoolean() &&
				node.get(ADDITIONAL_ITEMS).asBoolean()) {
			throw new IllegalArgumentException(
				"An array tuple must not allow additional items in node: " + location);
		}

		return Types.ROW(types);
	}
	throw new IllegalArgumentException(
		"Invalid type for '" + ITEMS + "' property in node: " + location);
}
 
Example 4
Source File: AvroSchemaConverter.java    From Flink-CEPplus with Apache License 2.0
private static TypeInformation<?> convertToTypeInfo(Schema schema) {
	switch (schema.getType()) {
		case RECORD:
			final List<Schema.Field> fields = schema.getFields();

			final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()];
			final String[] names = new String[fields.size()];
			for (int i = 0; i < fields.size(); i++) {
				final Schema.Field field = fields.get(i);
				types[i] = convertToTypeInfo(field.schema());
				names[i] = field.name();
			}
			return Types.ROW_NAMED(names, types);
		case ENUM:
			return Types.STRING;
		case ARRAY:
			// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
			return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType()));
		case MAP:
			return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType()));
		case UNION:
			final Schema actualSchema;
			if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) {
				actualSchema = schema.getTypes().get(1);
			} else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) {
				actualSchema = schema.getTypes().get(0);
			} else if (schema.getTypes().size() == 1) {
				actualSchema = schema.getTypes().get(0);
			} else {
				// use Kryo for serialization
				return Types.GENERIC(Object.class);
			}
			return convertToTypeInfo(actualSchema);
		case FIXED:
			// logical decimal type
			if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
				return Types.BIG_DEC;
			}
			// convert fixed size binary data to primitive byte arrays
			return Types.PRIMITIVE_ARRAY(Types.BYTE);
		case STRING:
			// convert Avro's Utf8/CharSequence to String
			return Types.STRING;
		case BYTES:
			// logical decimal type
			if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
				return Types.BIG_DEC;
			}
			return Types.PRIMITIVE_ARRAY(Types.BYTE);
		case INT:
			// logical date and time type
			final LogicalType logicalType = schema.getLogicalType();
			if (logicalType == LogicalTypes.date()) {
				return Types.SQL_DATE;
			} else if (logicalType == LogicalTypes.timeMillis()) {
				return Types.SQL_TIME;
			}
			return Types.INT;
		case LONG:
			// logical timestamp type
			if (schema.getLogicalType() == LogicalTypes.timestampMillis()) {
				return Types.SQL_TIMESTAMP;
			}
			return Types.LONG;
		case FLOAT:
			return Types.FLOAT;
		case DOUBLE:
			return Types.DOUBLE;
		case BOOLEAN:
			return Types.BOOLEAN;
		case NULL:
			return Types.VOID;
	}
	throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'.");
}
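Here the ARRAY case wraps the converted element type in Types.OBJECT_ARRAY, so an Avro array of strings surfaces as a String[] type. A hedged usage sketch, assuming the class's public convertToTypeInfo(String avroSchemaString) entry point from flink-avro; the Event schema is made up:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;

public class AvroArraySchemaSketch {
    public static void main(String[] args) {
        // A record schema with an array field; the ARRAY case above maps it via Types.OBJECT_ARRAY.
        String schema =
            "{"
            + "  \"type\": \"record\","
            + "  \"name\": \"Event\","
            + "  \"fields\": ["
            + "    { \"name\": \"id\", \"type\": \"long\" },"
            + "    { \"name\": \"tags\", \"type\": { \"type\": \"array\", \"items\": \"string\" } }"
            + "  ]"
            + "}";
        TypeInformation<?> typeInfo = AvroSchemaConverter.convertToTypeInfo(schema);
        // Expected, roughly: Row(id: Long, tags: BasicArrayTypeInfo<String>)
        System.out.println(typeInfo);
    }
}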
 
Example 5
Source File: AvroSchemaConverter.java    From flink with Apache License 2.0
private static TypeInformation<?> convertToTypeInfo(Schema schema) {
	switch (schema.getType()) {
		case RECORD:
			final List<Schema.Field> fields = schema.getFields();

			final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()];
			final String[] names = new String[fields.size()];
			for (int i = 0; i < fields.size(); i++) {
				final Schema.Field field = fields.get(i);
				types[i] = convertToTypeInfo(field.schema());
				names[i] = field.name();
			}
			return Types.ROW_NAMED(names, types);
		case ENUM:
			return Types.STRING;
		case ARRAY:
			// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
			return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType()));
		case MAP:
			return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType()));
		case UNION:
			final Schema actualSchema;
			if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) {
				actualSchema = schema.getTypes().get(1);
			} else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) {
				actualSchema = schema.getTypes().get(0);
			} else if (schema.getTypes().size() == 1) {
				actualSchema = schema.getTypes().get(0);
			} else {
				// use Kryo for serialization
				return Types.GENERIC(Object.class);
			}
			return convertToTypeInfo(actualSchema);
		case FIXED:
			// logical decimal type
			if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
				return Types.BIG_DEC;
			}
			// convert fixed size binary data to primitive byte arrays
			return Types.PRIMITIVE_ARRAY(Types.BYTE);
		case STRING:
			// convert Avro's Utf8/CharSequence to String
			return Types.STRING;
		case BYTES:
			// logical decimal type
			if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
				return Types.BIG_DEC;
			}
			return Types.PRIMITIVE_ARRAY(Types.BYTE);
		case INT:
			// logical date and time type
			final LogicalType logicalType = schema.getLogicalType();
			if (logicalType == LogicalTypes.date()) {
				return Types.SQL_DATE;
			} else if (logicalType == LogicalTypes.timeMillis()) {
				return Types.SQL_TIME;
			}
			return Types.INT;
		case LONG:
			// logical timestamp type
			if (schema.getLogicalType() == LogicalTypes.timestampMillis()) {
				return Types.SQL_TIMESTAMP;
			}
			return Types.LONG;
		case FLOAT:
			return Types.FLOAT;
		case DOUBLE:
			return Types.DOUBLE;
		case BOOLEAN:
			return Types.BOOLEAN;
		case NULL:
			return Types.VOID;
	}
	throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'.");
}
 
Example 6
Source File: AvroSchemaConverter.java    From flink with Apache License 2.0
private static TypeInformation<?> convertToTypeInfo(Schema schema) {
	switch (schema.getType()) {
		case RECORD:
			final List<Schema.Field> fields = schema.getFields();

			final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()];
			final String[] names = new String[fields.size()];
			for (int i = 0; i < fields.size(); i++) {
				final Schema.Field field = fields.get(i);
				types[i] = convertToTypeInfo(field.schema());
				names[i] = field.name();
			}
			return Types.ROW_NAMED(names, types);
		case ENUM:
			return Types.STRING;
		case ARRAY:
			// result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
			return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType()));
		case MAP:
			return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType()));
		case UNION:
			final Schema actualSchema;
			if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) {
				actualSchema = schema.getTypes().get(1);
			} else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) {
				actualSchema = schema.getTypes().get(0);
			} else if (schema.getTypes().size() == 1) {
				actualSchema = schema.getTypes().get(0);
			} else {
				// use Kryo for serialization
				return Types.GENERIC(Object.class);
			}
			return convertToTypeInfo(actualSchema);
		case FIXED:
			// logical decimal type
			if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
				return Types.BIG_DEC;
			}
			// convert fixed size binary data to primitive byte arrays
			return Types.PRIMITIVE_ARRAY(Types.BYTE);
		case STRING:
			// convert Avro's Utf8/CharSequence to String
			return Types.STRING;
		case BYTES:
			// logical decimal type
			if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
				return Types.BIG_DEC;
			}
			return Types.PRIMITIVE_ARRAY(Types.BYTE);
		case INT:
			// logical date and time type
			final org.apache.avro.LogicalType logicalType = schema.getLogicalType();
			if (logicalType == LogicalTypes.date()) {
				return Types.SQL_DATE;
			} else if (logicalType == LogicalTypes.timeMillis()) {
				return Types.SQL_TIME;
			}
			return Types.INT;
		case LONG:
			// logical timestamp type
			if (schema.getLogicalType() == LogicalTypes.timestampMillis()) {
				return Types.SQL_TIMESTAMP;
			}
			return Types.LONG;
		case FLOAT:
			return Types.FLOAT;
		case DOUBLE:
			return Types.DOUBLE;
		case BOOLEAN:
			return Types.BOOLEAN;
		case NULL:
			return Types.VOID;
	}
	throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'.");
}
 
Example 7
Source File: TypeStringUtils.java    From Flink-CEPplus with Apache License 2.0
private TypeInformation<?> convertObjectArray() {
	nextToken(TokenType.BEGIN);

	nextToken(TokenType.LITERAL);
	final TypeInformation<?> elementTypeInfo = convertType();

	nextToken(TokenType.END);

	return Types.OBJECT_ARRAY(elementTypeInfo);
}
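This parser consumes the bracketed part of a type string such as OBJECT_ARRAY<VARCHAR>: the opening token, the element type literal, then the closing token. A hedged usage sketch, assuming the class's public readTypeInfo(String) method and the OBJECT_ARRAY<...> syntax used in table schema descriptors (the package, here org.apache.flink.table.utils, may vary between Flink versions):

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.utils.TypeStringUtils;

public class TypeStringObjectArraySketch {
    public static void main(String[] args) {
        // OBJECT_ARRAY<VARCHAR> is parsed by convertObjectArray() above into a String-array type.
        TypeInformation<?> parsed = TypeStringUtils.readTypeInfo("OBJECT_ARRAY<VARCHAR>");
        System.out.println(parsed); // e.g. BasicArrayTypeInfo<String>
    }
}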
 
Example 8
Source File: TypeStringUtils.java    From flink with Apache License 2.0
private TypeInformation<?> convertObjectArray() {
	nextToken(TokenType.BEGIN);

	nextToken(TokenType.LITERAL);
	final TypeInformation<?> elementTypeInfo = convertType();

	nextToken(TokenType.END);

	return Types.OBJECT_ARRAY(elementTypeInfo);
}
 