Java Code Examples for org.apache.flink.table.types.logical.LogicalTypeRoot#ARRAY

The following examples show how to use org.apache.flink.table.types.logical.LogicalTypeRoot#ARRAY. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
Source File: CsvRowSchemaConverter.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Maps a Flink {@link LogicalType} to the Jackson {@link CsvSchema.ColumnType}
 * that represents it in a CSV schema.
 *
 * @throws IllegalArgumentException if the type has no CSV representation
 */
private static CsvSchema.ColumnType convertType(String fieldName, LogicalType type) {
	final LogicalTypeRoot root = type.getTypeRoot();
	if (STRING_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.STRING;
	}
	if (NUMBER_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.NUMBER;
	}
	if (BOOLEAN_TYPE_ROOTS.contains(root)) {
		return CsvSchema.ColumnType.BOOLEAN;
	}
	switch (root) {
		case ARRAY:
			validateNestedField(fieldName, ((ArrayType) type).getElementType());
			return CsvSchema.ColumnType.ARRAY;
		case ROW:
			// Jackson has no dedicated row category; a row is encoded as an
			// array of its (validated) field values.
			for (LogicalType childType : ((RowType) type).getChildren()) {
				validateNestedField(fieldName, childType);
			}
			return CsvSchema.ColumnType.ARRAY;
		default:
			throw new IllegalArgumentException(
				"Unsupported type '" + type.asSummaryString() + "' for field '" + fieldName + "'.");
	}
}
 
Example 2
Source File: CollectionDataType.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the conversion class to use, deriving a default for ARRAY types
 * when none was given explicitly.
 *
 * @param logicalType the logical type being converted
 * @param elementDataType the data type of the array's elements
 * @param clazz the explicitly requested conversion class, or {@code null}
 * @return {@code clazz} if non-null or the type is not an array; otherwise
 *     the array class of the element's conversion class (e.g. String -> String[])
 */
private static Class<?> ensureArrayConversionClass(
		LogicalType logicalType,
		DataType elementDataType,
		@Nullable Class<?> clazz) {
	if (clazz != null || logicalType.getTypeRoot() != LogicalTypeRoot.ARRAY) {
		return clazz;
	}
	// Arrays are a special case: the default conversion class depends on the
	// conversion class of the element type.
	return Array.newInstance(elementDataType.getConversionClass(), 0).getClass();
}
 
Example 3
Source File: PostgresRowConverter.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a deserialization converter for the given type; PostgreSQL arrays
 * get a dedicated converter, all other types fall back to the primitive path.
 */
@Override
public JdbcDeserializationConverter createNullableInternalConverter(LogicalType type) {
	return type.getTypeRoot() == LogicalTypeRoot.ARRAY
		? createPostgresArrayConverter((ArrayType) type)
		: createPrimitiveConverter(type);
}
 
Example 4
Source File: PostgresRowConverter.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a serialization converter for the given type. Writing ARRAY values
 * is not yet supported by the PostgreSQL dialect, so the returned converter
 * for arrays fails at write time rather than at converter-creation time.
 */
@Override
protected JdbcSerializationConverter createNullableExternalConverter(LogicalType type) {
	if (type.getTypeRoot() != LogicalTypeRoot.ARRAY) {
		return super.createNullableExternalConverter(type);
	}
	return (val, index, statement) -> {
		throw new IllegalStateException(
			String.format("Writing ARRAY type is not yet supported in JDBC:%s.", converterName()));
	};
}
 
Example 5
Source File: CollectionDataType.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the conversion class for the given type, supplying a default for
 * ARRAY types when the caller did not request one.
 *
 * @param logicalType the logical type being converted
 * @param elementDataType the data type of the array's elements
 * @param clazz the explicitly requested conversion class, or {@code null}
 */
private static Class<?> ensureArrayConversionClass(
		LogicalType logicalType,
		DataType elementDataType,
		@Nullable Class<?> clazz) {
	// Arrays are a special case: without an explicit class, the default is
	// the array class of the element's conversion class (e.g. Integer -> Integer[]).
	final boolean needsArrayDefault =
		clazz == null && logicalType.getTypeRoot() == LogicalTypeRoot.ARRAY;
	return needsArrayDefault
		? Array.newInstance(elementDataType.getConversionClass(), 0).getClass()
		: clazz;
}
 
Example 6
Source File: CollectionDataType.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Aligns the element's conversion class with the outer conversion class:
 * when the outer class is an array (e.g. {@code String[]}), the element is
 * bridged to that array's component type (e.g. {@code String}).
 */
private DataType ensureElementConversionClass(
		DataType elementDataType,
		Class<?> clazz) {
	if (logicalType.getTypeRoot() != LogicalTypeRoot.ARRAY || !clazz.isArray()) {
		return elementDataType;
	}
	return elementDataType.bridgedTo(clazz.getComponentType());
}
 
Example 7
Source File: SchemaUtils.java    From pulsar-flink with Apache License 2.0 4 votes vote down vote up
/**
 * Recursively converts a Flink SQL {@link DataType} to an equivalent Avro {@link Schema}.
 *
 * @param flinkType the Flink data type to convert
 * @param nullable whether the resulting schema is wrapped in a union with null
 * @param recordName name used for generated Avro records and fixed types
 * @param namespace Avro namespace for generated records; may be the empty string
 * @return the corresponding Avro schema
 * @throws IncompatibleSchemaException if the type has no Avro mapping
 */
private static Schema sqlType2AvroSchema(DataType flinkType, boolean nullable, String recordName, String namespace) throws IncompatibleSchemaException {
    SchemaBuilder.TypeBuilder<Schema> builder = SchemaBuilder.builder();
    LogicalTypeRoot type = flinkType.getLogicalType().getTypeRoot();
    Schema schema = null;

    if (flinkType instanceof AtomicDataType) {
        // Scalar types map directly onto Avro primitives / logical types.
        switch (type) {
            case BOOLEAN:
                schema = builder.booleanType();
                break;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
                // All sub-int widths widen to Avro int.
                schema = builder.intType();
                break;
            case BIGINT:
                schema = builder.longType();
                break;
            case DATE:
                // Avro date logical type: days since epoch stored in an int.
                schema = LogicalTypes.date().addToSchema(builder.intType());
                break;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                // Microsecond precision stored in a long.
                schema = LogicalTypes.timestampMicros().addToSchema(builder.longType());
                break;
            case FLOAT:
                schema = builder.floatType();
                break;
            case DOUBLE:
                schema = builder.doubleType();
                break;
            case VARCHAR:
                schema = builder.stringType();
                break;
            case BINARY:
            case VARBINARY:
                schema = builder.bytesType();
                break;
            case DECIMAL:
                // Decimals become an Avro fixed sized to hold the precision.
                DecimalType dt = (DecimalType) flinkType.getLogicalType();
                LogicalTypes.Decimal avroType = LogicalTypes.decimal(dt.getPrecision(), dt.getScale());
                int fixedSize = minBytesForPrecision[dt.getPrecision()];
                // Need to avoid naming conflict for the fixed fields
                // NOTE(review): unlike childNamespace below, no "." separator is
                // inserted between namespace and recordName here — confirm whether
                // this concatenation is intentional.
                String name;
                if (namespace.equals("")) {
                    name = recordName + ".fixed";
                } else {
                    name = namespace + recordName + ".fixed";
                }
                schema = avroType.addToSchema(SchemaBuilder.fixed(name).size(fixedSize));
                break;
            default:
                throw new IncompatibleSchemaException(String.format("Unsupported type %s", flinkType.toString()), null);
        }
    } else if (flinkType instanceof CollectionDataType) {
        // Only ARRAY collections are representable; element nullability is
        // taken from the element's own logical type.
        if (type == LogicalTypeRoot.ARRAY) {
            CollectionDataType cdt = (CollectionDataType) flinkType;
            DataType elementType = cdt.getElementDataType();
            schema = builder.array().items(sqlType2AvroSchema(elementType, elementType.getLogicalType().isNullable(), recordName, namespace));
        } else {
            throw new IncompatibleSchemaException("Pulsar only support collection as array", null);
        }
    } else if (flinkType instanceof KeyValueDataType) {
        // Avro maps require string keys, so only VARCHAR-keyed maps pass.
        KeyValueDataType kvType = (KeyValueDataType) flinkType;
        DataType keyType = kvType.getKeyDataType();
        DataType valueType = kvType.getValueDataType();
        if (!(keyType instanceof AtomicDataType) || keyType.getLogicalType().getTypeRoot() != LogicalTypeRoot.VARCHAR) {
            throw new IncompatibleSchemaException("Pulsar only support string key map", null);
        }
        schema = builder.map().values(sqlType2AvroSchema(valueType, valueType.getLogicalType().isNullable(), recordName, namespace));
    } else if (flinkType instanceof FieldsDataType) {
        // Rows become Avro records; nested records are namespaced under the
        // enclosing record's name to keep field type names unique.
        FieldsDataType fieldsDataType = (FieldsDataType) flinkType;
        String childNamespace = namespace.equals("") ? recordName : namespace + "." + recordName;
        SchemaBuilder.FieldAssembler<Schema> fieldsAssembler = builder.record(recordName).namespace(namespace).fields();
        RowType rowType = (RowType) fieldsDataType.getLogicalType();

        for (String fieldName : rowType.getFieldNames()) {
            DataType ftype = fieldsDataType.getFieldDataTypes().get(fieldName);
            Schema fieldAvroSchema = sqlType2AvroSchema(ftype, ftype.getLogicalType().isNullable(), fieldName, childNamespace);
            fieldsAssembler.name(fieldName).type(fieldAvroSchema).noDefault();
        }
        schema = fieldsAssembler.endRecord();
    } else {
        throw new IncompatibleSchemaException(String.format("Unexpected type %s", flinkType.toString()), null);
    }

    // Nullability is expressed as a union with the null schema.
    if (nullable) {
        return Schema.createUnion(schema, NULL_SCHEMA);
    } else {
        return schema;
    }
}