org.apache.flink.table.types.AtomicDataType Java Examples

The following examples show how to use org.apache.flink.table.types.AtomicDataType. They are drawn from open source projects; the originating project and source file are listed above each example.
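Before diving into the examples, it helps to know the basic shape of the class: an AtomicDataType wraps exactly one LogicalType (anything that is not a collection, map, or row) and can optionally be bridged to a specific Java conversion class. The following standalone sketch is illustrative only and is not taken from any of the projects below; the class name is made up.

import org.apache.flink.table.types.AtomicDataType;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.TimestampType;

public class AtomicDataTypeSketch {

	public static void main(String[] args) {
		// wrap a single logical type; nullability is taken from the logical type itself
		DataType intType = new AtomicDataType(new IntType());

		// the same wrapper, bridged to a legacy conversion class
		DataType timestampType = new AtomicDataType(new TimestampType(3))
				.bridgedTo(java.sql.Timestamp.class);

		System.out.println(intType.getLogicalType());            // INT
		System.out.println(timestampType.getConversionClass());  // class java.sql.Timestamp
	}
}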
Example #1
Source File: ConnectorCatalogTable.java    From flink with Apache License 2.0
private static void updateRowtimeIndicators(
		DefinedRowtimeAttributes source,
		String[] fieldNames,
		DataType[] types) {
	List<String> rowtimeAttributes = source.getRowtimeAttributeDescriptors()
		.stream()
		.map(RowtimeAttributeDescriptor::getAttributeName)
		.collect(Collectors.toList());

	for (int i = 0; i < fieldNames.length; i++) {
		if (rowtimeAttributes.contains(fieldNames[i])) {
			// bridged to java.sql.Timestamp for compatibility with the flink-planner
			types[i] = new AtomicDataType(new TimestampType(true, TimestampKind.ROWTIME, 3))
					.bridgedTo(java.sql.Timestamp.class);
		}
	}
}
 
Example #2
Source File: ClassDataTypeConverter.java    From flink with Apache License 2.0
/**
 * Returns the clearly identifiable data type if possible. For example, {@link Long} can be
 * expressed as {@link DataTypes#BIGINT()}. However, for example, {@link Row} cannot be extracted
 * as information about the fields is missing. Or {@link BigDecimal} needs to be mapped from a
 * variable precision/scale to constant ones.
 */
@SuppressWarnings("unchecked")
public static Optional<DataType> extractDataType(Class<?> clazz) {
	// prefer BYTES over ARRAY<TINYINT> for byte[]
	if (clazz == byte[].class) {
		return Optional.of(DataTypes.BYTES());
	}

	if (clazz.isArray()) {
		return extractDataType(clazz.getComponentType())
			.map(DataTypes::ARRAY);
	}

	if (TableSymbol.class.isAssignableFrom(clazz)) {
		return Optional.of(new AtomicDataType(new SymbolType(clazz)));
	}

	return Optional.ofNullable(defaultDataTypes.get(clazz.getName()));
}
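For context, a hypothetical caller of this method (assuming org.apache.flink.table.types.utils.ClassDataTypeConverter from flink-table-common, plus imports for Optional, DataType and org.apache.flink.types.Row) would see results like the comments below; the variable names are illustrative.

Optional<DataType> bigint = ClassDataTypeConverter.extractDataType(Long.class);   // BIGINT
Optional<DataType> bytes  = ClassDataTypeConverter.extractDataType(byte[].class); // BYTES, not ARRAY<TINYINT>
Optional<DataType> row    = ClassDataTypeConverter.extractDataType(Row.class);    // Optional.empty(), field info is missing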
 
Example #3
Source File: ClassDataTypeConverter.java    From flink with Apache License 2.0
/**
 * Returns the clearly identifiable data type if possible. For example, {@link Long} can be
 * expressed as {@link DataTypes#BIGINT()}. However, for example, {@link Row} cannot be extracted
 * as information about the fields is missing. Or {@link BigDecimal} needs to be mapped from a
 * variable precision/scale to constant ones.
 */
@SuppressWarnings("unchecked")
public static Optional<DataType> extractDataType(Class<?> clazz) {
	// byte arrays have higher priority than regular arrays
	if (clazz.equals(byte[].class)) {
		return Optional.of(DataTypes.BYTES());
	}

	if (clazz.isArray()) {
		return extractDataType(clazz.getComponentType())
			.map(DataTypes::ARRAY);
	}

	if (TableSymbol.class.isAssignableFrom(clazz)) {
		return Optional.of(new AtomicDataType(new SymbolType(clazz)));
	}

	return Optional.ofNullable(defaultDataTypes.get(clazz.getName()));
}
 
Example #4
Source File: ComparableInputTypeStrategyTests.java    From flink with Apache License 2.0
private static DataType distinctType(String typeName, DataType sourceType) {
	return new AtomicDataType(
		DistinctType.newBuilder(
			ObjectIdentifier.of("cat", "db", typeName),
			sourceType.getLogicalType()
		).build(),
		sourceType.getConversionClass());
}
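A hypothetical call to this test helper (the type name and source type below are made up for illustration) wraps an existing data type into a user-defined DISTINCT type registered under the fixed cat.db identifier used by the tests:

// illustrative only: a distinct type `cat`.`db`.`Money` backed by DECIMAL(10, 2)
DataType money = distinctType("Money", DataTypes.DECIMAL(10, 2));
// money.getLogicalType() is the DistinctType, and money.getConversionClass()
// is inherited from the source type (java.math.BigDecimal for DECIMAL)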
 
Example #5
Source File: ConnectorCatalogTable.java    From flink with Apache License 2.0
private static void updateProctimeIndicator(
		DefinedProctimeAttribute source,
		String[] fieldNames,
		DataType[] types) {
	String proctimeAttribute = source.getProctimeAttribute();

	for (int i = 0; i < fieldNames.length; i++) {
		if (fieldNames[i].equals(proctimeAttribute)) {
			// bridged to java.sql.Timestamp for compatibility with the flink-planner
			types[i] = new AtomicDataType(new TimestampType(true, TimestampKind.PROCTIME, 3))
					.bridgedTo(java.sql.Timestamp.class);
			break;
		}
	}
}
 
Example #6
Source File: FlinkTypeToType.java    From iceberg with Apache License 2.0
@SuppressWarnings("checkstyle:CyclomaticComplexity")
@Override
public Type atomic(AtomicDataType type) {
  LogicalType inner = type.getLogicalType();
  if (inner instanceof VarCharType ||
      inner instanceof CharType) {
    return Types.StringType.get();
  } else if (inner instanceof BooleanType) {
    return Types.BooleanType.get();
  } else if (inner instanceof IntType ||
      inner instanceof SmallIntType ||
      inner instanceof TinyIntType) {
    return Types.IntegerType.get();
  } else if (inner instanceof BigIntType) {
    return Types.LongType.get();
  } else if (inner instanceof VarBinaryType) {
    return Types.BinaryType.get();
  } else if (inner instanceof BinaryType) {
    BinaryType binaryType = (BinaryType) inner;
    return Types.FixedType.ofLength(binaryType.getLength());
  } else if (inner instanceof FloatType) {
    return Types.FloatType.get();
  } else if (inner instanceof DoubleType) {
    return Types.DoubleType.get();
  } else if (inner instanceof DateType) {
    return Types.DateType.get();
  } else if (inner instanceof TimeType) {
    return Types.TimeType.get();
  } else if (inner instanceof TimestampType) {
    return Types.TimestampType.withoutZone();
  } else if (inner instanceof LocalZonedTimestampType) {
    return Types.TimestampType.withZone();
  } else if (inner instanceof DecimalType) {
    DecimalType decimalType = (DecimalType) inner;
    return Types.DecimalType.of(decimalType.getPrecision(), decimalType.getScale());
  } else {
    throw new UnsupportedOperationException("Not a supported type: " + type.toString());
  }
}
 
Example #7
Source File: LegacyTypeInfoDataTypeConverter.java    From flink with Apache License 2.0
private static DataType convertToTimeAttributeType(TimeIndicatorTypeInfo timeIndicatorTypeInfo) {
	final TimestampKind kind;
	if (timeIndicatorTypeInfo.isEventTime()) {
		kind = TimestampKind.ROWTIME;
	} else {
		kind = TimestampKind.PROCTIME;
	}
	return new AtomicDataType(new TimestampType(true, kind, 3))
		.bridgedTo(java.sql.Timestamp.class);
}
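Both predefined indicator type infos map onto the same TIMESTAMP(3) shape and only differ in the TimestampKind; a hypothetical call from within the converter class would look like this:

// illustrative only: the two singleton indicators from TimeIndicatorTypeInfo
DataType rowtime  = convertToTimeAttributeType(TimeIndicatorTypeInfo.ROWTIME_INDICATOR);  // rowtime attribute, TIMESTAMP(3)
DataType proctime = convertToTimeAttributeType(TimeIndicatorTypeInfo.PROCTIME_INDICATOR); // proctime attribute, TIMESTAMP(3)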
 
Example #8
Source File: TableSourceValidation.java    From flink with Apache License 2.0
@Override
public Optional<DataType> visit(AtomicDataType atomicDataType) {
	// This is a check for backwards compatibility. We should also support legacy types with composite type info
	LogicalType logicalType = atomicDataType.getLogicalType();
	if (logicalType instanceof LegacyTypeInformationType) {
		LegacyTypeInformationType<?> legacyTypeInformationType = (LegacyTypeInformationType<?>) logicalType;
		TypeInformation<?> typeInformation = legacyTypeInformationType.getTypeInformation();
		if (typeInformation instanceof CompositeType<?>) {
			CompositeType<?> compositeType = (CompositeType<?>) typeInformation;
			return Optional.of(TypeConversions.fromLegacyInfoToDataType(compositeType.getTypeAt(fieldName)));
		}
	}

	return Optional.of(atomicDataType);
}
 
Example #9
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(LogicalType other) {
	return new AtomicDataType(other);
}
 
Example #10
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(DoubleType doubleType) {
	return new AtomicDataType(doubleType);
}
 
Example #11
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(DecimalType decimalType) {
	return new AtomicDataType(decimalType);
}
 
Example #12
Source File: DataTypePrecisionFixer.java    From flink with Apache License 2.0
@Override
public DataType visit(AtomicDataType dataType) {
	switch (logicalType.getTypeRoot()) {
		case DECIMAL:
			DecimalType decimalType = (DecimalType) logicalType;
			return DataTypes
				// fix the precision and scale, because the precision may be lost or incorrect.
				// precision from DDL is the only source of truth.
				// we don't care about nullability for now.
				.DECIMAL(decimalType.getPrecision(), decimalType.getScale())
				// only keep the original conversion class
				.bridgedTo(dataType.getConversionClass());

		case TIMESTAMP_WITHOUT_TIME_ZONE :
			TimestampType timestampType = (TimestampType) logicalType;
			if (timestampType.getKind() == TimestampKind.REGULAR) {
				return DataTypes
					.TIMESTAMP(timestampType.getPrecision())
					.bridgedTo(dataType.getConversionClass());
			} else {
				// keep the original type if it is time attribute type
				// because time attribute can only be precision 3
				// and the original type may be BIGINT.
				return dataType;
			}

		case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
			LocalZonedTimestampType localZonedTimestampType = (LocalZonedTimestampType) logicalType;
			return DataTypes
				.TIMESTAMP_WITH_LOCAL_TIME_ZONE(localZonedTimestampType.getPrecision())
				.bridgedTo(dataType.getConversionClass());

		case TIMESTAMP_WITH_TIME_ZONE:
			ZonedTimestampType zonedTimestampType = (ZonedTimestampType) logicalType;
			return DataTypes
				.TIMESTAMP_WITH_TIME_ZONE(zonedTimestampType.getPrecision())
				.bridgedTo(dataType.getConversionClass());

		case TIME_WITHOUT_TIME_ZONE:
			TimeType timeType = (TimeType) logicalType;
			return DataTypes
				.TIME(timeType.getPrecision())
				.bridgedTo(dataType.getConversionClass());

		default:
			return dataType;
	}
}
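Assuming the enclosing DataTypePrecisionFixer visitor is constructed from the logical type declared in DDL (the constructor shape below is an assumption, not shown in the snippet), it is applied through the regular DataType visitor mechanism:

// illustrative only: snap a generic DECIMAL back to the DECIMAL(10, 2) declared in DDL
LogicalType ddlType = new DecimalType(10, 2);
DataType fixed = DataTypes.DECIMAL(38, 18)
		.bridgedTo(java.math.BigDecimal.class)
		.accept(new DataTypePrecisionFixer(ddlType));
// fixed is DECIMAL(10, 2) and stays bridged to java.math.BigDecimal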
 
Example #13
Source File: DataTypeUtils.java    From flink with Apache License 2.0
@Override
public DataType visit(AtomicDataType atomicDataType) {
	return transformation.transform(atomicDataType);
}
 
Example #14
Source File: FlinkTypeVisitor.java    From iceberg with Apache License 2.0
public T atomic(AtomicDataType type) {
  return null;
}
 
Example #15
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(IntType intType) {
	return new AtomicDataType(intType);
}
 
Example #16
Source File: FieldInfoUtils.java    From flink with Apache License 2.0
private static DataType createTimeIndicatorType(TimestampKind kind) {
	return new AtomicDataType(new TimestampType(true, kind, 3))
		.bridgedTo(java.sql.Timestamp.class);
}
 
Example #17
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(YearMonthIntervalType yearMonthIntervalType) {
	return new AtomicDataType(yearMonthIntervalType);
}
 
Example #18
Source File: SchemaUtils.java    From pulsar-flink with Apache License 2.0
private static Schema sqlType2AvroSchema(DataType flinkType, boolean nullable, String recordName, String namespace) throws IncompatibleSchemaException {
    SchemaBuilder.TypeBuilder<Schema> builder = SchemaBuilder.builder();
    LogicalTypeRoot type = flinkType.getLogicalType().getTypeRoot();
    Schema schema = null;

    if (flinkType instanceof AtomicDataType) {
        switch (type) {
            case BOOLEAN:
                schema = builder.booleanType();
                break;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
                schema = builder.intType();
                break;
            case BIGINT:
                schema = builder.longType();
                break;
            case DATE:
                schema = LogicalTypes.date().addToSchema(builder.intType());
                break;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                schema = LogicalTypes.timestampMicros().addToSchema(builder.longType());
                break;
            case FLOAT:
                schema = builder.floatType();
                break;
            case DOUBLE:
                schema = builder.doubleType();
                break;
            case VARCHAR:
                schema = builder.stringType();
                break;
            case BINARY:
            case VARBINARY:
                schema = builder.bytesType();
                break;
            case DECIMAL:
                DecimalType dt = (DecimalType) flinkType.getLogicalType();
                LogicalTypes.Decimal avroType = LogicalTypes.decimal(dt.getPrecision(), dt.getScale());
                int fixedSize = minBytesForPrecision[dt.getPrecision()];
                // Need to avoid naming conflict for the fixed fields
                String name;
                if (namespace.equals("")) {
                    name = recordName + ".fixed";
                } else {
                    name = namespace + recordName + ".fixed";
                }
                schema = avroType.addToSchema(SchemaBuilder.fixed(name).size(fixedSize));
                break;
            default:
                throw new IncompatibleSchemaException(String.format("Unsupported type %s", flinkType.toString()), null);
        }
    } else if (flinkType instanceof CollectionDataType) {
        if (type == LogicalTypeRoot.ARRAY) {
            CollectionDataType cdt = (CollectionDataType) flinkType;
            DataType elementType = cdt.getElementDataType();
            schema = builder.array().items(sqlType2AvroSchema(elementType, elementType.getLogicalType().isNullable(), recordName, namespace));
        } else {
            throw new IncompatibleSchemaException("Pulsar only support collection as array", null);
        }
    } else if (flinkType instanceof KeyValueDataType) {
        KeyValueDataType kvType = (KeyValueDataType) flinkType;
        DataType keyType = kvType.getKeyDataType();
        DataType valueType = kvType.getValueDataType();
        if (!(keyType instanceof AtomicDataType) || keyType.getLogicalType().getTypeRoot() != LogicalTypeRoot.VARCHAR) {
            throw new IncompatibleSchemaException("Pulsar only support string key map", null);
        }
        schema = builder.map().values(sqlType2AvroSchema(valueType, valueType.getLogicalType().isNullable(), recordName, namespace));
    } else if (flinkType instanceof FieldsDataType) {
        FieldsDataType fieldsDataType = (FieldsDataType) flinkType;
        String childNamespace = namespace.equals("") ? recordName : namespace + "." + recordName;
        SchemaBuilder.FieldAssembler<Schema> fieldsAssembler = builder.record(recordName).namespace(namespace).fields();
        RowType rowType = (RowType) fieldsDataType.getLogicalType();

        for (String fieldName : rowType.getFieldNames()) {
            DataType ftype = fieldsDataType.getFieldDataTypes().get(fieldName);
            Schema fieldAvroSchema = sqlType2AvroSchema(ftype, ftype.getLogicalType().isNullable(), fieldName, childNamespace);
            fieldsAssembler.name(fieldName).type(fieldAvroSchema).noDefault();
        }
        schema = fieldsAssembler.endRecord();
    } else {
        throw new IncompatibleSchemaException(String.format("Unexpected type %s", flinkType.toString()), null);
    }

    if (nullable) {
        return Schema.createUnion(schema, NULL_SCHEMA);
    } else {
        return schema;
    }
}
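To illustrate the recursive record branch, a hypothetical call from inside SchemaUtils (field names made up) maps a Flink ROW to an Avro record; note that IncompatibleSchemaException is a checked exception the caller has to handle:

// illustrative only: ROW<id BIGINT, name STRING> becomes an Avro record with two fields
DataType rowType = DataTypes.ROW(
        DataTypes.FIELD("id", DataTypes.BIGINT()),
        DataTypes.FIELD("name", DataTypes.STRING()));
Schema avroSchema = sqlType2AvroSchema(rowType, false, "example_record", "");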
 
Example #19
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(TimestampType timestampType) {
	return new AtomicDataType(timestampType);
}
 
Example #20
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(DateType dateType) {
	return new AtomicDataType(dateType);
}
 
Example #21
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(DayTimeIntervalType dayTimeIntervalType) {
	return new AtomicDataType(dayTimeIntervalType);
}
 
Example #22
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(FloatType floatType) {
	return new AtomicDataType(floatType);
}
 
Example #23
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(VarCharType varCharType) {
	return new AtomicDataType(varCharType);
}
 
Example #24
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(SmallIntType smallIntType) {
	return new AtomicDataType(smallIntType);
}
 
Example #25
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(TinyIntType tinyIntType) {
	return new AtomicDataType(tinyIntType);
}
 
Example #26
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(BinaryType binaryType) {
	return new AtomicDataType(binaryType);
}
 
Example #27
Source File: LogicalTypeDataTypeConverter.java    From flink with Apache License 2.0
@Override
public DataType visit(BooleanType booleanType) {
	return new AtomicDataType(booleanType);
}