Java Code Examples for org.apache.flink.table.api.DataTypes#CHAR

The following examples show how to use org.apache.flink.table.api.DataTypes#CHAR. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0 6 votes vote down vote up
@Test
public void testDataTypes() throws Exception {
	// Round-trip every primitive Flink type the HiveCatalog is expected to support,
	// using Hive's maximum lengths for the bounded character types.
	verifyDataTypes(new DataType[] {
		DataTypes.TINYINT(),
		DataTypes.SMALLINT(),
		DataTypes.INT(),
		DataTypes.BIGINT(),
		DataTypes.FLOAT(),
		DataTypes.DOUBLE(),
		DataTypes.BOOLEAN(),
		DataTypes.STRING(),
		DataTypes.BYTES(),
		DataTypes.DATE(),
		DataTypes.TIMESTAMP(),
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH),
		DataTypes.VARCHAR(HiveVarchar.MAX_VARCHAR_LENGTH),
		DataTypes.DECIMAL(5, 3)
	});
}
 
Example 2
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0 6 votes vote down vote up
@Test
public void testDataTypes() throws Exception {
	// Round-trip every primitive Flink type the HiveCatalog is expected to support.
	// TIMESTAMP uses precision 9, and the bounded character types use Hive's maxima.
	verifyDataTypes(new DataType[] {
		DataTypes.TINYINT(),
		DataTypes.SMALLINT(),
		DataTypes.INT(),
		DataTypes.BIGINT(),
		DataTypes.FLOAT(),
		DataTypes.DOUBLE(),
		DataTypes.BOOLEAN(),
		DataTypes.STRING(),
		DataTypes.BYTES(),
		DataTypes.DATE(),
		DataTypes.TIMESTAMP(9),
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH),
		DataTypes.VARCHAR(HiveVarchar.MAX_VARCHAR_LENGTH),
		DataTypes.DECIMAL(5, 3)
	});
}
 
Example 3
Source File: HiveTypeUtil.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a Hive primitive type to the corresponding Flink {@link DataType}.
 *
 * @param hiveType Hive primitive type info; must not be null
 * @return the equivalent Flink data type
 * @throws UnsupportedOperationException if the Hive primitive category has no Flink mapping
 */
private static DataType toFlinkPrimitiveType(PrimitiveTypeInfo hiveType) {
	checkNotNull(hiveType, "hiveType cannot be null");

	switch (hiveType.getPrimitiveCategory()) {
		case CHAR:
			// CHAR/VARCHAR carry an explicit declared length that must be preserved.
			return DataTypes.CHAR(((CharTypeInfo) hiveType).getLength());
		case VARCHAR:
			return DataTypes.VARCHAR(((VarcharTypeInfo) hiveType).getLength());
		case STRING:
			return DataTypes.STRING();
		case BOOLEAN:
			return DataTypes.BOOLEAN();
		case BYTE:
			// Hive BYTE maps to Flink TINYINT, SHORT to SMALLINT, LONG to BIGINT.
			return DataTypes.TINYINT();
		case SHORT:
			return DataTypes.SMALLINT();
		case INT:
			return DataTypes.INT();
		case LONG:
			return DataTypes.BIGINT();
		case FLOAT:
			return DataTypes.FLOAT();
		case DOUBLE:
			return DataTypes.DOUBLE();
		case DATE:
			return DataTypes.DATE();
		case TIMESTAMP:
			// Uses Flink's default timestamp precision (no explicit precision given).
			return DataTypes.TIMESTAMP();
		case BINARY:
			return DataTypes.BYTES();
		case DECIMAL:
			// Precision and scale are carried over from the Hive decimal declaration.
			DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
			return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
		default:
			throw new UnsupportedOperationException(
				String.format("Flink doesn't support Hive primitive type %s yet", hiveType));
	}
}
 
Example 4
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testCharTypeLength() throws Exception {
	// A char length one past the Hive maximum must be rejected with a clear message.
	exception.expect(CatalogException.class);
	exception.expectMessage("HiveCatalog doesn't support char type with length of '256'. The maximum length is 255");

	verifyDataTypes(new DataType[] {
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH + 1)
	});
}
 
Example 5
Source File: HiveTypeUtil.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a Hive primitive type to the corresponding Flink {@link DataType}.
 *
 * @param hiveType Hive primitive type info; must not be null
 * @return the equivalent Flink data type
 * @throws UnsupportedOperationException if the Hive primitive category has no Flink mapping
 */
private static DataType toFlinkPrimitiveType(PrimitiveTypeInfo hiveType) {
	checkNotNull(hiveType, "hiveType cannot be null");

	switch (hiveType.getPrimitiveCategory()) {
		case CHAR:
			// CHAR/VARCHAR carry an explicit declared length that must be preserved.
			return DataTypes.CHAR(((CharTypeInfo) hiveType).getLength());
		case VARCHAR:
			return DataTypes.VARCHAR(((VarcharTypeInfo) hiveType).getLength());
		case STRING:
			return DataTypes.STRING();
		case BOOLEAN:
			return DataTypes.BOOLEAN();
		case BYTE:
			// Hive BYTE maps to Flink TINYINT, SHORT to SMALLINT, LONG to BIGINT.
			return DataTypes.TINYINT();
		case SHORT:
			return DataTypes.SMALLINT();
		case INT:
			return DataTypes.INT();
		case LONG:
			return DataTypes.BIGINT();
		case FLOAT:
			return DataTypes.FLOAT();
		case DOUBLE:
			return DataTypes.DOUBLE();
		case DATE:
			return DataTypes.DATE();
		case TIMESTAMP:
			// Precision 9 — presumably to match Hive's nanosecond timestamp granularity.
			return DataTypes.TIMESTAMP(9);
		case BINARY:
			return DataTypes.BYTES();
		case DECIMAL:
			// Precision and scale are carried over from the Hive decimal declaration.
			DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
			return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
		default:
			throw new UnsupportedOperationException(
				String.format("Flink doesn't support Hive primitive type %s yet", hiveType));
	}
}
 
Example 6
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testCharTypeLength() throws Exception {
	// A char length one past the Hive maximum must be rejected by the catalog.
	exception.expect(CatalogException.class);

	verifyDataTypes(new DataType[] {
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH + 1)
	});
}
 
Example 7
Source File: ValueDataTypeConverter.java    From flink with Apache License 2.0 4 votes vote down vote up
/** Derives a CHAR type whose length matches the given literal; empty literals get the special zero-length char type. */
private static DataType convertToCharType(String string) {
	return string.isEmpty()
		? new AtomicDataType(CharType.ofEmptyLiteral())
		: DataTypes.CHAR(string.length());
}
 
Example 8
Source File: PostgresCatalog.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Converts a Postgres column type to the corresponding Flink {@link DataType}.
 *
 * @param metadata result set metadata describing the queried table
 * @param colIndex 1-based column index (JDBC convention) within {@code metadata}
 * @return the Flink data type for the column
 * @throws SQLException if the column metadata cannot be read
 * @throws UnsupportedOperationException if the Postgres type has no Flink mapping
 * @see org.postgresql.jdbc.TypeInfoCache
 */
private DataType fromJDBCType(ResultSetMetaData metadata, int colIndex) throws SQLException {
	String pgType = metadata.getColumnTypeName(colIndex);

	// For character types "precision" is the declared length; for time/timestamp
	// types "scale" is used below as the fractional-second precision.
	int precision = metadata.getPrecision(colIndex);
	int scale = metadata.getScale(colIndex);

	switch (pgType) {
		case PG_BOOLEAN:
			return DataTypes.BOOLEAN();
		case PG_BOOLEAN_ARRAY:
			return DataTypes.ARRAY(DataTypes.BOOLEAN());
		case PG_BYTEA:
			return DataTypes.BYTES();
		case PG_BYTEA_ARRAY:
			return DataTypes.ARRAY(DataTypes.BYTES());
		case PG_SMALLINT:
			return DataTypes.SMALLINT();
		case PG_SMALLINT_ARRAY:
			return DataTypes.ARRAY(DataTypes.SMALLINT());
		case PG_INTEGER:
		case PG_SERIAL:
			return DataTypes.INT();
		case PG_INTEGER_ARRAY:
			return DataTypes.ARRAY(DataTypes.INT());
		case PG_BIGINT:
		case PG_BIGSERIAL:
			return DataTypes.BIGINT();
		case PG_BIGINT_ARRAY:
			return DataTypes.ARRAY(DataTypes.BIGINT());
		case PG_REAL:
			return DataTypes.FLOAT();
		case PG_REAL_ARRAY:
			return DataTypes.ARRAY(DataTypes.FLOAT());
		case PG_DOUBLE_PRECISION:
			return DataTypes.DOUBLE();
		case PG_DOUBLE_PRECISION_ARRAY:
			return DataTypes.ARRAY(DataTypes.DOUBLE());
		case PG_NUMERIC:
			// see SPARK-26538: handle numeric without explicit precision and scale.
			if (precision > 0) {
				return DataTypes.DECIMAL(precision, metadata.getScale(colIndex));
			}
			// Unspecified precision: fall back to max precision with a fixed scale of 18.
			return DataTypes.DECIMAL(DecimalType.MAX_PRECISION, 18);
		case PG_NUMERIC_ARRAY:
			// see SPARK-26538: handle numeric without explicit precision and scale.
			if (precision > 0) {
				return DataTypes.ARRAY(DataTypes.DECIMAL(precision, metadata.getScale(colIndex)));
			}
			return DataTypes.ARRAY(DataTypes.DECIMAL(DecimalType.MAX_PRECISION, 18));
		case PG_CHAR:
		case PG_CHARACTER:
			return DataTypes.CHAR(precision);
		case PG_CHAR_ARRAY:
		case PG_CHARACTER_ARRAY:
			return DataTypes.ARRAY(DataTypes.CHAR(precision));
		case PG_CHARACTER_VARYING:
			return DataTypes.VARCHAR(precision);
		case PG_CHARACTER_VARYING_ARRAY:
			return DataTypes.ARRAY(DataTypes.VARCHAR(precision));
		case PG_TEXT:
			return DataTypes.STRING();
		case PG_TEXT_ARRAY:
			return DataTypes.ARRAY(DataTypes.STRING());
		case PG_TIMESTAMP:
			return DataTypes.TIMESTAMP(scale);
		case PG_TIMESTAMP_ARRAY:
			return DataTypes.ARRAY(DataTypes.TIMESTAMP(scale));
		case PG_TIMESTAMPTZ:
			return DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(scale);
		case PG_TIMESTAMPTZ_ARRAY:
			return DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(scale));
		case PG_TIME:
			return DataTypes.TIME(scale);
		case PG_TIME_ARRAY:
			return DataTypes.ARRAY(DataTypes.TIME(scale));
		case PG_DATE:
			return DataTypes.DATE();
		case PG_DATE_ARRAY:
			return DataTypes.ARRAY(DataTypes.DATE());
		default:
			throw new UnsupportedOperationException(
				String.format("Doesn't support Postgres type '%s' yet", pgType));
	}
}
 
Example 9
Source File: StrategyUtils.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Returns a data type for the given data type and expected root.
 *
 * <p>This method is aligned with {@link LogicalTypeCasts#supportsImplicitCast(LogicalType, LogicalType)}.
 *
 * <p>The "fallback" data type for each root represents the default data type for a NULL literal. NULL
 * literals will receive the smallest precision possible for having little impact when finding a common
 * type. The output of this method needs to be checked again if an implicit cast is supported.
 *
 * @param actualDataType the data type actually present
 * @param expectedRoot the logical type root the caller wants
 * @return {@code actualDataType} if its root already matches, a suitable fallback type for the
 *         expected root otherwise, or {@code null} for roots with no sensible fallback
 */
private static @Nullable DataType findDataTypeOfRoot(
		DataType actualDataType,
		LogicalTypeRoot expectedRoot) {
	final LogicalType actualType = actualDataType.getLogicalType();
	// Already the expected root: return the input unchanged.
	if (hasRoot(actualType, expectedRoot)) {
		return actualDataType;
	}
	switch (expectedRoot) {
		case CHAR:
			return DataTypes.CHAR(CharType.DEFAULT_LENGTH);
		case VARCHAR:
			// CHAR widens to VARCHAR of the same length; otherwise use the default.
			if (hasRoot(actualType, CHAR)) {
				return DataTypes.VARCHAR(getLength(actualType));
			}
			return DataTypes.VARCHAR(VarCharType.DEFAULT_LENGTH);
		case BOOLEAN:
			return DataTypes.BOOLEAN();
		case BINARY:
			return DataTypes.BINARY(BinaryType.DEFAULT_LENGTH);
		case VARBINARY:
			// BINARY widens to VARBINARY of the same length, mirroring the CHAR case.
			if (hasRoot(actualType, BINARY)) {
				return DataTypes.VARBINARY(getLength(actualType));
			}
			return DataTypes.VARBINARY(VarBinaryType.DEFAULT_LENGTH);
		case DECIMAL:
			if (hasFamily(actualType, EXACT_NUMERIC)) {
				// Exact numerics keep their precision and scale.
				return DataTypes.DECIMAL(getPrecision(actualType), getScale(actualType));
			} else if (hasFamily(actualType, APPROXIMATE_NUMERIC)) {
				final int precision = getPrecision(actualType);
				// we don't know where the precision occurs (before or after the dot)
				return DataTypes.DECIMAL(precision * 2, precision);
			}
			// NULL-literal fallback: smallest possible decimal.
			return DataTypes.DECIMAL(DecimalType.MIN_PRECISION, DecimalType.MIN_SCALE);
		case TINYINT:
			return DataTypes.TINYINT();
		case SMALLINT:
			return DataTypes.SMALLINT();
		case INTEGER:
			return DataTypes.INT();
		case BIGINT:
			return DataTypes.BIGINT();
		case FLOAT:
			return DataTypes.FLOAT();
		case DOUBLE:
			return DataTypes.DOUBLE();
		case DATE:
			return DataTypes.DATE();
		case TIME_WITHOUT_TIME_ZONE:
			// A timestamp's fractional-second precision is preserved when narrowing to TIME.
			if (hasRoot(actualType, TIMESTAMP_WITHOUT_TIME_ZONE)) {
				return DataTypes.TIME(getPrecision(actualType));
			}
			return DataTypes.TIME();
		case TIMESTAMP_WITHOUT_TIME_ZONE:
			return DataTypes.TIMESTAMP();
		case TIMESTAMP_WITH_TIME_ZONE:
			return DataTypes.TIMESTAMP_WITH_TIME_ZONE();
		case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
			return DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE();
		case INTERVAL_YEAR_MONTH:
			return DataTypes.INTERVAL(DataTypes.MONTH());
		case INTERVAL_DAY_TIME:
			return DataTypes.INTERVAL(DataTypes.SECOND());
		case NULL:
			return DataTypes.NULL();
		case ARRAY:
		case MULTISET:
		case MAP:
		case ROW:
		case DISTINCT_TYPE:
		case STRUCTURED_TYPE:
		case RAW:
		case SYMBOL:
		case UNRESOLVED:
		default:
			// Constructed/special roots have no sensible NULL-literal fallback here.
			return null;
	}
}
 
Example 10
Source File: ValueDataTypeConverter.java    From flink with Apache License 2.0 4 votes vote down vote up
private static DataType convertToCharType(String string) {
	if (string.isEmpty()) {
		return new AtomicDataType(CharType.ofEmptyLiteral());
	}
	return DataTypes.CHAR(string.length());
}