Java Code Examples for org.apache.flink.table.api.DataTypes#INT

The following examples show how to use org.apache.flink.table.api.DataTypes#INT. Each example is drawn from an open-source project; the source file and license are noted above the code.
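Before diving into the examples, here is a minimal, self-contained sketch of how DataTypes.INT() is typically combined with other DataTypes factory methods. This snippet is illustrative only and is not taken from any of the projects below; the class name and field names are made up for the demonstration:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class DataTypesIntSketch {
	public static void main(String[] args) {
		// A plain (nullable) 32-bit integer type.
		DataType id = DataTypes.INT();

		// The same type with a NOT NULL constraint, as used by several examples below.
		DataType requiredId = DataTypes.INT().notNull();

		// DataTypes.INT() is often nested inside composite types, e.g. a row type.
		DataType row = DataTypes.ROW(
			DataTypes.FIELD("id", DataTypes.INT()),
			DataTypes.FIELD("name", DataTypes.STRING()));

		System.out.println(id);         // INT
		System.out.println(requiredId); // INT NOT NULL
		System.out.println(row);        // ROW<`id` INT, `name` STRING>
	}
}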
Example 1
Source File: HiveTableUtilTest.java    From flink with Apache License 2.0
@Test
public void testMakePartitionFilter() {
	List<String> partColNames = Arrays.asList("p1", "p2", "p3");
	ResolvedExpression p1Ref = new FieldReferenceExpression("p1", DataTypes.INT(), 0, 2);
	ResolvedExpression p2Ref = new FieldReferenceExpression("p2", DataTypes.STRING(), 0, 3);
	ResolvedExpression p3Ref = new FieldReferenceExpression("p3", DataTypes.DOUBLE(), 0, 4);
	ResolvedExpression p1Exp = new CallExpression(BuiltInFunctionDefinitions.EQUALS,
			Arrays.asList(p1Ref, valueLiteral(1)), DataTypes.BOOLEAN());
	ResolvedExpression p2Exp = new CallExpression(BuiltInFunctionDefinitions.EQUALS,
			Arrays.asList(p2Ref, valueLiteral("a", DataTypes.STRING().notNull())), DataTypes.BOOLEAN());
	ResolvedExpression p3Exp = new CallExpression(BuiltInFunctionDefinitions.EQUALS,
			Arrays.asList(p3Ref, valueLiteral(1.1)), DataTypes.BOOLEAN());
	Optional<String> filter = HiveTableUtil.makePartitionFilter(2, partColNames, Arrays.asList(p1Exp), hiveShim);
	assertEquals("(p1 = 1)", filter.orElse(null));

	filter = HiveTableUtil.makePartitionFilter(2, partColNames, Arrays.asList(p1Exp, p3Exp), hiveShim);
	assertEquals("(p1 = 1) and (p3 = 1.1)", filter.orElse(null));

	filter = HiveTableUtil.makePartitionFilter(2, partColNames,
			Arrays.asList(p2Exp,
					new CallExpression(BuiltInFunctionDefinitions.OR, Arrays.asList(p1Exp, p3Exp), DataTypes.BOOLEAN())),
			hiveShim);
	assertEquals("(p2 = 'a') and ((p1 = 1) or (p3 = 1.1))", filter.orElse(null));
}
 
Example 2
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0
@Test
public void testDataTypes() throws Exception {
	DataType[] types = new DataType[] {
		DataTypes.TINYINT(),
		DataTypes.SMALLINT(),
		DataTypes.INT(),
		DataTypes.BIGINT(),
		DataTypes.FLOAT(),
		DataTypes.DOUBLE(),
		DataTypes.BOOLEAN(),
		DataTypes.STRING(),
		DataTypes.BYTES(),
		DataTypes.DATE(),
		DataTypes.TIMESTAMP(9),
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH),
		DataTypes.VARCHAR(HiveVarchar.MAX_VARCHAR_LENGTH),
		DataTypes.DECIMAL(5, 3)
	};

	verifyDataTypes(types);
}
 
Example 3
Source File: HiveTypeUtil.java    From flink with Apache License 2.0
private static DataType toFlinkPrimitiveType(PrimitiveTypeInfo hiveType) {
	checkNotNull(hiveType, "hiveType cannot be null");

	switch (hiveType.getPrimitiveCategory()) {
		case CHAR:
			return DataTypes.CHAR(((CharTypeInfo) hiveType).getLength());
		case VARCHAR:
			return DataTypes.VARCHAR(((VarcharTypeInfo) hiveType).getLength());
		case STRING:
			return DataTypes.STRING();
		case BOOLEAN:
			return DataTypes.BOOLEAN();
		case BYTE:
			return DataTypes.TINYINT();
		case SHORT:
			return DataTypes.SMALLINT();
		case INT:
			return DataTypes.INT();
		case LONG:
			return DataTypes.BIGINT();
		case FLOAT:
			return DataTypes.FLOAT();
		case DOUBLE:
			return DataTypes.DOUBLE();
		case DATE:
			return DataTypes.DATE();
		case TIMESTAMP:
			return DataTypes.TIMESTAMP();
		case BINARY:
			return DataTypes.BYTES();
		case DECIMAL:
			DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
			return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
		default:
			throw new UnsupportedOperationException(
				String.format("Flink doesn't support Hive primitive type %s yet", hiveType));
	}
}
 
Example 4
Source File: HiveGenericUDTFTest.java    From flink with Apache License 2.0
@Test
public void testOverSumInt() throws Exception {
	Object[] constantArgs = new Object[] {
		null,
		4
	};

	DataType[] dataTypes = new DataType[] {
		DataTypes.INT(),
		DataTypes.INT()
	};

	HiveGenericUDTF udf = init(
		TestOverSumIntUDTF.class,
		constantArgs,
		dataTypes
	);

	udf.eval(5, 4);

	assertEquals(Arrays.asList(Row.of(9), Row.of(9)), collector.result);

	// Test empty input and empty output
	constantArgs = new Object[] {};

	dataTypes = new DataType[] {};

	udf = init(
		TestOverSumIntUDTF.class,
		constantArgs,
		dataTypes
	);

	udf.eval();

	assertEquals(Arrays.asList(), collector.result);
}
 
Example 5
Source File: HiveTypeUtil.java    From flink with Apache License 2.0
private static DataType toFlinkPrimitiveType(PrimitiveTypeInfo hiveType) {
	checkNotNull(hiveType, "hiveType cannot be null");

	switch (hiveType.getPrimitiveCategory()) {
		case CHAR:
			return DataTypes.CHAR(((CharTypeInfo) hiveType).getLength());
		case VARCHAR:
			return DataTypes.VARCHAR(((VarcharTypeInfo) hiveType).getLength());
		case STRING:
			return DataTypes.STRING();
		case BOOLEAN:
			return DataTypes.BOOLEAN();
		case BYTE:
			return DataTypes.TINYINT();
		case SHORT:
			return DataTypes.SMALLINT();
		case INT:
			return DataTypes.INT();
		case LONG:
			return DataTypes.BIGINT();
		case FLOAT:
			return DataTypes.FLOAT();
		case DOUBLE:
			return DataTypes.DOUBLE();
		case DATE:
			return DataTypes.DATE();
		case TIMESTAMP:
			return DataTypes.TIMESTAMP(9);
		case BINARY:
			return DataTypes.BYTES();
		case DECIMAL:
			DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
			return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
		default:
			throw new UnsupportedOperationException(
				String.format("Flink doesn't support Hive primitive type %s yet", hiveType));
	}
}
 
Example 6
Source File: ExpressionTest.java    From flink with Apache License 2.0
private static Expression createExpressionTree(Integer nestedValue) {
	final ValueLiteralExpression nestedLiteral;
	if (nestedValue != null) {
		nestedLiteral = new ValueLiteralExpression(nestedValue, DataTypes.INT().notNull());
	} else {
		nestedLiteral = new ValueLiteralExpression(null, DataTypes.INT());
	}
	return new CallExpression(
		AND,
		asList(
			new ValueLiteralExpression(true),
			new CallExpression(
				EQUALS,
				asList(
					new FieldReferenceExpression("field", DataTypes.INT(), 0, 0),
					new CallExpression(
						new ScalarFunctionDefinition("dummy", DUMMY_FUNCTION),
						singletonList(nestedLiteral),
						DataTypes.INT()
					)
				),
				DataTypes.BOOLEAN()
			)
		),
		DataTypes.BOOLEAN()
	);
}
 
Example 7
Source File: HiveGenericUDTFTest.java    From flink with Apache License 2.0
@Test
public void testStack() throws Exception {
	Object[] constantArgs = new Object[] {
		2,
		null,
		null,
		null,
		null
	};

	DataType[] dataTypes = new DataType[] {
		DataTypes.INT(),
		DataTypes.STRING(),
		DataTypes.STRING(),
		DataTypes.STRING(),
		DataTypes.STRING()
	};

	HiveGenericUDTF udf = init(
		GenericUDTFStack.class,
		constantArgs,
		dataTypes
	);

	udf.eval(2, "a", "b", "c", "d");

	assertEquals(Arrays.asList(Row.of("a", "b"), Row.of("c", "d")), collector.result);
}
 
Example 8
Source File: IncrSumWithRetractAggFunction.java    From flink with Apache License 2.0
@Override
public DataType getResultType() {
	return DataTypes.INT();
}
 
Example 9
Source File: IndexGeneratorTest.java    From flink with Apache License 2.0
@Before
public void prepareData() {
	String[] fieldNames = new String[]{
		"id",
		"item",
		"log_ts",
		"log_date",
		"order_timestamp",
		"log_time",
		"local_datetime",
		"local_date",
		"local_time",
		"note",
		"status"};
	DataType[] dataTypes = new DataType[]{
		DataTypes.INT(),
		DataTypes.STRING(),
		DataTypes.BIGINT(),
		DataTypes.DATE().bridgedTo(java.sql.Date.class),
		DataTypes.TIMESTAMP().bridgedTo(java.sql.Timestamp.class),
		DataTypes.TIME().bridgedTo(java.sql.Time.class),
		DataTypes.TIMESTAMP().bridgedTo(java.time.LocalDateTime.class),
		DataTypes.DATE().bridgedTo(java.time.LocalDate.class),
		DataTypes.TIME().bridgedTo(java.time.LocalTime.class),
		DataTypes.STRING(),
		DataTypes.BOOLEAN()
	};
	schema = new TableSchema.Builder().fields(fieldNames, dataTypes).build();

	rows = new ArrayList<>();
	rows.add(Row.of(
		1,
		"apple",
		Timestamp.valueOf("2020-03-18 12:12:14").getTime(),
		Date.valueOf("2020-03-18"),
		Timestamp.valueOf("2020-03-18 12:12:14"),
		Time.valueOf("12:12:14"),
		LocalDateTime.of(2020, 3, 18, 12, 12, 14, 1000),
		LocalDate.of(2020, 3, 18),
		LocalTime.of(12, 13, 14, 2000),
		"test1",
		true));
	rows.add(Row.of(
		2,
		"peanut",
		Timestamp.valueOf("2020-03-19 12:22:14").getTime(),
		Date.valueOf("2020-03-19"),
		Timestamp.valueOf("2020-03-19 12:22:21"),
		Time.valueOf("12:22:21"),
		LocalDateTime.of(2020, 3, 19, 12, 22, 14, 1000),
		LocalDate.of(2020, 3, 19),
		LocalTime.of(12, 13, 14, 2000),
		"test2",
		false));
}
 
Example 10
Source File: StrategyUtils.java    From flink with Apache License 2.0
/**
 * Returns a data type for the given data type and expected root.
 *
 * <p>This method is aligned with {@link LogicalTypeCasts#supportsImplicitCast(LogicalType, LogicalType)}.
 *
 * <p>The "fallback" data type for each root represents the default data type for a NULL literal. NULL
 * literals receive the smallest possible precision so that they have little impact when finding a common
 * type. Callers must still check whether an implicit cast from the actual type to the returned type is supported.
 */
private static @Nullable DataType findDataTypeOfRoot(
		DataType actualDataType,
		LogicalTypeRoot expectedRoot) {
	final LogicalType actualType = actualDataType.getLogicalType();
	if (hasRoot(actualType, expectedRoot)) {
		return actualDataType;
	}
	switch (expectedRoot) {
		case CHAR:
			return DataTypes.CHAR(CharType.DEFAULT_LENGTH);
		case VARCHAR:
			if (hasRoot(actualType, CHAR)) {
				return DataTypes.VARCHAR(getLength(actualType));
			}
			return DataTypes.VARCHAR(VarCharType.DEFAULT_LENGTH);
		case BOOLEAN:
			return DataTypes.BOOLEAN();
		case BINARY:
			return DataTypes.BINARY(BinaryType.DEFAULT_LENGTH);
		case VARBINARY:
			if (hasRoot(actualType, BINARY)) {
				return DataTypes.VARBINARY(getLength(actualType));
			}
			return DataTypes.VARBINARY(VarBinaryType.DEFAULT_LENGTH);
		case DECIMAL:
			if (hasFamily(actualType, EXACT_NUMERIC)) {
				return DataTypes.DECIMAL(getPrecision(actualType), getScale(actualType));
			} else if (hasFamily(actualType, APPROXIMATE_NUMERIC)) {
				final int precision = getPrecision(actualType);
				// we don't know where the precision occurs (before or after the dot)
				return DataTypes.DECIMAL(precision * 2, precision);
			}
			return DataTypes.DECIMAL(DecimalType.MIN_PRECISION, DecimalType.MIN_SCALE);
		case TINYINT:
			return DataTypes.TINYINT();
		case SMALLINT:
			return DataTypes.SMALLINT();
		case INTEGER:
			return DataTypes.INT();
		case BIGINT:
			return DataTypes.BIGINT();
		case FLOAT:
			return DataTypes.FLOAT();
		case DOUBLE:
			return DataTypes.DOUBLE();
		case DATE:
			return DataTypes.DATE();
		case TIME_WITHOUT_TIME_ZONE:
			if (hasRoot(actualType, TIMESTAMP_WITHOUT_TIME_ZONE)) {
				return DataTypes.TIME(getPrecision(actualType));
			}
			return DataTypes.TIME();
		case TIMESTAMP_WITHOUT_TIME_ZONE:
			return DataTypes.TIMESTAMP();
		case TIMESTAMP_WITH_TIME_ZONE:
			return DataTypes.TIMESTAMP_WITH_TIME_ZONE();
		case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
			return DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE();
		case INTERVAL_YEAR_MONTH:
			return DataTypes.INTERVAL(DataTypes.MONTH());
		case INTERVAL_DAY_TIME:
			return DataTypes.INTERVAL(DataTypes.SECOND());
		case NULL:
			return DataTypes.NULL();
		case ARRAY:
		case MULTISET:
		case MAP:
		case ROW:
		case DISTINCT_TYPE:
		case STRUCTURED_TYPE:
		case RAW:
		case SYMBOL:
		case UNRESOLVED:
		default:
			return null;
	}
}
 
Example 11
Source File: SingleValueAggFunction.java    From flink with Apache License 2.0
@Override
public DataType[] getAggBufferTypes() {
	return new DataType[]{
		getResultType(),
		DataTypes.INT()};
}
 
Example 12
Source File: SumAggFunction.java    From flink with Apache License 2.0
@Override
public DataType getResultType() {
	return DataTypes.INT();
}
 
Example 13
Source File: SumWithRetractAggFunction.java    From flink with Apache License 2.0
@Override
public DataType getResultType() {
	return DataTypes.INT();
}
 
Example 14
Source File: Sum0AggFunction.java    From flink with Apache License 2.0
@Override
public DataType getResultType() {
	return DataTypes.INT();
}
 
Example 15
Source File: MaxAggFunction.java    From flink with Apache License 2.0
@Override
public DataType getResultType() {
	return DataTypes.INT();
}
 
Example 16
Source File: LeadLagAggFunction.java    From flink with Apache License 2.0
@Override
public DataType getResultType() {
	return DataTypes.INT();
}
 
Example 17
Source File: AvgAggFunction.java    From flink with Apache License 2.0
@Override
public DataType getResultType() {
	return DataTypes.INT();
}
 
Example 18
Source File: SchemaUtils.java    From pulsar-flink with Apache License 2.0
private static DataType avro2SqlType(Schema avroSchema, Set<String> existingRecordNames) throws IncompatibleSchemaException {
    LogicalType logicalType = avroSchema.getLogicalType();
    switch (avroSchema.getType()) {
        case INT:
            if (logicalType instanceof LogicalTypes.Date) {
                return DataTypes.DATE();
            } else {
                return DataTypes.INT();
            }

        case STRING:
        case ENUM:
            return DataTypes.STRING();

        case BOOLEAN:
            return DataTypes.BOOLEAN();

        case BYTES:
        case FIXED:
            // For the FIXED type, if the precision requires more bytes than the fixed size,
            // the logical type will be null, which is handled by the Avro library.
            if (logicalType instanceof LogicalTypes.Decimal) {
                LogicalTypes.Decimal d = (LogicalTypes.Decimal) logicalType;
                return DataTypes.DECIMAL(d.getPrecision(), d.getScale());
            } else {
                return DataTypes.BYTES();
            }

        case DOUBLE:
            return DataTypes.DOUBLE();

        case FLOAT:
            return DataTypes.FLOAT();

        case LONG:
            if (logicalType instanceof LogicalTypes.TimestampMillis ||
                    logicalType instanceof LogicalTypes.TimestampMicros) {
                return DataTypes.TIMESTAMP(3).bridgedTo(java.sql.Timestamp.class);
            } else {
                return DataTypes.BIGINT();
            }

        case RECORD:
            if (existingRecordNames.contains(avroSchema.getFullName())) {
                throw new IncompatibleSchemaException(
                        String.format("Found recursive reference in Avro schema, which can not be processed by Flink: %s", avroSchema.toString(true)), null);
            }

            Set<String> newRecordName = ImmutableSet.<String>builder()
                    .addAll(existingRecordNames).add(avroSchema.getFullName()).build();
            List<DataTypes.Field> fields = new ArrayList<>();
            for (Schema.Field f : avroSchema.getFields()) {
                DataType fieldType = avro2SqlType(f.schema(), newRecordName);
                fields.add(DataTypes.FIELD(f.name(), fieldType));
            }
            return DataTypes.ROW(fields.toArray(new DataTypes.Field[0]));

        case ARRAY:
            DataType elementType = avro2SqlType(avroSchema.getElementType(), existingRecordNames);
            return DataTypes.ARRAY(elementType);

        case MAP:
            DataType valueType = avro2SqlType(avroSchema.getValueType(), existingRecordNames);
            return DataTypes.MAP(DataTypes.STRING(), valueType);

        case UNION:
            if (avroSchema.getTypes().stream().anyMatch(f -> f.getType() == Schema.Type.NULL)) {
                // In case of a union with null, eliminate it and make a recursive call
                List<Schema> remainingUnionTypes =
                        avroSchema.getTypes().stream().filter(f -> f.getType() != Schema.Type.NULL).collect(Collectors.toList());
                if (remainingUnionTypes.size() == 1) {
                    return avro2SqlType(remainingUnionTypes.get(0), existingRecordNames).nullable();
                } else {
                    return avro2SqlType(Schema.createUnion(remainingUnionTypes), existingRecordNames).nullable();
                }
            } else {
                List<Schema.Type> types = avroSchema.getTypes().stream().map(Schema::getType).collect(Collectors.toList());
                if (types.size() == 1) {
                    return avro2SqlType(avroSchema.getTypes().get(0), existingRecordNames);
                } else if (types.size() == 2 && types.contains(Schema.Type.INT) && types.contains(Schema.Type.LONG)) {
                    return DataTypes.BIGINT();
                } else if (types.size() == 2 && types.contains(Schema.Type.FLOAT) && types.contains(Schema.Type.DOUBLE)) {
                    return DataTypes.DOUBLE();
                } else {
                    // Convert complex unions to struct types where field names are member0, member1, etc.
                    // This is consistent with the behavior when converting between Avro and Parquet.
                    List<DataTypes.Field> memberFields = new ArrayList<>();
                    List<Schema> schemas = avroSchema.getTypes();
                    for (int i = 0; i < schemas.size(); i++) {
                        DataType memberType = avro2SqlType(schemas.get(i), existingRecordNames);
                        memberFields.add(DataTypes.FIELD("member" + i, memberType));
                    }
                    return DataTypes.ROW(memberFields.toArray(new DataTypes.Field[0]));
                }
            }

        default:
            throw new IncompatibleSchemaException(String.format("Unsupported type %s", avroSchema.toString(true)), null);
    }
}