Java Code Examples for org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo#DATE

The following examples show how to use org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo#DATE. You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: OrcTableSource.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Maps a Flink {@code TypeInformation} to the corresponding ORC predicate leaf type.
 *
 * @param type The Flink type to translate.
 * @return The matching {@code PredicateLeaf.Type}, or {@code null} if the type is
 *         not supported for ORC predicate push-down.
 */
private PredicateLeaf.Type toOrcType(TypeInformation<?> type) {
	// All integer-like Flink types are represented by the LONG predicate type in ORC.
	boolean integral =
		type == BasicTypeInfo.BYTE_TYPE_INFO
			|| type == BasicTypeInfo.SHORT_TYPE_INFO
			|| type == BasicTypeInfo.INT_TYPE_INFO
			|| type == BasicTypeInfo.LONG_TYPE_INFO;
	if (integral) {
		return PredicateLeaf.Type.LONG;
	}
	// Single and double precision both map to the FLOAT predicate type.
	if (type == BasicTypeInfo.FLOAT_TYPE_INFO || type == BasicTypeInfo.DOUBLE_TYPE_INFO) {
		return PredicateLeaf.Type.FLOAT;
	}
	if (type == BasicTypeInfo.BOOLEAN_TYPE_INFO) {
		return PredicateLeaf.Type.BOOLEAN;
	}
	if (type == BasicTypeInfo.STRING_TYPE_INFO) {
		return PredicateLeaf.Type.STRING;
	}
	if (type == SqlTimeTypeInfo.TIMESTAMP) {
		return PredicateLeaf.Type.TIMESTAMP;
	}
	if (type == SqlTimeTypeInfo.DATE) {
		return PredicateLeaf.Type.DATE;
	}
	if (type == BasicTypeInfo.BIG_DEC_TYPE_INFO) {
		return PredicateLeaf.Type.DECIMAL;
	}
	// Unsupported type: null tells the caller to skip push-down for this field.
	return null;
}
 
Example 2
Source File: OrcTableSource.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Translates a Flink {@code TypeInformation} into the ORC predicate leaf type
 * used for filter push-down.
 *
 * @param type The Flink field type.
 * @return The corresponding {@code PredicateLeaf.Type}, or {@code null} when the
 *         type cannot participate in ORC predicate push-down.
 */
private PredicateLeaf.Type toOrcType(TypeInformation<?> type) {
	// ORC folds every integer width into a single LONG predicate type.
	if (type == BasicTypeInfo.BYTE_TYPE_INFO
			|| type == BasicTypeInfo.SHORT_TYPE_INFO
			|| type == BasicTypeInfo.INT_TYPE_INFO
			|| type == BasicTypeInfo.LONG_TYPE_INFO) {
		return PredicateLeaf.Type.LONG;
	}
	// Likewise, float and double share the FLOAT predicate type.
	if (type == BasicTypeInfo.FLOAT_TYPE_INFO
			|| type == BasicTypeInfo.DOUBLE_TYPE_INFO) {
		return PredicateLeaf.Type.FLOAT;
	}
	if (type == BasicTypeInfo.BOOLEAN_TYPE_INFO) {
		return PredicateLeaf.Type.BOOLEAN;
	}
	if (type == BasicTypeInfo.STRING_TYPE_INFO) {
		return PredicateLeaf.Type.STRING;
	}
	if (type == SqlTimeTypeInfo.TIMESTAMP) {
		return PredicateLeaf.Type.TIMESTAMP;
	}
	if (type == SqlTimeTypeInfo.DATE) {
		return PredicateLeaf.Type.DATE;
	}
	if (type == BasicTypeInfo.BIG_DEC_TYPE_INFO) {
		return PredicateLeaf.Type.DECIMAL;
	}
	// Anything else is unsupported; returning null disables push-down for it.
	return null;
}
 
Example 3
Source File: OrcTableSource.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Resolves the ORC predicate leaf type for a given Flink {@code TypeInformation}.
 *
 * @param type The Flink type of the column being filtered.
 * @return The matching {@code PredicateLeaf.Type}, or {@code null} for types that
 *         ORC predicate push-down does not support.
 */
private PredicateLeaf.Type toOrcType(TypeInformation<?> type) {
	// Guard-clause chain; each comparison is reference identity against the
	// singleton type-info constants, matching how Flink exposes them.
	boolean isIntegerFamily =
		type == BasicTypeInfo.BYTE_TYPE_INFO
			|| type == BasicTypeInfo.SHORT_TYPE_INFO
			|| type == BasicTypeInfo.INT_TYPE_INFO
			|| type == BasicTypeInfo.LONG_TYPE_INFO;
	if (isIntegerFamily) {
		// All integer widths collapse to LONG in ORC predicates.
		return PredicateLeaf.Type.LONG;
	}
	boolean isFloatingFamily =
		type == BasicTypeInfo.FLOAT_TYPE_INFO
			|| type == BasicTypeInfo.DOUBLE_TYPE_INFO;
	if (isFloatingFamily) {
		return PredicateLeaf.Type.FLOAT;
	}
	if (type == BasicTypeInfo.BOOLEAN_TYPE_INFO) {
		return PredicateLeaf.Type.BOOLEAN;
	}
	if (type == BasicTypeInfo.STRING_TYPE_INFO) {
		return PredicateLeaf.Type.STRING;
	}
	if (type == SqlTimeTypeInfo.TIMESTAMP) {
		return PredicateLeaf.Type.TIMESTAMP;
	}
	if (type == SqlTimeTypeInfo.DATE) {
		return PredicateLeaf.Type.DATE;
	}
	if (type == BasicTypeInfo.BIG_DEC_TYPE_INFO) {
		return PredicateLeaf.Type.DECIMAL;
	}
	// Unsupported type; caller interprets null as "no push-down".
	return null;
}
 
Example 4
Source File: RowCsvInputFormatTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that SQL DATE, TIME, and TIMESTAMP columns are parsed correctly,
 * including non-zero-padded literals and optional fractional seconds.
 */
@Test
public void testSqlTimeFields() throws Exception {
	String fileContent = "1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5\n" +
		"1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5.3\n";

	FileInputSplit split = createTempFile(fileContent);

	TypeInformation[] fieldTypes = new TypeInformation[]{
		SqlTimeTypeInfo.DATE,
		SqlTimeTypeInfo.TIME,
		SqlTimeTypeInfo.TIMESTAMP,
		SqlTimeTypeInfo.TIMESTAMP};

	RowCsvInputFormat format = new RowCsvInputFormat(PATH, fieldTypes);
	format.setFieldDelimiter("|");
	format.configure(new Configuration());
	format.open(split);

	// Expected values for the last timestamp column, one entry per input line;
	// only that column differs between the two lines (fractional seconds).
	String[] expectedLastTimestamps = {"1990-01-04 02:02:05", "1990-01-04 02:02:05.3"};

	Row row = new Row(4);
	for (String expectedTs : expectedLastTimestamps) {
		row = format.nextRecord(row);
		assertNotNull(row);
		assertEquals(Date.valueOf("1990-10-14"), row.getField(0));
		assertEquals(Time.valueOf("02:42:25"), row.getField(1));
		assertEquals(Timestamp.valueOf("1990-10-14 02:42:25.123"), row.getField(2));
		assertEquals(Timestamp.valueOf(expectedTs), row.getField(3));
	}

	// After both data lines are consumed the format must report end-of-input.
	assertNull(format.nextRecord(row));
	assertTrue(format.reachedEnd());
}
 
Example 5
Source File: RowCsvInputFormatTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Checks parsing of SQL DATE, TIME, and TIMESTAMP fields from a pipe-delimited
 * CSV, covering both padded and unpadded literals and fractional seconds.
 */
@Test
public void testSqlTimeFields() throws Exception {
	String fileContent = "1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5\n" +
		"1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5.3\n";

	FileInputSplit split = createTempFile(fileContent);

	TypeInformation[] fieldTypes = new TypeInformation[]{
		SqlTimeTypeInfo.DATE,
		SqlTimeTypeInfo.TIME,
		SqlTimeTypeInfo.TIMESTAMP,
		SqlTimeTypeInfo.TIMESTAMP};

	RowCsvInputFormat format = new RowCsvInputFormat(PATH, fieldTypes);
	format.setFieldDelimiter("|");
	format.configure(new Configuration());
	format.open(split);

	// The two input lines differ only in the final timestamp column.
	String[] expectedLastTimestamps = {"1990-01-04 02:02:05", "1990-01-04 02:02:05.3"};

	Row row = new Row(4);
	for (String expectedTs : expectedLastTimestamps) {
		row = format.nextRecord(row);
		assertNotNull(row);
		assertEquals(Date.valueOf("1990-10-14"), row.getField(0));
		assertEquals(Time.valueOf("02:42:25"), row.getField(1));
		assertEquals(Timestamp.valueOf("1990-10-14 02:42:25.123"), row.getField(2));
		assertEquals(Timestamp.valueOf(expectedTs), row.getField(3));
	}

	// No further records: the stream is exhausted.
	assertNull(format.nextRecord(row));
	assertTrue(format.reachedEnd());
}
 
Example 6
Source File: RowCsvInputFormatTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that the builder-configured CSV format parses SQL DATE, TIME, and
 * TIMESTAMP fields, including unpadded literals and fractional seconds.
 */
@Test
public void testSqlTimeFields() throws Exception {
	String fileContent = "1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5\n" +
			"1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5.3\n";

	FileInputSplit split = createTempFile(fileContent);

	TypeInformation[] fieldTypes = new TypeInformation[]{
			SqlTimeTypeInfo.DATE,
			SqlTimeTypeInfo.TIME,
			SqlTimeTypeInfo.TIMESTAMP,
			SqlTimeTypeInfo.TIMESTAMP};

	// Build the format in one fluent chain instead of a separate builder variable.
	RowCsvInputFormat format = RowCsvInputFormat.builder(new RowTypeInfo(fieldTypes), PATH)
			.setFieldDelimiter('|')
			.build();
	format.configure(new Configuration());
	format.open(split);

	// Expected final-column timestamps, one per input line; the second line
	// carries fractional seconds.
	String[] expectedLastTimestamps = {"1990-01-04 02:02:05", "1990-01-04 02:02:05.3"};

	Row row = new Row(4);
	for (String expectedTs : expectedLastTimestamps) {
		row = format.nextRecord(row);
		assertNotNull(row);
		assertEquals(Date.valueOf("1990-10-14"), row.getField(0));
		assertEquals(Time.valueOf("02:42:25"), row.getField(1));
		assertEquals(Timestamp.valueOf("1990-10-14 02:42:25.123"), row.getField(2));
		assertEquals(Timestamp.valueOf(expectedTs), row.getField(3));
	}

	assertNull(format.nextRecord(row));
	assertTrue(format.reachedEnd());
}
 
Example 7
Source File: RowCsvInputFormatTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Ensures SQL DATE, TIME, and TIMESTAMP columns are read correctly from a
 * pipe-delimited CSV, with and without fractional seconds.
 */
@Test
public void testSqlTimeFields() throws Exception {
	String fileContent = "1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5\n" +
		"1990-10-14|02:42:25|1990-10-14 02:42:25.123|1990-1-4 2:2:5.3\n";

	FileInputSplit split = createTempFile(fileContent);

	TypeInformation[] fieldTypes = new TypeInformation[]{
		SqlTimeTypeInfo.DATE,
		SqlTimeTypeInfo.TIME,
		SqlTimeTypeInfo.TIMESTAMP,
		SqlTimeTypeInfo.TIMESTAMP};

	RowCsvInputFormat format = new RowCsvInputFormat(PATH, fieldTypes);
	format.setFieldDelimiter("|");
	format.configure(new Configuration());
	format.open(split);

	// Per-line expectations for the final timestamp column; all other columns
	// are identical across the two input lines.
	String[] expectedLastTimestamps = {"1990-01-04 02:02:05", "1990-01-04 02:02:05.3"};

	Row row = new Row(4);
	for (String expectedTs : expectedLastTimestamps) {
		row = format.nextRecord(row);
		assertNotNull(row);
		assertEquals(Date.valueOf("1990-10-14"), row.getField(0));
		assertEquals(Time.valueOf("02:42:25"), row.getField(1));
		assertEquals(Timestamp.valueOf("1990-10-14 02:42:25.123"), row.getField(2));
		assertEquals(Timestamp.valueOf(expectedTs), row.getField(3));
	}

	assertNull(format.nextRecord(row));
	assertTrue(format.reachedEnd());
}
 
Example 8
Source File: OrcBatchReader.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Converts an ORC schema to a Flink TypeInformation.
 *
 * <p>Primitive ORC categories map to Flink basic types; STRUCT, LIST, and MAP
 * are converted recursively.
 *
 * @param schema The ORC schema.
 * @return The TypeInformation that corresponds to the ORC schema.
 * @throws UnsupportedOperationException if the schema contains a UNION type.
 * @throws IllegalArgumentException if the schema category is not recognized.
 */
static TypeInformation schemaToTypeInfo(TypeDescription schema) {
	switch (schema.getCategory()) {
		case BOOLEAN:
			return BasicTypeInfo.BOOLEAN_TYPE_INFO;
		case BYTE:
			return BasicTypeInfo.BYTE_TYPE_INFO;
		case SHORT:
			return BasicTypeInfo.SHORT_TYPE_INFO;
		case INT:
			return BasicTypeInfo.INT_TYPE_INFO;
		case LONG:
			return BasicTypeInfo.LONG_TYPE_INFO;
		case FLOAT:
			return BasicTypeInfo.FLOAT_TYPE_INFO;
		case DOUBLE:
			return BasicTypeInfo.DOUBLE_TYPE_INFO;
		case DECIMAL:
			return BasicTypeInfo.BIG_DEC_TYPE_INFO;
		// STRING, CHAR, and VARCHAR all collapse to Flink's String type.
		case STRING:
		case CHAR:
		case VARCHAR:
			return BasicTypeInfo.STRING_TYPE_INFO;
		case DATE:
			return SqlTimeTypeInfo.DATE;
		case TIMESTAMP:
			return SqlTimeTypeInfo.TIMESTAMP;
		case BINARY:
			return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
		case STRUCT:
			// Recursively convert each nested field and preserve the field names.
			List<TypeDescription> fieldSchemas = schema.getChildren();
			TypeInformation[] fieldTypes = new TypeInformation[fieldSchemas.size()];
			for (int i = 0; i < fieldSchemas.size(); i++) {
				fieldTypes[i] = schemaToTypeInfo(fieldSchemas.get(i));
			}
			String[] fieldNames = schema.getFieldNames().toArray(new String[]{});
			return new RowTypeInfo(fieldTypes, fieldNames);
		case LIST:
			// The single child schema describes the element type.
			TypeDescription elementSchema = schema.getChildren().get(0);
			TypeInformation<?> elementType = schemaToTypeInfo(elementSchema);
			// arrays of primitive types are handled as object arrays to support null values
			return ObjectArrayTypeInfo.getInfoFor(elementType);
		case MAP:
			// Children are [key schema, value schema] in that order.
			TypeDescription keySchema = schema.getChildren().get(0);
			TypeDescription valSchema = schema.getChildren().get(1);
			TypeInformation<?> keyType = schemaToTypeInfo(keySchema);
			TypeInformation<?> valType = schemaToTypeInfo(valSchema);
			return new MapTypeInfo<>(keyType, valType);
		case UNION:
			throw new UnsupportedOperationException("UNION type is not supported yet.");
		default:
			throw new IllegalArgumentException("Unknown type " + schema);
	}
}
 
Example 9
Source File: OrcBatchReader.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Converts an ORC schema to a Flink TypeInformation.
 *
 * <p>Primitive ORC categories map directly to Flink basic types; composite
 * categories (STRUCT, LIST, MAP) are converted by recursing into their children.
 *
 * @param schema The ORC schema.
 * @return The TypeInformation that corresponds to the ORC schema.
 * @throws UnsupportedOperationException if the schema contains a UNION type.
 * @throws IllegalArgumentException if the schema category is not recognized.
 */
static TypeInformation schemaToTypeInfo(TypeDescription schema) {
	switch (schema.getCategory()) {
		case BOOLEAN:
			return BasicTypeInfo.BOOLEAN_TYPE_INFO;
		case BYTE:
			return BasicTypeInfo.BYTE_TYPE_INFO;
		case SHORT:
			return BasicTypeInfo.SHORT_TYPE_INFO;
		case INT:
			return BasicTypeInfo.INT_TYPE_INFO;
		case LONG:
			return BasicTypeInfo.LONG_TYPE_INFO;
		case FLOAT:
			return BasicTypeInfo.FLOAT_TYPE_INFO;
		case DOUBLE:
			return BasicTypeInfo.DOUBLE_TYPE_INFO;
		case DECIMAL:
			return BasicTypeInfo.BIG_DEC_TYPE_INFO;
		// All ORC character types are represented as Flink Strings.
		case STRING:
		case CHAR:
		case VARCHAR:
			return BasicTypeInfo.STRING_TYPE_INFO;
		case DATE:
			return SqlTimeTypeInfo.DATE;
		case TIMESTAMP:
			return SqlTimeTypeInfo.TIMESTAMP;
		case BINARY:
			return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
		case STRUCT:
			// Convert each nested field recursively, keeping field names aligned
			// with the converted types.
			List<TypeDescription> fieldSchemas = schema.getChildren();
			TypeInformation[] fieldTypes = new TypeInformation[fieldSchemas.size()];
			for (int i = 0; i < fieldSchemas.size(); i++) {
				fieldTypes[i] = schemaToTypeInfo(fieldSchemas.get(i));
			}
			String[] fieldNames = schema.getFieldNames().toArray(new String[]{});
			return new RowTypeInfo(fieldTypes, fieldNames);
		case LIST:
			// Single child = element type of the list.
			TypeDescription elementSchema = schema.getChildren().get(0);
			TypeInformation<?> elementType = schemaToTypeInfo(elementSchema);
			// arrays of primitive types are handled as object arrays to support null values
			return ObjectArrayTypeInfo.getInfoFor(elementType);
		case MAP:
			// Children are ordered as [key schema, value schema].
			TypeDescription keySchema = schema.getChildren().get(0);
			TypeDescription valSchema = schema.getChildren().get(1);
			TypeInformation<?> keyType = schemaToTypeInfo(keySchema);
			TypeInformation<?> valType = schemaToTypeInfo(valSchema);
			return new MapTypeInfo<>(keyType, valType);
		case UNION:
			throw new UnsupportedOperationException("UNION type is not supported yet.");
		default:
			throw new IllegalArgumentException("Unknown type " + schema);
	}
}
 
Example 10
Source File: FlinkJdbcGenerator.java    From Quicksql with MIT License 4 votes vote down vote up
/**
 * Maps a SQL/JDBC type name to the corresponding Flink {@link TypeInformation}.
 *
 * <p>Matching is case-insensitive; unrecognized type names fall back to
 * {@code BasicTypeInfo.STRING_TYPE_INFO}.
 *
 * @param type the SQL type name, e.g. {@code "INT"} or {@code "varchar"}; must not be null
 * @return the Flink type information for the given SQL type name
 * @throws NullPointerException if {@code type} is null
 */
public static TypeInformation<?> getTypeInformation(String type) {
    java.util.Objects.requireNonNull(type, "type must not be null");
    // Normalize with a fixed locale so lower/mixed-case names (e.g. "int")
    // resolve identically on every platform; all-uppercase input is unchanged.
    switch (type.toUpperCase(java.util.Locale.ROOT)) {
        case "INT":
        case "TINYINT":
        case "SMALLINT":
        case "MEDIUMINT":
        // NOTE(review): BOOLEAN is deliberately grouped with the integer types
        // here rather than mapped to BOOLEAN_TYPE_INFO — confirm upstream intent.
        case "BOOLEAN":
        case "INTEGER":
            return BasicTypeInfo.INT_TYPE_INFO;
        case "BIGINT":
        case "INT UNSIGNED":
            return BasicTypeInfo.LONG_TYPE_INFO;
        // Temporal TIMESTAMP/DATETIME are treated as plain strings, not SQL time types.
        case "VARCHAR":
        case "TEXT":
        case "TIMESTAMP":
        case "DATETIME":
        case "LONGTEXT":
        case "VARCHAR2":
        case "STRING":
        case "CHAR":
            return BasicTypeInfo.STRING_TYPE_INFO;
        case "DOUBLE":
            return BasicTypeInfo.DOUBLE_TYPE_INFO;
        case "FLOAT":
            return BasicTypeInfo.FLOAT_TYPE_INFO;
        case "DATE":
        case "YEAR":
            return SqlTimeTypeInfo.DATE;
        case "BIGDECIMAL":
        case "DECIMAL":
            return BasicTypeInfo.BIG_DEC_TYPE_INFO;
        case "BIT":
            return BasicTypeInfo.BOOLEAN_TYPE_INFO;
        // NOTE(review): BLOB/LONGBLOB map to a single-byte type rather than a
        // byte array — presumably intentional in the original; verify.
        case "BLOB":
        case "LONGBLOB":
            return BasicTypeInfo.BYTE_TYPE_INFO;
        default:
            // Unknown names default to String, the most permissive representation.
            return BasicTypeInfo.STRING_TYPE_INFO;
    }
}
 
Example 11
Source File: OrcBatchReader.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Converts an ORC schema to a Flink TypeInformation.
 *
 * <p>Leaf ORC categories map to Flink basic types; nested categories
 * (STRUCT, LIST, MAP) are converted via recursive calls.
 *
 * @param schema The ORC schema.
 * @return The TypeInformation that corresponds to the ORC schema.
 * @throws UnsupportedOperationException if the schema contains a UNION type.
 * @throws IllegalArgumentException if the schema category is not recognized.
 */
static TypeInformation schemaToTypeInfo(TypeDescription schema) {
	switch (schema.getCategory()) {
		case BOOLEAN:
			return BasicTypeInfo.BOOLEAN_TYPE_INFO;
		case BYTE:
			return BasicTypeInfo.BYTE_TYPE_INFO;
		case SHORT:
			return BasicTypeInfo.SHORT_TYPE_INFO;
		case INT:
			return BasicTypeInfo.INT_TYPE_INFO;
		case LONG:
			return BasicTypeInfo.LONG_TYPE_INFO;
		case FLOAT:
			return BasicTypeInfo.FLOAT_TYPE_INFO;
		case DOUBLE:
			return BasicTypeInfo.DOUBLE_TYPE_INFO;
		case DECIMAL:
			return BasicTypeInfo.BIG_DEC_TYPE_INFO;
		// STRING, CHAR, and VARCHAR share a single Flink String representation.
		case STRING:
		case CHAR:
		case VARCHAR:
			return BasicTypeInfo.STRING_TYPE_INFO;
		case DATE:
			return SqlTimeTypeInfo.DATE;
		case TIMESTAMP:
			return SqlTimeTypeInfo.TIMESTAMP;
		case BINARY:
			return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
		case STRUCT:
			// Recurse into each field; the resulting RowTypeInfo keeps the
			// original field names.
			List<TypeDescription> fieldSchemas = schema.getChildren();
			TypeInformation[] fieldTypes = new TypeInformation[fieldSchemas.size()];
			for (int i = 0; i < fieldSchemas.size(); i++) {
				fieldTypes[i] = schemaToTypeInfo(fieldSchemas.get(i));
			}
			String[] fieldNames = schema.getFieldNames().toArray(new String[]{});
			return new RowTypeInfo(fieldTypes, fieldNames);
		case LIST:
			// The one child schema is the list's element type.
			TypeDescription elementSchema = schema.getChildren().get(0);
			TypeInformation<?> elementType = schemaToTypeInfo(elementSchema);
			// arrays of primitive types are handled as object arrays to support null values
			return ObjectArrayTypeInfo.getInfoFor(elementType);
		case MAP:
			// Children are [key schema, value schema].
			TypeDescription keySchema = schema.getChildren().get(0);
			TypeDescription valSchema = schema.getChildren().get(1);
			TypeInformation<?> keyType = schemaToTypeInfo(keySchema);
			TypeInformation<?> valType = schemaToTypeInfo(valSchema);
			return new MapTypeInfo<>(keyType, valType);
		case UNION:
			throw new UnsupportedOperationException("UNION type is not supported yet.");
		default:
			throw new IllegalArgumentException("Unknown type " + schema);
	}
}