org.apache.flink.table.api.DataTypes Java Examples

The following examples show how to use org.apache.flink.table.api.DataTypes. Each example is taken from an open-source project; the source project, author, file, and license are noted above it.
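As a quick orientation, here is a minimal sketch (not taken from any of the projects below; the class name and field names are purely illustrative) of how the DataTypes factory methods are typically combined to describe a row type:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class DataTypesSketch {
    public static void main(String[] args) {
        // Compose a row type from nested field types via the DataTypes factory methods.
        DataType orderType = DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.BIGINT().notNull()),
                DataTypes.FIELD("price", DataTypes.DECIMAL(10, 2)),
                DataTypes.FIELD("tags", DataTypes.ARRAY(DataTypes.STRING())),
                DataTypes.FIELD("ts", DataTypes.TIMESTAMP(3)));

        // A DataType wraps a LogicalType together with a conversion-class hint.
        System.out.println(orderType);
        System.out.println(orderType.getLogicalType());
    }
}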
Example #1
Source Project: pulsar-flink   Author: streamnative   File: SchemaUtils.java    License: Apache License 2.0
public static FieldsDataType pulsarSourceSchema(SchemaInfo si) throws IncompatibleSchemaException {
    List<DataTypes.Field> mainSchema = new ArrayList<>();
    DataType dataType = si2SqlType(si);
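    // A row-typed Pulsar schema is flattened into its individual fields; any other type is wrapped in a single "value" field.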
    if (dataType instanceof FieldsDataType) {
        FieldsDataType fieldsDataType = (FieldsDataType) dataType;
        RowType rowType = (RowType) fieldsDataType.getLogicalType();
        rowType.getFieldNames().stream()
                .map(fieldName -> DataTypes.FIELD(fieldName, fieldsDataType.getFieldDataTypes().get(fieldName)))
                .forEach(mainSchema::add);
    } else {
        mainSchema.add(DataTypes.FIELD("value", dataType));
    }

    mainSchema.addAll(METADATA_FIELDS);
    return (FieldsDataType) DataTypes.ROW(mainSchema.toArray(new DataTypes.Field[0]));
}
 
Example #2
Source Project: flink   Author: apache   File: Elasticsearch6DynamicSinkFactoryTest.java    License: Apache License 2.0
@Test
public void validateWrongFlushSize() {
	Elasticsearch6DynamicSinkFactory sinkFactory = new Elasticsearch6DynamicSinkFactory();

	thrown.expect(ValidationException.class);
	thrown.expectMessage(
		"'sink.bulk-flush.max-size' must be in MB granularity. Got: 1024 bytes");
	sinkFactory.createDynamicTableSink(
		context()
			.withSchema(TableSchema.builder()
				.field("a", DataTypes.TIME())
				.build())
			.withOption(ElasticsearchOptions.INDEX_OPTION.key(), "MyIndex")
			.withOption(ElasticsearchOptions.DOCUMENT_TYPE_OPTION.key(), "MyType")
			.withOption(ElasticsearchOptions.HOSTS_OPTION.key(), "http://localhost:1234")
			.withOption(ElasticsearchOptions.BULK_FLASH_MAX_SIZE_OPTION.key(), "1kb")
			.build()
	);
}
 
Example #3
Source Project: flink   Author: apache   File: HiveGenericUDAFTest.java    License: Apache License 2.0
@Test
public void testUDAFCount() throws Exception {
	Object[] constantArgs = new Object[] {
		null
	};

	DataType[] argTypes = new DataType[] {
		DataTypes.DOUBLE()
	};

	HiveGenericUDAF udf = init(GenericUDAFCount.class, constantArgs, argTypes);

	GenericUDAFEvaluator.AggregationBuffer acc = udf.createAccumulator();

	udf.accumulate(acc, 0.5d);
	udf.accumulate(acc, 0.3d);
	udf.accumulate(acc, 5.3d);

	udf.merge(acc, Arrays.asList());

	assertEquals(3L, udf.getValue(acc));
}
 
Example #4
Source Project: flink-learning   Author: zhisheng17   File: KafkaSourceMain.java    License: Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
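    // Read the configured Kafka topic as plain strings and expose it to the Table API as a single-column table named "word".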
    DataStream<String> dataStream = blinkStreamEnv.addSource(new FlinkKafkaConsumer011<>(parameterTool.get("kafka.topic"), new SimpleStringSchema(), properties));
    Table table = blinkStreamTableEnv.fromDataStream(dataStream, "word");
    blinkStreamTableEnv.registerTable("kafkaDataStream", table);

    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"_count", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");

    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Kafka Table Source");
}
 
Example #5
Source Project: flink   Author: apache   File: Elasticsearch7DynamicSinkFactoryTest.java    License: Apache License 2.0
@Test
public void validateWrongFlushSize() {
	Elasticsearch7DynamicSinkFactory sinkFactory = new Elasticsearch7DynamicSinkFactory();

	thrown.expect(ValidationException.class);
	thrown.expectMessage(
		"'sink.bulk-flush.max-size' must be in MB granularity. Got: 1024 bytes");
	sinkFactory.createDynamicTableSink(
		context()
			.withSchema(TableSchema.builder()
				.field("a", DataTypes.TIME())
				.build())
			.withOption(ElasticsearchOptions.INDEX_OPTION.key(), "MyIndex")
			.withOption(ElasticsearchOptions.HOSTS_OPTION.key(), "http://localhost:1234")
			.withOption(ElasticsearchOptions.BULK_FLASH_MAX_SIZE_OPTION.key(), "1kb")
			.build()
	);
}
 
Example #6
Source Project: flink   Author: apache   File: HiveGenericUDFTest.java    License: Apache License 2.0
@Test
public void testDecode() {
	String constDecoding = "UTF-8";

	HiveGenericUDF udf = init(
		GenericUDFDecode.class,
		new Object[] {
			null,
			constDecoding
		},
		new DataType[] {
			DataTypes.BYTES(),
			DataTypes.STRING()
		}
	);

	HiveSimpleUDF simpleUDF = HiveSimpleUDFTest.init(
		UDFUnhex.class,
		new DataType[]{
			DataTypes.STRING()
		});

	assertEquals("MySQL", udf.eval(simpleUDF.eval("4D7953514C"), constDecoding));
}
 
Example #7
Source Project: flink   Author: apache   File: DataTypeExtractorTest.java    License: Apache License 2.0
/**
 * Testing data type shared with the Scala tests.
 */
public static DataType getPojoWithCustomOrderDataType(Class<?> pojoClass) {
	final StructuredType.Builder builder = StructuredType.newBuilder(pojoClass);
	builder.attributes(
		Arrays.asList(
			new StructuredAttribute(
				"z",
				new BigIntType()),
			new StructuredAttribute(
				"y",
				new BooleanType()),
			new StructuredAttribute(
				"x",
				new IntType())));
	builder.setFinal(true);
	builder.setInstantiable(true);
	final StructuredType structuredType = builder.build();

	final List<DataType> fieldDataTypes = Arrays.asList(
		DataTypes.BIGINT(),
		DataTypes.BOOLEAN(),
		DataTypes.INT()
	);

	return new FieldsDataType(structuredType, pojoClass, fieldDataTypes);
}
 
Example #8
Source Project: flink   Author: apache   File: Elasticsearch7DynamicSinkFactoryTest.java    License: Apache License 2.0
@Test
public void validateWrongIndex() {
	Elasticsearch7DynamicSinkFactory sinkFactory = new Elasticsearch7DynamicSinkFactory();

	thrown.expect(ValidationException.class);
	thrown.expectMessage(
		"'index' must not be empty");
	sinkFactory.createDynamicTableSink(
		context()
			.withSchema(TableSchema.builder()
				.field("a", DataTypes.TIME())
				.build())
			.withOption("index", "")
			.withOption("hosts", "http://localhost:12345")
			.build()
	);
}
 
Example #9
Source Project: flink   Author: flink-tpc-ds   File: HiveGenericUDFTest.java    License: Apache License 2.0
@Test
public void testCase() {
	HiveGenericUDF udf = init(
		GenericUDFCase.class,
		new Object[] {
			null,
			"1",
			"a",
			"b"
		},
		new DataType[] {
			DataTypes.STRING(),
			DataTypes.STRING(),
			DataTypes.STRING(),
			DataTypes.STRING()
		}
	);

	assertEquals("a", udf.eval("1", "1", "a", "b"));
	assertEquals("b", udf.eval("2", "1", "a", "b"));
}
 
Example #10
Source Project: flink   Author: flink-tpc-ds   File: HiveTableFactoryTest.java    License: Apache License 2.0
@Test
public void testGenericTable() throws Exception {
	TableSchema schema = TableSchema.builder()
		.field("name", DataTypes.STRING())
		.field("age", DataTypes.INT())
		.build();

	Map<String, String> properties = new HashMap<>();
	properties.put(CatalogConfig.IS_GENERIC, String.valueOf(true));
	properties.put("connector", "COLLECTION");

	catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
	ObjectPath path = new ObjectPath("mydb", "mytable");
	CatalogTable table = new CatalogTableImpl(schema, properties, "csv table");
	catalog.createTable(path, table, true);
	Optional<TableFactory> opt = catalog.getTableFactory();
	assertTrue(opt.isPresent());
	HiveTableFactory tableFactory = (HiveTableFactory) opt.get();
	TableSource tableSource = tableFactory.createTableSource(path, table);
	assertTrue(tableSource instanceof StreamTableSource);
	TableSink tableSink = tableFactory.createTableSink(path, table);
	assertTrue(tableSink instanceof StreamTableSink);
}
 
Example #11
Source Project: flink   Author: apache   File: SqlToOperationConverterTest.java    License: Apache License 2.0
@Test
public void testAlterTableAddPkConstraintEnforced() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.field("c", DataTypes.BIGINT())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Test alter table add enforced
	thrown.expect(ValidationException.class);
	thrown.expectMessage("Flink doesn't support ENFORCED mode for PRIMARY KEY constaint. "
			+ "ENFORCED/NOT ENFORCED  controls if the constraint checks are performed on the "
			+ "incoming/outgoing data. Flink does not own the data therefore the "
			+ "only supported mode is the NOT ENFORCED mode");
	parse("alter table tb1 add constraint ct1 primary key(a, b)",
			SqlDialect.DEFAULT);
}
 
Example #12
Source Project: flink   Author: apache   File: DataTypePrecisionFixer.java    License: Apache License 2.0
@Override
public DataType visit(CollectionDataType collectionDataType) {
	DataType elementType = collectionDataType.getElementDataType();
	switch (logicalType.getTypeRoot()) {
		case ARRAY:
			ArrayType arrayType = (ArrayType) logicalType;
			DataType newArrayElementType = elementType
				.accept(new DataTypePrecisionFixer(arrayType.getElementType()));
			return DataTypes
				.ARRAY(newArrayElementType)
				.bridgedTo(collectionDataType.getConversionClass());

		case MULTISET:
			MultisetType multisetType = (MultisetType) logicalType;
			DataType newMultisetElementType = elementType
				.accept(new DataTypePrecisionFixer(multisetType.getElementType()));
			return DataTypes
				.MULTISET(newMultisetElementType)
				.bridgedTo(collectionDataType.getConversionClass());

		default:
			throw new UnsupportedOperationException("Unsupported logical type : " + logicalType);
	}
}
 
Example #13
Source Project: flink   Author: apache   File: DataTypeExtractorTest.java    License: Apache License 2.0
private static DataType getOuterTupleDataType() {
	final StructuredType.Builder builder = StructuredType.newBuilder(Tuple2.class);
	builder.attributes(
		Arrays.asList(
			new StructuredAttribute(
				"f0",
				new IntType()),
			new StructuredAttribute(
				"f1",
				getInnerTupleDataType().getLogicalType())));
	builder.setFinal(true);
	builder.setInstantiable(true);
	final StructuredType structuredType = builder.build();

	final List<DataType> fieldDataTypes = Arrays.asList(
		DataTypes.INT(),
		getInnerTupleDataType()
	);

	return new FieldsDataType(structuredType, Tuple2.class, fieldDataTypes);
}
 
Example #14
Source Project: flink   Author: flink-tpc-ds   File: BinaryRowTest.java    License: Apache License 2.0
@Test
public void testGenericMap() {
	Map javaMap = new HashMap();
	javaMap.put(6, BinaryString.fromString("6"));
	javaMap.put(5, BinaryString.fromString("5"));
	javaMap.put(666, BinaryString.fromString("666"));
	javaMap.put(0, null);

	GenericMap genericMap = new GenericMap(javaMap);

	BinaryRow row = new BinaryRow(1);
	BinaryRowWriter rowWriter = new BinaryRowWriter(row);
	BaseMapSerializer serializer = new BaseMapSerializer(
		DataTypes.INT().getLogicalType(),
		DataTypes.STRING().getLogicalType(),
		new ExecutionConfig());
	rowWriter.writeMap(0, genericMap, serializer);
	rowWriter.complete();

	Map map = row.getMap(0).toJavaMap(DataTypes.INT().getLogicalType(), DataTypes.STRING().getLogicalType());
	assertEquals(BinaryString.fromString("6"), map.get(6));
	assertEquals(BinaryString.fromString("5"), map.get(5));
	assertEquals(BinaryString.fromString("666"), map.get(666));
	assertTrue(map.containsKey(0));
	assertNull(map.get(0));
}
 
Example #15
Source Project: flink   Author: flink-tpc-ds   File: ListAggFunction.java    License: Apache License 2.0
@Override
public Expression[] initialValuesExpressions() {
	return new Expression[] {
			/* delimiter */ literal(",", DataTypes.STRING()),
			/* acc */ nullOf(DataTypes.STRING())
	};
}
 
Example #16
Source Project: flink   Author: flink-tpc-ds   File: ExpressionTest.java    License: Apache License 2.0
@Test
public void testInvalidValueLiteral() {
	thrown.expect(ValidationException.class);
	thrown.expectMessage("does not support a value literal of class 'java.lang.Integer'");

	new ValueLiteralExpression(12, DataTypes.TINYINT());
}
 
Example #17
Source Project: flink   Author: apache   File: ValuesTest.java    License: Apache License 2.0
@Test
public void testValuesOverrideSchema() {
	JavaStreamTableTestUtil util = javaStreamTestUtil();
	Table t = util.getTableEnv().fromValues(
		DataTypes.ROW(
			DataTypes.FIELD("a", DataTypes.BIGINT()),
			DataTypes.FIELD("b", DataTypes.STRING())),
		row(lit(1).plus(2), "ABC"),
		row(2, "ABC")
	);
	util.verifyPlan(t);
}
 
Example #18
Source Project: flink   Author: apache   File: HiveTableSinkITCase.java    License: Apache License 2.0
private RowTypeInfo createHiveDestTable(String dbName, String tblName, int numPartCols) throws Exception {
	TableSchema.Builder builder = new TableSchema.Builder();
	builder.fields(new String[]{"i", "l", "d", "s"},
			new DataType[]{
					DataTypes.INT(),
					DataTypes.BIGINT(),
					DataTypes.DOUBLE(),
					DataTypes.STRING()});
	return createHiveDestTable(dbName, tblName, builder.build(), numPartCols);
}
 
Example #19
Source Project: flink   Author: flink-tpc-ds   File: HiveCatalogDataTypeTest.java    License: Apache License 2.0
@Test
public void testNonSupportedBinaryDataTypes() throws Exception {
	DataType[] types = new DataType[] {
			DataTypes.BINARY(BinaryType.MAX_LENGTH)
	};

	CatalogTable table = createCatalogTable(types);

	catalog.createDatabase(db1, createDb(), false);

	exception.expect(UnsupportedOperationException.class);
	catalog.createTable(path1, table, false);
}
 
Example #20
Source Project: flink   Author: flink-tpc-ds   File: HiveCatalogDataTypeTest.java    License: Apache License 2.0
@Test
public void testCharTypeLength() throws Exception {
	DataType[] types = new DataType[] {
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH + 1)
	};

	exception.expect(CatalogException.class);
	exception.expectMessage("HiveCatalog doesn't support char type with length of '256'. The maximum length is 255");
	verifyDataTypes(types);
}
 
Example #21
Source Project: flink   Author: apache   File: LegacyDecimalTypeTransformation.java    License: Apache License 2.0
@Override
public DataType transform(DataType typeToTransform) {
	LogicalType logicalType = typeToTransform.getLogicalType();
	if (logicalType instanceof LegacyTypeInformationType && logicalType.getTypeRoot() == LogicalTypeRoot.DECIMAL) {
		DataType decimalType = DataTypes
			.DECIMAL(DecimalType.MAX_PRECISION, 18)
			.bridgedTo(typeToTransform.getConversionClass());
		return logicalType.isNullable() ? decimalType : decimalType.notNull();
	}
	return typeToTransform;
}
 
Example #22
Source Project: flink   Author: flink-tpc-ds   File: ExpressionTest.java    License: Apache License 2.0
@Test
public void testValueLiteralString() {
	assertEquals(
		"[null, null, [1, 2, 3]]",
		new ValueLiteralExpression(new Integer[][]{null, null, {1, 2, 3}}).toString());

	assertEquals(
		"[null, null, ['1', '2', '3', 'Dog''s']]",
		new ValueLiteralExpression(
				new String[][]{null, null, {"1", "2", "3", "Dog's"}},
				DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.STRING())))
			.toString());
}
 
Example #23
Source Project: flink   Author: apache   File: OrcSplitReaderUtilTest.java    License: Apache License 2.0
@Test
public void testLogicalTypeToOrcType() {
	test("boolean", DataTypes.BOOLEAN());
	test("char(123)", DataTypes.CHAR(123));
	test("varchar(123)", DataTypes.VARCHAR(123));
	test("string", DataTypes.STRING());
	test("binary", DataTypes.BYTES());
	test("tinyint", DataTypes.TINYINT());
	test("smallint", DataTypes.SMALLINT());
	test("int", DataTypes.INT());
	test("bigint", DataTypes.BIGINT());
	test("float", DataTypes.FLOAT());
	test("double", DataTypes.DOUBLE());
	test("date", DataTypes.DATE());
	test("timestamp", DataTypes.TIMESTAMP());
	test("array<float>", DataTypes.ARRAY(DataTypes.FLOAT()));
	test("map<float,bigint>", DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()));
	test("struct<int0:int,str1:string,double2:double,row3:struct<int0:int,int1:int>>",
			DataTypes.ROW(
					DataTypes.FIELD("int0", DataTypes.INT()),
					DataTypes.FIELD("str1", DataTypes.STRING()),
					DataTypes.FIELD("double2", DataTypes.DOUBLE()),
					DataTypes.FIELD(
							"row3",
							DataTypes.ROW(
									DataTypes.FIELD("int0", DataTypes.INT()),
									DataTypes.FIELD("int1", DataTypes.INT()))
					)));
	test("decimal(4,2)", DataTypes.DECIMAL(4, 2));
}
 
Example #24
Source Project: flink   Author: flink-tpc-ds   File: HiveSimpleUDFTest.java    License: Apache License 2.0
@Test
public void testUDFUnbase64() {
	HiveSimpleUDF udf = init(
		UDFBase64.class,
		new DataType[]{
			DataTypes.BYTES()
		});

	assertEquals("Cg==", udf.eval(new byte[] {10}));
}
 
Example #25
Source Project: flink   Author: apache   File: HiveGenericUDFTest.java    License: Apache License 2.0
@Test
public void testArray() {
	HiveGenericUDF udf = init(
		TestGenericUDFArray.class,
		new Object[] {
			null
		},
		new DataType[] {
			DataTypes.ARRAY(DataTypes.INT())
		}
	);

	assertEquals(6, udf.eval(1, 2, 3));
	assertEquals(6, udf.eval(new Integer[] { 1, 2, 3 }));
}
 
Example #26
Source Project: flink   Author: apache   File: HiveCatalogDataTypeTest.java    License: Apache License 2.0
@Test
public void testCharTypeLength() throws Exception {
	DataType[] types = new DataType[] {
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH + 1)
	};

	exception.expect(CatalogException.class);
	verifyDataTypes(types);
}
 
Example #27
Source Project: flink-connectors   Author: pravega   File: FlinkTableITCase.java    License: Apache License 2.0
@Test
public void testBatchTableSinkUsingDescriptor() throws Exception {

    // create a Pravega stream for test purposes
    Stream stream = Stream.of(setupUtils.getScope(), "testBatchTableSinkUsingDescriptor");
    this.setupUtils.createTestStream(stream.getStreamName(), 1);

    // create a Flink Table environment
    ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(1);
    BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env);

    Table table = tableEnv.fromDataSet(env.fromCollection(SAMPLES));

    Pravega pravega = new Pravega();
    pravega.tableSinkWriterBuilder()
            .withRoutingKeyField("category")
            .forStream(stream)
            .withPravegaConfig(setupUtils.getPravegaConfig());

    ConnectTableDescriptor desc = tableEnv.connect(pravega)
            .withFormat(new Json().failOnMissingField(true))
            .withSchema(new Schema().field("category", DataTypes.STRING()).
                    field("value", DataTypes.INT()));
    desc.createTemporaryTable("test");

    final Map<String, String> propertiesMap = desc.toProperties();
    final TableSink<?> sink = TableFactoryService.find(BatchTableSinkFactory.class, propertiesMap)
            .createBatchTableSink(propertiesMap);

    String tableSinkPath = tableEnv.getCurrentDatabase() + "." + "PravegaSink";

    ConnectorCatalogTable<?, ?> connectorCatalogSinkTable = ConnectorCatalogTable.sink(sink, true);

    tableEnv.getCatalog(tableEnv.getCurrentCatalog()).get().createTable(
            ObjectPath.fromString(tableSinkPath),
            connectorCatalogSinkTable, false);
    table.insertInto("PravegaSink");
    env.execute();
}
 
Example #28
Source Project: flink   Author: flink-tpc-ds   File: ListAggFunction.java    License: Apache License 2.0
public ListAggFunction(int operandCount) {
	this.operandCount = operandCount;
	if (operandCount == 1) {
		delimiter = literal(",", DataTypes.STRING());
		operand = operand(0);
	} else {
		delimiter = operand(1);
		operand = operand(0);
	}
}
 
Example #29
Source Project: flink   Author: flink-tpc-ds   File: HiveGenericUDFTest.java    License: Apache License 2.0
@Test
public void testDateFormat() {
	String constYear = "y";
	String constMonth = "M";

	HiveGenericUDF udf = init(
		GenericUDFDateFormat.class,
		new Object[] {
			null,
			constYear
		},
		new DataType[] {
			DataTypes.STRING(),
			DataTypes.STRING()
		}
	);

	assertEquals("2009", udf.eval("2009-08-31", constYear));

	udf = init(
		GenericUDFDateFormat.class,
		new Object[] {
			null,
			constMonth
		},
		new DataType[] {
			DataTypes.DATE(),
			DataTypes.STRING()
		}
	);

	assertEquals("8", udf.eval(Date.valueOf("2019-08-31"), constMonth));
}
 
Example #30
Source Project: flink   Author: apache   File: TypeConversionsTest.java    License: Apache License 2.0
@Test
public void testDateToTimestampWithLocalTimeZone() {
	config().setLocalTimeZone(ZoneOffset.ofHours(2));
	testTableApi(
		lit(LocalDate.parse("1970-02-01")).cast(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(0)),
		"1970-02-01 00:00:00"
	);

	testSqlApi(
		"cast(DATE '1970-02-01' AS TIMESTAMP(0) WITH LOCAL TIME ZONE)",
		"1970-02-01 00:00:00"
	);
}