org.apache.flink.table.api.DataTypes Java Examples

The following examples show how to use org.apache.flink.table.api.DataTypes. Each example is taken from an open-source project; the source file and project license are noted above it.
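Before the examples, here is a minimal, self-contained sketch of how DataTypes composes SQL types. The class name and schema are illustrative only and not taken from any project below:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class DataTypesSketch {
	public static void main(String[] args) {
		// compose a row type from named fields
		DataType rowType = DataTypes.ROW(
				DataTypes.FIELD("id", DataTypes.BIGINT().notNull()),
				DataTypes.FIELD("name", DataTypes.STRING()),
				DataTypes.FIELD("tags", DataTypes.ARRAY(DataTypes.STRING())));
		// every DataType carries a logical type describing its SQL semantics
		System.out.println(rowType.getLogicalType().asSummaryString());
	}
}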
Example #1
Source File: BinaryRowTest.java    From flink with Apache License 2.0
@Test
public void testGenericMap() {
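	// build a map containing a null value and write it into field 0 of a binary row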
	Map<Object, Object> javaMap = new HashMap<>();
	javaMap.put(6, BinaryString.fromString("6"));
	javaMap.put(5, BinaryString.fromString("5"));
	javaMap.put(666, BinaryString.fromString("666"));
	javaMap.put(0, null);

	GenericMap genericMap = new GenericMap(javaMap);

	BinaryRow row = new BinaryRow(1);
	BinaryRowWriter rowWriter = new BinaryRowWriter(row);
	BaseMapSerializer serializer = new BaseMapSerializer(
		DataTypes.INT().getLogicalType(),
		DataTypes.STRING().getLogicalType(),
		new ExecutionConfig());
	rowWriter.writeMap(0, genericMap, serializer);
	rowWriter.complete();

	Map<Object, Object> map = row.getMap(0).toJavaMap(DataTypes.INT().getLogicalType(), DataTypes.STRING().getLogicalType());
	assertEquals(BinaryString.fromString("6"), map.get(6));
	assertEquals(BinaryString.fromString("5"), map.get(5));
	assertEquals(BinaryString.fromString("666"), map.get(666));
	assertTrue(map.containsKey(0));
	assertNull(map.get(0));
}
 
Example #2
Source File: Elasticsearch6DynamicSinkFactoryTest.java    From flink with Apache License 2.0
@Test
public void validateWrongFlushSize() {
	Elasticsearch6DynamicSinkFactory sinkFactory = new Elasticsearch6DynamicSinkFactory();

	thrown.expect(ValidationException.class);
	thrown.expectMessage(
		"'sink.bulk-flush.max-size' must be in MB granularity. Got: 1024 bytes");
	sinkFactory.createDynamicTableSink(
		context()
			.withSchema(TableSchema.builder()
				.field("a", DataTypes.TIME())
				.build())
			.withOption(ElasticsearchOptions.INDEX_OPTION.key(), "MyIndex")
			.withOption(ElasticsearchOptions.DOCUMENT_TYPE_OPTION.key(), "MyType")
			.withOption(ElasticsearchOptions.HOSTS_OPTION.key(), "http://localhost:1234")
			.withOption(ElasticsearchOptions.BULK_FLASH_MAX_SIZE_OPTION.key(), "1kb")
			.build()
	);
}
 
Example #3
Source File: SchemaUtils.java    From pulsar-flink with Apache License 2.0
public static FieldsDataType pulsarSourceSchema(SchemaInfo si) throws IncompatibleSchemaException {
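    // row types keep their named fields; any other type is wrapped in a single "value" field before the metadata fields are appended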
    List<DataTypes.Field> mainSchema = new ArrayList<>();
    DataType dataType = si2SqlType(si);
    if (dataType instanceof FieldsDataType) {
        FieldsDataType fieldsDataType = (FieldsDataType) dataType;
        RowType rowType = (RowType) fieldsDataType.getLogicalType();
        rowType.getFieldNames().stream()
                .map(fieldName -> DataTypes.FIELD(fieldName, fieldsDataType.getFieldDataTypes().get(fieldName)))
                .forEach(mainSchema::add);
    } else {
        mainSchema.add(DataTypes.FIELD("value", dataType));
    }

    mainSchema.addAll(METADATA_FIELDS);
    return (FieldsDataType) DataTypes.ROW(mainSchema.toArray(new DataTypes.Field[0]));
}
 
Example #4
Source File: HiveGenericUDAFTest.java    From flink with Apache License 2.0
@Test
public void testUDAFCount() throws Exception {
	Object[] constantArgs = new Object[] {
		null
	};

	DataType[] argTypes = new DataType[] {
		DataTypes.DOUBLE()
	};

	HiveGenericUDAF udf = init(GenericUDAFCount.class, constantArgs, argTypes);

	GenericUDAFEvaluator.AggregationBuffer acc = udf.createAccumulator();

	udf.accumulate(acc, 0.5d);
	udf.accumulate(acc, 0.3d);
	udf.accumulate(acc, 5.3d);

	udf.merge(acc, Arrays.asList());

	assertEquals(3L, udf.getValue(acc));
}
 
Example #5
Source File: KafkaSourceMain.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
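    // consume Kafka as raw strings, register the stream as a table, and write word counts through a retract sink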
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
    DataStream<String> dataStream = blinkStreamEnv.addSource(new FlinkKafkaConsumer011<>(parameterTool.get("kafka.topic"), new SimpleStringSchema(), properties));
    Table table = blinkStreamTableEnv.fromDataStream(dataStream, "word");
    blinkStreamTableEnv.registerTable("kafkaDataStream", table);

    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"_count", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");

    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Kafka Table Source");
}
 
Example #6
Source File: Elasticsearch7DynamicSinkFactoryTest.java    From flink with Apache License 2.0
@Test
public void validateWrongFlushSize() {
	Elasticsearch7DynamicSinkFactory sinkFactory = new Elasticsearch7DynamicSinkFactory();

	thrown.expect(ValidationException.class);
	thrown.expectMessage(
		"'sink.bulk-flush.max-size' must be in MB granularity. Got: 1024 bytes");
	sinkFactory.createDynamicTableSink(
		context()
			.withSchema(TableSchema.builder()
				.field("a", DataTypes.TIME())
				.build())
			.withOption(ElasticsearchOptions.INDEX_OPTION.key(), "MyIndex")
			.withOption(ElasticsearchOptions.HOSTS_OPTION.key(), "http://localhost:1234")
			.withOption(ElasticsearchOptions.BULK_FLASH_MAX_SIZE_OPTION.key(), "1kb")
			.build()
	);
}
 
Example #7
Source File: HiveGenericUDFTest.java    From flink with Apache License 2.0
@Test
public void testDecode() {
	String constDecoding = "UTF-8";

	HiveGenericUDF udf = init(
		GenericUDFDecode.class,
		new Object[] {
			null,
			constDecoding
		},
		new DataType[] {
			DataTypes.BYTES(),
			DataTypes.STRING()
		}
	);

	HiveSimpleUDF simpleUDF = HiveSimpleUDFTest.init(
		UDFUnhex.class,
		new DataType[]{
			DataTypes.STRING()
		});

	assertEquals("MySQL", udf.eval(simpleUDF.eval("4D7953514C"), constDecoding));
}
 
Example #8
Source File: DataTypeExtractorTest.java    From flink with Apache License 2.0
/**
 * Testing data type shared with the Scala tests.
 */
public static DataType getPojoWithCustomOrderDataType(Class<?> pojoClass) {
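	// attributes are declared in the custom order z, y, x rather than the POJO's declaration order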
	final StructuredType.Builder builder = StructuredType.newBuilder(pojoClass);
	builder.attributes(
		Arrays.asList(
			new StructuredAttribute(
				"z",
				new BigIntType()),
			new StructuredAttribute(
				"y",
				new BooleanType()),
			new StructuredAttribute(
				"x",
				new IntType())));
	builder.setFinal(true);
	builder.setInstantiable(true);
	final StructuredType structuredType = builder.build();

	final List<DataType> fieldDataTypes = Arrays.asList(
		DataTypes.BIGINT(),
		DataTypes.BOOLEAN(),
		DataTypes.INT()
	);

	return new FieldsDataType(structuredType, pojoClass, fieldDataTypes);
}
 
Example #9
Source File: Elasticsearch7DynamicSinkFactoryTest.java    From flink with Apache License 2.0
@Test
public void validateWrongIndex() {
	Elasticsearch7DynamicSinkFactory sinkFactory = new Elasticsearch7DynamicSinkFactory();

	thrown.expect(ValidationException.class);
	thrown.expectMessage(
		"'index' must not be empty");
	sinkFactory.createDynamicTableSink(
		context()
			.withSchema(TableSchema.builder()
				.field("a", DataTypes.TIME())
				.build())
			.withOption("index", "")
			.withOption("hosts", "http://localhost:12345")
			.build()
	);
}
 
Example #10
Source File: HiveGenericUDFTest.java    From flink with Apache License 2.0
@Test
public void testCase() {
	HiveGenericUDF udf = init(
		GenericUDFCase.class,
		new Object[] {
			null,
			"1",
			"a",
			"b"
		},
		new DataType[] {
			DataTypes.STRING(),
			DataTypes.STRING(),
			DataTypes.STRING(),
			DataTypes.STRING()
		}
	);

	assertEquals("a", udf.eval("1", "1", "a", "b"));
	assertEquals("b", udf.eval("2", "1", "a", "b"));
}
 
Example #11
Source File: HiveTableFactoryTest.java    From flink with Apache License 2.0
@Test
public void testGenericTable() throws Exception {
	TableSchema schema = TableSchema.builder()
		.field("name", DataTypes.STRING())
		.field("age", DataTypes.INT())
		.build();

	Map<String, String> properties = new HashMap<>();
	properties.put(CatalogConfig.IS_GENERIC, String.valueOf(true));
	properties.put("connector", "COLLECTION");

	catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
	ObjectPath path = new ObjectPath("mydb", "mytable");
	CatalogTable table = new CatalogTableImpl(schema, properties, "csv table");
	catalog.createTable(path, table, true);
	Optional<TableFactory> opt = catalog.getTableFactory();
	assertTrue(opt.isPresent());
	HiveTableFactory tableFactory = (HiveTableFactory) opt.get();
	TableSource tableSource = tableFactory.createTableSource(path, table);
	assertTrue(tableSource instanceof StreamTableSource);
	TableSink tableSink = tableFactory.createTableSink(path, table);
	assertTrue(tableSink instanceof StreamTableSink);
}
 
Example #12
Source File: SqlToOperationConverterTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableAddPkConstraintEnforced() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.field("c", DataTypes.BIGINT())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Test alter table add enforced
	thrown.expect(ValidationException.class);
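	// the "constaint" typo and doubled space below match the actual error message produced by this Flink version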
	thrown.expectMessage("Flink doesn't support ENFORCED mode for PRIMARY KEY constaint. "
			+ "ENFORCED/NOT ENFORCED  controls if the constraint checks are performed on the "
			+ "incoming/outgoing data. Flink does not own the data therefore the "
			+ "only supported mode is the NOT ENFORCED mode");
	parse("alter table tb1 add constraint ct1 primary key(a, b)",
			SqlDialect.DEFAULT);
}
 
Example #13
Source File: DataTypePrecisionFixer.java    From flink with Apache License 2.0
@Override
public DataType visit(CollectionDataType collectionDataType) {
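	// rebuild the element type with the precision carried by the corresponding logical element type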
	DataType elementType = collectionDataType.getElementDataType();
	switch (logicalType.getTypeRoot()) {
		case ARRAY:
			ArrayType arrayType = (ArrayType) logicalType;
			DataType newArrayElementType = elementType
				.accept(new DataTypePrecisionFixer(arrayType.getElementType()));
			return DataTypes
				.ARRAY(newArrayElementType)
				.bridgedTo(collectionDataType.getConversionClass());

		case MULTISET:
			MultisetType multisetType = (MultisetType) logicalType;
			DataType newMultisetElementType = elementType
				.accept(new DataTypePrecisionFixer(multisetType.getElementType()));
			return DataTypes
				.MULTISET(newMultisetElementType)
				.bridgedTo(collectionDataType.getConversionClass());

		default:
			throw new UnsupportedOperationException("Unsupported logical type : " + logicalType);
	}
}
 
Example #14
Source File: DataTypeExtractorTest.java    From flink with Apache License 2.0
private static DataType getOuterTupleDataType() {
	final StructuredType.Builder builder = StructuredType.newBuilder(Tuple2.class);
	builder.attributes(
		Arrays.asList(
			new StructuredAttribute(
				"f0",
				new IntType()),
			new StructuredAttribute(
				"f1",
				getInnerTupleDataType().getLogicalType())));
	builder.setFinal(true);
	builder.setInstantiable(true);
	final StructuredType structuredType = builder.build();

	final List<DataType> fieldDataTypes = Arrays.asList(
		DataTypes.INT(),
		getInnerTupleDataType()
	);

	return new FieldsDataType(structuredType, Tuple2.class, fieldDataTypes);
}
 
Example #15
Source File: ListAggFunction.java    From flink with Apache License 2.0
@Override
public Expression[] initialValuesExpressions() {
	return new Expression[] {
			/* delimiter */ literal(",", DataTypes.STRING()),
			/* acc */ nullOf(DataTypes.STRING())
	};
}
 
Example #16
Source File: ListAggFunction.java    From flink with Apache License 2.0
@Override
public Expression[] initialValuesExpressions() {
	return new Expression[] {
			/* delimiter */ literal(",", DataTypes.STRING().notNull()),
			/* acc */ nullOf(DataTypes.STRING())
	};
}
 
Example #17
Source File: PrintUtilsTest.java    From flink with Apache License 2.0
private TableSchema getSchema() {
	return TableSchema.builder()
			.field("boolean", DataTypes.BOOLEAN())
			.field("int", DataTypes.INT())
			.field("bigint", DataTypes.BIGINT())
			.field("varchar", DataTypes.STRING())
			.field("decimal(10, 5)", DataTypes.DECIMAL(10, 5))
			.field("timestamp", DataTypes.TIMESTAMP(6))
			.build();
}
 
Example #18
Source File: ExpressionTest.java    From flink with Apache License 2.0
@Test
public void testInvalidValueLiteral() {
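	// an Integer value cannot back a TINYINT literal, so construction must fail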
	thrown.expect(ValidationException.class);
	thrown.expectMessage("does not support a value literal of class 'java.lang.Integer'");

	new ValueLiteralExpression(12, DataTypes.TINYINT().notNull());
}
 
Example #19
Source File: TypeMappingUtilsTest.java    From flink with Apache License 2.0
@Test
public void testFieldMappingNonMatchingTypes() {
	thrown.expect(ValidationException.class);
	thrown.expectMessage("Type TIMESTAMP(3) of table field 'f0' does not match with the physical type STRING of " +
		"the 'f0' field of the TableSource return type.");
	TypeMappingUtils.computePhysicalIndices(
		TableSchema.builder()
			.field("f1", DataTypes.BIGINT())
			.field("f0", DataTypes.TIMESTAMP(3))
			.build().getTableColumns(),
		ROW(FIELD("f0", DataTypes.STRING()), FIELD("f1", DataTypes.BIGINT())),
		Function.identity()
	);
}
 
Example #20
Source File: HiveGenericUDTFTest.java    From flink with Apache License 2.0
@Test
public void testOverSumInt() throws Exception {
	Object[] constantArgs = new Object[] {
		null,
		4
	};

	DataType[] dataTypes = new DataType[] {
		DataTypes.INT(),
		DataTypes.INT()
	};

	HiveGenericUDTF udf = init(
		TestOverSumIntUDTF.class,
		constantArgs,
		dataTypes
	);

	udf.eval(5, 4);

	assertEquals(Arrays.asList(Row.of(9), Row.of(9)), collector.result);

	// Test empty input and empty output
	constantArgs = new Object[] {};

	dataTypes = new DataType[] {};

	udf = init(
		TestOverSumIntUDTF.class,
		constantArgs,
		dataTypes
	);

	udf.eval();

	assertEquals(Arrays.asList(), collector.result);
}
 
Example #21
Source File: TypeConversionsTest.java    From flink with Apache License 2.0
@Test
public void testDateToTimestampWithLocalTimeZone() {
	config().setLocalTimeZone(ZoneOffset.ofHours(2));
	testTableApi(
		lit(LocalDate.parse("1970-02-01")).cast(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(0)),
		"1970-02-01 00:00:00"
	);

	testSqlApi(
		"cast(DATE '1970-02-01' AS TIMESTAMP(0) WITH LOCAL TIME ZONE)",
		"1970-02-01 00:00:00"
	);
}
 
Example #22
Source File: HiveGenericUDFTest.java    From flink with Apache License 2.0
@Test
public void testDateFormat() {
	String constYear = "y";
	String constMonth = "M";

	HiveGenericUDF udf = init(
		GenericUDFDateFormat.class,
		new Object[] {
			null,
			constYear
		},
		new DataType[] {
			DataTypes.STRING(),
			DataTypes.STRING()
		}
	);

	assertEquals("2009", udf.eval("2009-08-31", constYear));

	udf = init(
		GenericUDFDateFormat.class,
		new Object[] {
			null,
			constMonth
		},
		new DataType[] {
			DataTypes.DATE(),
			DataTypes.STRING()
		}
	);

	assertEquals("8", udf.eval(Date.valueOf("2019-08-31"), constMonth));
}
 
Example #23
Source File: ListAggFunction.java    From flink with Apache License 2.0
public ListAggFunction(int operandCount) {
	this.operandCount = operandCount;
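	// with a single operand the delimiter defaults to ","; otherwise the second operand supplies it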
	if (operandCount == 1) {
		delimiter = literal(",", DataTypes.STRING());
		operand = operand(0);
	} else {
		delimiter = operand(1);
		operand = operand(0);
	}
}
 
Example #24
Source File: FlinkTableITCase.java    From flink-connectors with Apache License 2.0
@Test
public void testBatchTableSinkUsingDescriptor() throws Exception {

    // create a Pravega stream for test purposes
    Stream stream = Stream.of(setupUtils.getScope(), "testBatchTableSinkUsingDescriptor");
    this.setupUtils.createTestStream(stream.getStreamName(), 1);

    // create a Flink Table environment
    ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(1);
    BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env);

    Table table = tableEnv.fromDataSet(env.fromCollection(SAMPLES));

    Pravega pravega = new Pravega();
    pravega.tableSinkWriterBuilder()
            .withRoutingKeyField("category")
            .forStream(stream)
            .withPravegaConfig(setupUtils.getPravegaConfig());

    ConnectTableDescriptor desc = tableEnv.connect(pravega)
            .withFormat(new Json().failOnMissingField(true))
            .withSchema(new Schema()
                    .field("category", DataTypes.STRING())
                    .field("value", DataTypes.INT()));
    desc.createTemporaryTable("test");

    final Map<String, String> propertiesMap = desc.toProperties();
    final TableSink<?> sink = TableFactoryService.find(BatchTableSinkFactory.class, propertiesMap)
            .createBatchTableSink(propertiesMap);

    String tableSinkPath = tableEnv.getCurrentDatabase() + "." + "PravegaSink";

    ConnectorCatalogTable<?, ?> connectorCatalogSinkTable = ConnectorCatalogTable.sink(sink, true);

    tableEnv.getCatalog(tableEnv.getCurrentCatalog()).get().createTable(
            ObjectPath.fromString(tableSinkPath),
            connectorCatalogSinkTable, false);
    table.insertInto("PravegaSink");
    env.execute();
}
 
Example #25
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0
@Test
public void testCharTypeLength() throws Exception {
	DataType[] types = new DataType[] {
		DataTypes.CHAR(HiveChar.MAX_CHAR_LENGTH + 1)
	};

	exception.expect(CatalogException.class);
	verifyDataTypes(types);
}
 
Example #26
Source File: HiveGenericUDFTest.java    From flink with Apache License 2.0
@Test
public void testArray() {
	HiveGenericUDF udf = init(
		TestGenericUDFArray.class,
		new Object[] {
			null
		},
		new DataType[] {
			DataTypes.ARRAY(DataTypes.INT())
		}
	);

	assertEquals(6, udf.eval(1, 2, 3));
	assertEquals(6, udf.eval(new Integer[] { 1, 2, 3 }));
}
 
Example #27
Source File: HiveSimpleUDFTest.java    From flink with Apache License 2.0
@Test
public void testUDFBase64() {
	HiveSimpleUDF udf = init(
		UDFBase64.class,
		new DataType[]{
			DataTypes.BYTES()
		});

	assertEquals("Cg==", udf.eval(new byte[] {10}));
}
 
Example #28
Source File: OrcSplitReaderUtilTest.java    From flink with Apache License 2.0
@Test
public void testLogicalTypeToOrcType() {
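	// each Flink DataType should map to the ORC type description string given on the left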
	test("boolean", DataTypes.BOOLEAN());
	test("char(123)", DataTypes.CHAR(123));
	test("varchar(123)", DataTypes.VARCHAR(123));
	test("string", DataTypes.STRING());
	test("binary", DataTypes.BYTES());
	test("tinyint", DataTypes.TINYINT());
	test("smallint", DataTypes.SMALLINT());
	test("int", DataTypes.INT());
	test("bigint", DataTypes.BIGINT());
	test("float", DataTypes.FLOAT());
	test("double", DataTypes.DOUBLE());
	test("date", DataTypes.DATE());
	test("timestamp", DataTypes.TIMESTAMP());
	test("array<float>", DataTypes.ARRAY(DataTypes.FLOAT()));
	test("map<float,bigint>", DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()));
	test("struct<int0:int,str1:string,double2:double,row3:struct<int0:int,int1:int>>",
			DataTypes.ROW(
					DataTypes.FIELD("int0", DataTypes.INT()),
					DataTypes.FIELD("str1", DataTypes.STRING()),
					DataTypes.FIELD("double2", DataTypes.DOUBLE()),
					DataTypes.FIELD(
							"row3",
							DataTypes.ROW(
									DataTypes.FIELD("int0", DataTypes.INT()),
									DataTypes.FIELD("int1", DataTypes.INT()))
					)));
	test("decimal(4,2)", DataTypes.DECIMAL(4, 2));
}
 
Example #29
Source File: ExpressionTest.java    From flink with Apache License 2.0
@Test
public void testValueLiteralString() {
	assertEquals(
		"[null, null, [1, 2, 3]]",
		new ValueLiteralExpression(new Integer[][]{null, null, {1, 2, 3}}).toString());

	assertEquals(
		"[null, null, ['1', '2', '3', 'Dog''s']]",
		new ValueLiteralExpression(
				new String[][]{null, null, {"1", "2", "3", "Dog's"}},
				DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.STRING())))
			.toString());
}
 