Java Code Examples for org.apache.flink.table.api.DataTypes#ROW
The following examples show how to use org.apache.flink.table.api.DataTypes#ROW.
Each example notes the project it comes from and its license above the snippet.
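As a quick orientation before the examples: DataTypes.ROW takes a varargs list of DataTypes.FIELD entries, each pairing a field name with a data type. A minimal sketch (the field names here are illustrative, not taken from the projects below):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

// Build a ROW type with two named fields.
DataType userRow = DataTypes.ROW(
        DataTypes.FIELD("id", DataTypes.INT()),
        DataTypes.FIELD("name", DataTypes.STRING()));
// userRow renders roughly as ROW<`id` INT, `name` STRING>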
Example 1
Source File: SchemaUtils.java From pulsar-flink with Apache License 2.0
public static FieldsDataType pulsarSourceSchema(SchemaInfo si) throws IncompatibleSchemaException {
    List<DataTypes.Field> mainSchema = new ArrayList<>();
    DataType dataType = si2SqlType(si);
    if (dataType instanceof FieldsDataType) {
        // Flatten a record schema into top-level fields.
        FieldsDataType fieldsDataType = (FieldsDataType) dataType;
        RowType rowType = (RowType) fieldsDataType.getLogicalType();
        rowType.getFieldNames().stream()
            .map(fieldName -> DataTypes.FIELD(fieldName, fieldsDataType.getFieldDataTypes().get(fieldName)))
            .forEach(mainSchema::add);
    } else {
        // A primitive schema becomes a single "value" column.
        mainSchema.add(DataTypes.FIELD("value", dataType));
    }
    // Append the connector's metadata columns.
    mainSchema.addAll(METADATA_FIELDS);
    return (FieldsDataType) DataTypes.ROW(mainSchema.toArray(new DataTypes.Field[0]));
}
Example 2
Source File: DebeziumJsonDeserializationSchema.java From flink with Apache License 2.0
private static RowType createJsonRowType(DataType databaseSchema, boolean schemaInclude) {
    DataType payload = DataTypes.ROW(
        DataTypes.FIELD("before", databaseSchema),
        DataTypes.FIELD("after", databaseSchema),
        DataTypes.FIELD("op", DataTypes.STRING()));
    if (schemaInclude) {
        // when Debezium Kafka connect enables "value.converter.schemas.enable",
        // the JSON will contain "schema" information, but we just ignore "schema"
        // and extract data from "payload".
        return (RowType) DataTypes.ROW(
            DataTypes.FIELD("payload", payload)).getLogicalType();
    } else {
        // payload contains some other information, e.g. "source", "ts_ms",
        // but we don't need them.
        return (RowType) payload.getLogicalType();
    }
}
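For orientation, a small hedged sketch of what this produces for a hypothetical two-column physical schema, assuming access to the private helper above (the column names are illustrative):

DataType physical = DataTypes.ROW(
        DataTypes.FIELD("id", DataTypes.INT()),
        DataTypes.FIELD("name", DataTypes.STRING()));
// Without the "schema" wrapper, the Debezium envelope is just before/after/op:
RowType envelope = createJsonRowType(physical, false);
// envelope has fields: before ROW<id INT, name STRING>, after ROW<...>, op STRING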
Example 3
Source File: ValuesTest.java From flink with Apache License 2.0
@Test
public void testValuesFromRowObjectInCollection() {
    JavaStreamTableTestUtil util = javaStreamTestUtil();
    List<Object> data = Arrays.asList(
        row(1, lit("ABC")),
        row(Math.PI, "ABC"),
        row(3.1f, "DEF"),
        row(99L, lit("DEFG")),
        row(0d, "D"));
    DataType rowType = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.DECIMAL(10, 2).notNull()),
        DataTypes.FIELD("b", DataTypes.CHAR(4).notNull()));
    Table t = util.getTableEnv().fromValues(rowType, data);
    util.verifyPlan(t);
}
Example 4
Source File: TypeTransformationsTest.java From flink with Apache License 2.0
@Test
public void testToNullable() {
    DataType dataType = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING().notNull()),
        DataTypes.FIELD("b", DataTypes.TIMESTAMP()),
        DataTypes.FIELD("c", DataTypes.TIMESTAMP(5).notNull()),
        DataTypes.FIELD("d", DataTypes.ARRAY(DataTypes.TIME().notNull())),
        DataTypes.FIELD("e", DataTypes.MAP(DataTypes.DATE().notNull(), DataTypes.TIME(9).notNull())),
        DataTypes.FIELD("f", DataTypes.TIMESTAMP_WITH_TIME_ZONE()));
    DataType expected = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING()),
        DataTypes.FIELD("b", DataTypes.TIMESTAMP()),
        DataTypes.FIELD("c", DataTypes.TIMESTAMP(5)),
        DataTypes.FIELD("d", DataTypes.ARRAY(DataTypes.TIME())),
        DataTypes.FIELD("e", DataTypes.MAP(DataTypes.DATE(), DataTypes.TIME(9))),
        DataTypes.FIELD("f", DataTypes.TIMESTAMP_WITH_TIME_ZONE()));
    assertEquals(expected, DataTypeUtils.transform(dataType, toNullable()));
}
Example 5
Source File: TypeTransformationsTest.java From flink with Apache License 2.0
@Test
public void testLegacyRawToTypeInfoRaw() {
    DataType dataType = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING()),
        DataTypes.FIELD("b", DataTypes.DECIMAL(10, 3)),
        DataTypes.FIELD("c", createLegacyRaw()),
        DataTypes.FIELD("d", DataTypes.ARRAY(createLegacyRaw())));
    TypeInformation<TypeTransformationsTest> typeInformation =
        TypeExtractor.getForClass(TypeTransformationsTest.class);
    DataType expected = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING()),
        DataTypes.FIELD("b", DataTypes.DECIMAL(10, 3)),
        DataTypes.FIELD("c", DataTypes.RAW(typeInformation)),
        DataTypes.FIELD("d", DataTypes.ARRAY(DataTypes.RAW(typeInformation))));
    assertEquals(expected, DataTypeUtils.transform(dataType, legacyRawToTypeInfoRaw()));
}
Example 6
Source File: TypeTransformationsTest.java From flink with Apache License 2.0
@Test
public void testLegacyDecimalToDefaultDecimal() {
    DataType dataType = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING()),
        DataTypes.FIELD("b", DataTypes.DECIMAL(10, 3)),
        DataTypes.FIELD("c", createLegacyDecimal()),
        DataTypes.FIELD("d", DataTypes.ARRAY(createLegacyDecimal())));
    DataType expected = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING()),
        DataTypes.FIELD("b", DataTypes.DECIMAL(10, 3)),
        DataTypes.FIELD("c", DataTypes.DECIMAL(38, 18)),
        DataTypes.FIELD("d", DataTypes.ARRAY(DataTypes.DECIMAL(38, 18))));
    assertEquals(expected, DataTypeUtils.transform(dataType, legacyDecimalToDefaultDecimal()));
}
Example 7
Source File: TypeTransformationsTest.java From flink with Apache License 2.0
@Test
public void testTimeToSqlTypes() {
    DataType dataType = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING()),
        DataTypes.FIELD("b", DataTypes.TIMESTAMP()),
        DataTypes.FIELD("c", DataTypes.TIMESTAMP(5)),
        DataTypes.FIELD("d", DataTypes.ARRAY(DataTypes.TIME())),
        DataTypes.FIELD("e", DataTypes.MAP(DataTypes.DATE(), DataTypes.TIME(9))),
        DataTypes.FIELD("f", DataTypes.TIMESTAMP_WITH_TIME_ZONE()));
    DataType expected = DataTypes.ROW(
        DataTypes.FIELD("a", DataTypes.STRING()),
        DataTypes.FIELD("b", DataTypes.TIMESTAMP().bridgedTo(Timestamp.class)),
        DataTypes.FIELD("c", DataTypes.TIMESTAMP(5).bridgedTo(Timestamp.class)),
        DataTypes.FIELD("d", DataTypes.ARRAY(DataTypes.TIME().bridgedTo(Time.class))),
        DataTypes.FIELD("e", DataTypes.MAP(
            DataTypes.DATE().bridgedTo(Date.class),
            DataTypes.TIME(9).bridgedTo(Time.class))),
        DataTypes.FIELD("f", DataTypes.TIMESTAMP_WITH_TIME_ZONE()));
    assertEquals(expected, DataTypeUtils.transform(dataType, timeToSqlTypes()));
}
Example 8
Source File: HiveCatalogDataTypeTest.java From flink with Apache License 2.0
@Test
public void testComplexDataTypes() throws Exception {
    DataType[] types = new DataType[]{
        DataTypes.ARRAY(DataTypes.DOUBLE()),
        DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()),
        DataTypes.ROW(
            DataTypes.FIELD("0", DataTypes.BOOLEAN()),
            DataTypes.FIELD("1", DataTypes.BOOLEAN()),
            DataTypes.FIELD("2", DataTypes.DATE())),

        // nested complex types
        DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.INT())),
        DataTypes.MAP(DataTypes.STRING(), DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
        DataTypes.ROW(
            DataTypes.FIELD("3", DataTypes.ARRAY(DataTypes.DECIMAL(5, 3))),
            DataTypes.FIELD("4", DataTypes.MAP(DataTypes.TINYINT(), DataTypes.SMALLINT())),
            DataTypes.FIELD("5", DataTypes.ROW(DataTypes.FIELD("3", DataTypes.TIMESTAMP(9)))))
    };

    verifyDataTypes(types);
}
Example 9
Source File: KuduTableSource.java From bahir-flink with Apache License 2.0
@Override
public DataType getProducedDataType() {
    if (projectedFields == null) {
        // No projection pushed down: produce the full table row type.
        return flinkSchema.toRowDataType();
    } else {
        // Build a ROW type containing only the projected columns, in projection order.
        DataTypes.Field[] fields = new DataTypes.Field[projectedFields.length];
        for (int i = 0; i < fields.length; i++) {
            String fieldName = projectedFields[i];
            fields[i] = DataTypes.FIELD(
                fieldName,
                flinkSchema.getTableColumn(fieldName).get().getType());
        }
        return DataTypes.ROW(fields);
    }
}
Example 10
Source File: HiveCatalogDataTypeTest.java From flink with Apache License 2.0
@Test
public void testComplexDataTypes() throws Exception {
    DataType[] types = new DataType[]{
        DataTypes.ARRAY(DataTypes.DOUBLE()),
        DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()),
        DataTypes.ROW(
            DataTypes.FIELD("0", DataTypes.BOOLEAN()),
            DataTypes.FIELD("1", DataTypes.BOOLEAN()),
            DataTypes.FIELD("2", DataTypes.DATE())),

        // nested complex types
        DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.INT())),
        DataTypes.MAP(DataTypes.STRING(), DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
        DataTypes.ROW(
            DataTypes.FIELD("3", DataTypes.ARRAY(DataTypes.DECIMAL(5, 3))),
            DataTypes.FIELD("4", DataTypes.MAP(DataTypes.TINYINT(), DataTypes.SMALLINT())),
            DataTypes.FIELD("5", DataTypes.ROW(DataTypes.FIELD("3", DataTypes.TIMESTAMP()))))
    };

    verifyDataTypes(types);
}
Example 11
Source File: PulsarMetadataReader.java From pulsar-flink with Apache License 2.0
public void putSchema(ObjectPath tablePath, CatalogBaseTable table) throws IncompatibleSchemaException {
    String topic = objectPath2TopicName(tablePath);
    TableSchema tableSchema = table.getSchema();
    // Drop the connector's metadata columns; only physical fields are uploaded.
    List<String> fieldsRemaining = new ArrayList<>(tableSchema.getFieldCount());
    for (String fieldName : tableSchema.getFieldNames()) {
        if (!PulsarOptions.META_FIELD_NAMES.contains(fieldName)) {
            fieldsRemaining.add(fieldName);
        }
    }

    DataType dataType;
    if (fieldsRemaining.size() == 1) {
        // A single remaining field maps to a primitive Pulsar schema.
        dataType = tableSchema.getFieldDataType(fieldsRemaining.get(0)).get();
    } else {
        // Multiple fields map to a ROW, i.e. a Pulsar record schema.
        List<DataTypes.Field> fieldList = fieldsRemaining.stream()
            .map(f -> DataTypes.FIELD(f, tableSchema.getFieldDataType(f).get()))
            .collect(Collectors.toList());
        dataType = DataTypes.ROW(fieldList.toArray(new DataTypes.Field[0]));
    }

    SchemaInfo si = SchemaUtils.sqlType2PulsarSchema(dataType).getSchemaInfo();
    SchemaUtils.uploadPulsarSchema(admin, topic, si);
}
Example 12
Source File: HiveTypeUtil.java From flink with Apache License 2.0
/**
 * Convert Hive data type to a Flink data type.
 *
 * @param hiveType a Hive data type
 * @return the corresponding Flink data type
 */
public static DataType toFlinkType(TypeInfo hiveType) {
    checkNotNull(hiveType, "hiveType cannot be null");

    switch (hiveType.getCategory()) {
        case PRIMITIVE:
            return toFlinkPrimitiveType((PrimitiveTypeInfo) hiveType);
        case LIST:
            ListTypeInfo listTypeInfo = (ListTypeInfo) hiveType;
            return DataTypes.ARRAY(toFlinkType(listTypeInfo.getListElementTypeInfo()));
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) hiveType;
            return DataTypes.MAP(
                toFlinkType(mapTypeInfo.getMapKeyTypeInfo()),
                toFlinkType(mapTypeInfo.getMapValueTypeInfo()));
        case STRUCT:
            StructTypeInfo structTypeInfo = (StructTypeInfo) hiveType;
            List<String> names = structTypeInfo.getAllStructFieldNames();
            List<TypeInfo> typeInfos = structTypeInfo.getAllStructFieldTypeInfos();
            DataTypes.Field[] fields = new DataTypes.Field[names.size()];
            for (int i = 0; i < fields.length; i++) {
                fields[i] = DataTypes.FIELD(names.get(i), toFlinkType(typeInfos.get(i)));
            }
            return DataTypes.ROW(fields);
        default:
            throw new UnsupportedOperationException(
                String.format("Flink doesn't support Hive data type %s yet.", hiveType));
    }
}
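As a hedged usage sketch: a Hive struct type string can be parsed with Hive's TypeInfoUtils and handed to toFlinkType, which lands in the STRUCT branch above and returns a DataTypes.ROW (the type string is illustrative):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

TypeInfo hiveStruct = TypeInfoUtils.getTypeInfoFromTypeString("struct<name:string,age:int>");
DataType flinkRow = HiveTypeUtil.toFlinkType(hiveStruct);
// flinkRow renders roughly as ROW<`name` STRING, `age` INT>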
Example 13
Source File: SimpleCatalogFactory.java From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
    String database = properties.getOrDefault(
        CatalogDescriptorValidator.CATALOG_DEFAULT_DATABASE,
        "default_database");
    GenericInMemoryCatalog genericInMemoryCatalog = new GenericInMemoryCatalog(name, database);

    String tableName = properties.getOrDefault(TEST_TABLE_NAME, TEST_TABLE_NAME);
    StreamTableSource<Row> tableSource = new StreamTableSource<Row>() {
        @Override
        public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) {
            return execEnv.fromCollection(TABLE_CONTENTS)
                .returns(new RowTypeInfo(
                    new TypeInformation[]{Types.INT(), Types.STRING()},
                    new String[]{"id", "string"}));
        }

        @Override
        public TableSchema getTableSchema() {
            return TableSchema.builder()
                .field("id", DataTypes.INT())
                .field("string", DataTypes.STRING())
                .build();
        }

        @Override
        public DataType getProducedDataType() {
            return DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.INT()),
                DataTypes.FIELD("string", DataTypes.STRING()));
        }
    };

    try {
        genericInMemoryCatalog.createTable(
            new ObjectPath(database, tableName),
            ConnectorCatalogTable.source(tableSource, false),
            false);
    } catch (Exception e) {
        throw new WrappingRuntimeException(e);
    }

    return genericInMemoryCatalog;
}
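A short, hedged usage sketch (the catalog name and the pre-existing tableEnv are assumptions for illustration): a catalog created by this factory can be registered on a TableEnvironment directly:

// tableEnv is an existing TableEnvironment; "my_catalog" is an illustrative name.
Catalog catalog = new SimpleCatalogFactory().createCatalog("my_catalog", new HashMap<>());
tableEnv.registerCatalog("my_catalog", catalog);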
Example 14
Source File: ValuesITCase.java From flink with Apache License 2.0
@Test
public void testTypeConversions() throws Exception {
    List<Row> data = Arrays.asList(
        Row.of(1, "ABC", java.sql.Timestamp.valueOf("2000-12-12 12:30:57.12"),
            Row.of(1, "ABC", Arrays.asList(1, 2, 3))),
        Row.of(Math.PI, "ABC", LocalDateTime.parse("2000-12-12T12:30:57.123456"),
            Row.of(Math.PI, "ABC", Arrays.asList(1L, 2L, 3L))),
        Row.of(3.1f, "DEF", LocalDateTime.parse("2000-12-12T12:30:57.1234567"),
            Row.of(3.1f, "DEF", Arrays.asList(1D, 2D, 3D))),
        Row.of(99L, "DEFG", LocalDateTime.parse("2000-12-12T12:30:57.12345678"),
            Row.of(99L, "DEFG", Arrays.asList(1f, 2f, 3f))),
        Row.of(0d, "D", LocalDateTime.parse("2000-12-12T12:30:57.123"),
            Row.of(0d, "D", Arrays.asList(1, 2, 3))));

    DataType rowType = DataTypes.ROW(
        DataTypes.FIELD("a", TypeConversions.fromLegacyInfoToDataType(Types.BIG_DEC)),
        DataTypes.FIELD("b", TypeConversions.fromLegacyInfoToDataType(Types.STRING)),
        DataTypes.FIELD("c", TypeConversions.fromLegacyInfoToDataType(Types.SQL_TIMESTAMP)),
        DataTypes.FIELD(
            "row",
            DataTypes.ROW(
                DataTypes.FIELD("a", TypeConversions.fromLegacyInfoToDataType(Types.BIG_DEC)),
                DataTypes.FIELD("c", TypeConversions.fromLegacyInfoToDataType(Types.STRING)),
                DataTypes.FIELD("d", TypeConversions.fromLegacyInfoToDataType(Types.OBJECT_ARRAY(Types.BIG_DEC))))));

    StreamExecutionEnvironment streamExecEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
    EnvironmentSettings settings = EnvironmentSettings.newInstance().useOldPlanner().build();
    StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(streamExecEnvironment, settings);

    Table t = tableEnvironment.fromValues(rowType, data);

    DataStream<Row> rowDataStream = tableEnvironment.toAppendStream(t, Row.class);
    StreamITCase.clear();
    rowDataStream.addSink(new StreamITCase.StringSink<>());
    streamExecEnvironment.execute();

    List<String> expected = Arrays.asList(
        "0,D,2000-12-12 12:30:57.123,0,D,[1, 2, 3]",
        "1,ABC,2000-12-12 12:30:57.12,1,ABC,[1, 2, 3]",
        "3.0999999046325684,DEF,2000-12-12 12:30:57.123,3.0999999046325684,DEF,[1.0, 2.0, 3.0]",
        "3.141592653589793,ABC,2000-12-12 12:30:57.123,3.141592653589793,ABC,[1, 2, 3]",
        "99,DEFG,2000-12-12 12:30:57.123,99,DEFG,[1.0, 2.0, 3.0]");
    StreamITCase.compareWithList(expected);
}
Example 15
Source File: FlinkPulsarITest.java From pulsar-flink with Apache License 2.0
private DataType intRowWithTopicType() {
    return DataTypes.ROW(
        DataTypes.FIELD(TOPIC_ATTRIBUTE_NAME, DataTypes.STRING()),
        DataTypes.FIELD("v", DataTypes.INT()));
}
Example 16
Source File: FlinkPulsarITest.java From pulsar-flink with Apache License 2.0
private DataType intRowType() {
    return DataTypes.ROW(
        DataTypes.FIELD("v", DataTypes.INT()));
}
Example 17
Source File: SchemaUtils.java From pulsar-flink with Apache License 2.0
private static DataType avro2SqlType(Schema avroSchema, Set<String> existingRecordNames) throws IncompatibleSchemaException {
    LogicalType logicalType = avroSchema.getLogicalType();
    switch (avroSchema.getType()) {
        case INT:
            if (logicalType instanceof LogicalTypes.Date) {
                return DataTypes.DATE();
            } else {
                return DataTypes.INT();
            }
        case STRING:
        case ENUM:
            return DataTypes.STRING();
        case BOOLEAN:
            return DataTypes.BOOLEAN();
        case BYTES:
        case FIXED:
            // For FIXED type, if the precision requires more bytes than fixed size, the logical
            // type will be null, which is handled by Avro library.
            if (logicalType instanceof LogicalTypes.Decimal) {
                LogicalTypes.Decimal d = (LogicalTypes.Decimal) logicalType;
                return DataTypes.DECIMAL(d.getPrecision(), d.getScale());
            } else {
                return DataTypes.BYTES();
            }
        case DOUBLE:
            return DataTypes.DOUBLE();
        case FLOAT:
            return DataTypes.FLOAT();
        case LONG:
            if (logicalType instanceof LogicalTypes.TimestampMillis ||
                    logicalType instanceof LogicalTypes.TimestampMicros) {
                return DataTypes.TIMESTAMP(3).bridgedTo(java.sql.Timestamp.class);
            } else {
                return DataTypes.BIGINT();
            }
        case RECORD:
            if (existingRecordNames.contains(avroSchema.getFullName())) {
                throw new IncompatibleSchemaException(
                    String.format(
                        "Found recursive reference in Avro schema, which can not be processed by Flink: %s",
                        avroSchema.toString(true)), null);
            }
            Set<String> newRecordName = ImmutableSet.<String>builder()
                .addAll(existingRecordNames).add(avroSchema.getFullName()).build();
            List<DataTypes.Field> fields = new ArrayList<>();
            for (Schema.Field f : avroSchema.getFields()) {
                DataType fieldType = avro2SqlType(f.schema(), newRecordName);
                fields.add(DataTypes.FIELD(f.name(), fieldType));
            }
            return DataTypes.ROW(fields.toArray(new DataTypes.Field[0]));
        case ARRAY:
            DataType elementType = avro2SqlType(avroSchema.getElementType(), existingRecordNames);
            return DataTypes.ARRAY(elementType);
        case MAP:
            DataType valueType = avro2SqlType(avroSchema.getValueType(), existingRecordNames);
            return DataTypes.MAP(DataTypes.STRING(), valueType);
        case UNION:
            if (avroSchema.getTypes().stream().anyMatch(f -> f.getType() == Schema.Type.NULL)) {
                // In case of a union with null, eliminate it and make a recursive call
                List<Schema> remainingUnionTypes = avroSchema.getTypes().stream()
                    .filter(f -> f.getType() != Schema.Type.NULL)
                    .collect(Collectors.toList());
                if (remainingUnionTypes.size() == 1) {
                    return avro2SqlType(remainingUnionTypes.get(0), existingRecordNames).nullable();
                } else {
                    return avro2SqlType(Schema.createUnion(remainingUnionTypes), existingRecordNames).nullable();
                }
            } else {
                List<Schema.Type> types = avroSchema.getTypes().stream()
                    .map(Schema::getType)
                    .collect(Collectors.toList());
                if (types.size() == 1) {
                    return avro2SqlType(avroSchema.getTypes().get(0), existingRecordNames);
                } else if (types.size() == 2 && types.contains(Schema.Type.INT) && types.contains(Schema.Type.LONG)) {
                    return DataTypes.BIGINT();
                } else if (types.size() == 2 && types.contains(Schema.Type.FLOAT) && types.contains(Schema.Type.DOUBLE)) {
                    return DataTypes.DOUBLE();
                } else {
                    // Convert complex unions to struct types where field names are member0, member1, etc.
                    // This is consistent with the behavior when converting between Avro and Parquet.
                    List<DataTypes.Field> memberFields = new ArrayList<>();
                    List<Schema> schemas = avroSchema.getTypes();
                    for (int i = 0; i < schemas.size(); i++) {
                        DataType memberType = avro2SqlType(schemas.get(i), existingRecordNames);
                        memberFields.add(DataTypes.FIELD("member" + i, memberType));
                    }
                    return DataTypes.ROW(memberFields.toArray(new DataTypes.Field[0]));
                }
            }
        default:
            throw new IncompatibleSchemaException(
                String.format("Unsupported type %s", avroSchema.toString(true)), null);
    }
}
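To make the complex-union branch concrete, a hedged sketch: a union of STRING and INT with no null member falls through to the member0/member1 struct encoding (the schema is constructed here purely for illustration):

import org.apache.avro.Schema;
import java.util.Arrays;

Schema union = Schema.createUnion(Arrays.asList(
        Schema.create(Schema.Type.STRING),
        Schema.create(Schema.Type.INT)));
// avro2SqlType(union, Collections.emptySet()) would return
// ROW<member0 STRING, member1 INT>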