Java Code Examples for org.apache.flink.api.java.typeutils.RowTypeInfo#getArity()

The following examples show how to use org.apache.flink.api.java.typeutils.RowTypeInfo#getArity(). Each example is taken from an open-source project, named in the header above the code.
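Before the project examples, a minimal self-contained sketch may help: getArity() returns the number of fields in a RowTypeInfo, which is why the examples below routinely use it as the loop bound when walking the field names and field types in parallel. The field names and types here are illustrative only, not taken from any of the projects below:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;

public class RowTypeInfoArityDemo {

	public static void main(String[] args) {
		// Illustrative two-field row type.
		RowTypeInfo rowType = new RowTypeInfo(
				new TypeInformation<?>[]{Types.INT, Types.STRING},
				new String[]{"id", "name"});
		// getArity() returns the number of fields, here 2.
		for (int i = 0; i < rowType.getArity(); i++) {
			System.out.println(rowType.getFieldNames()[i] + " -> " + rowType.getTypeAt(i));
		}
	}
}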
Example 1
Source File: HBaseTableFactory.java    From flink with Apache License 2.0
private HBaseTableSchema validateTableSchema(TableSchema schema) {
	HBaseTableSchema hbaseSchema = new HBaseTableSchema();
	String[] fieldNames = schema.getFieldNames();
	TypeInformation[] fieldTypes = schema.getFieldTypes();
	for (int i = 0; i < fieldNames.length; i++) {
		String name = fieldNames[i];
		TypeInformation<?> type = fieldTypes[i];
		if (type instanceof RowTypeInfo) {
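			// A nested row field corresponds to an HBase column family; its arity is the number of column qualifiers.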
			RowTypeInfo familyType = (RowTypeInfo) type;
			String[] qualifierNames = familyType.getFieldNames();
			TypeInformation[] qualifierTypes = familyType.getFieldTypes();
			for (int j = 0; j < familyType.getArity(); j++) {
				hbaseSchema.addColumn(name, qualifierNames[j], qualifierTypes[j].getTypeClass());
			}
		} else {
			hbaseSchema.setRowKey(name, type.getTypeClass());
		}
	}
	return hbaseSchema;
}
 
Example 2
Source File: MysqlSideFunction.java    From alchemy with Apache License 2.0
private Row fillRecord(Row input, JsonObject value) {
    RowTypeInfo sideTable = this.sideTableInfo.getSideType();
    int sideSize = sideTable.getArity();
    int inputSize = input.getArity();
    if (this.sideTableInfo.getRowSize() != (sideSize + inputSize)) {
        LOG.warn("expected row size:{} ,Row:{} , side:{}", this.sideTableInfo.getRowSize(), input, value);
        throw new IllegalArgumentException("expected row size:" + this.sideTableInfo.getRowSize()
            + ", but input size:" + inputSize + " and side size:" + sideSize);
    }
    Row row = new Row(this.sideTableInfo.getRowSize());
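    // copy the input row's fields first; the side table's fields are appended after them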
    for (int i = 0; i < inputSize; i++) {
        row.setField(i, input.getField(i));
    }
    Map<Integer, String> indexFields = getIndexFields(sideTable);
    for (int i = 0; i < sideSize; i++) {
        Object result = value.getValue(indexFields.get(i));
        row.setField(i + inputSize, ConvertObjectUtil.transform(result, sideTable.getTypeAt(i)));
    }
    return row;
}
 
Example 3
Source File: SideStream.java    From alchemy with Apache License 2.0
private static RowTypeInfo createReturnType(TableSchema leftTable, RowTypeInfo sideType) {
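    // the joined row type is the left table's columns followed by the side table's columns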
    String[] leftFields = leftTable.getColumnNames();
    TypeInformation[] leftTypes = leftTable.getTypes();
    int leftArity = leftFields.length;
    int rightArity = sideType.getArity();
    int size = leftArity + rightArity;
    String[] columnNames = new String[size];
    TypeInformation[] columnTypes = new TypeInformation[size];
    for (int i = 0; i < leftArity; i++) {
        columnNames[i] = leftFields[i];
        columnTypes[i] = leftTypes[i];
    }
    String[] rightFields = sideType.getFieldNames();
    for (int i = 0; i < rightArity; i++) {
        columnNames[leftArity + i] = rightFields[i];
        columnTypes[leftArity + i] = sideType.getTypeAt(i);
    }

    return new RowTypeInfo(columnTypes, columnNames);
}
 
Example 4
Source File: RowRowRecordParser.java    From incubator-iotdb with Apache License 2.0
/**
 * Creates RowRowRecordParser from output RowTypeInfo and selected series in the RowRecord. The row field "time"
 * will be used to store the timestamp value. The other row fields store the values of the same field names of
 * the RowRecord.
 *
 * @param outputRowTypeInfo The RowTypeInfo of the output row.
 * @param selectedSeries The selected series in the RowRecord.
 * @return The RowRowRecordParser.
 */
public static RowRowRecordParser create(RowTypeInfo outputRowTypeInfo, List<Path> selectedSeries) {
	List<String> selectedSeriesNames = selectedSeries.stream().map(Path::toString).collect(Collectors.toList());
	String[] rowFieldNames = outputRowTypeInfo.getFieldNames();
	int[] indexMapping = new int[outputRowTypeInfo.getArity()];
	for (int i = 0; i < outputRowTypeInfo.getArity(); i++) {
		if (!QueryConstant.RESERVED_TIME.equals(rowFieldNames[i])) {
			int index = selectedSeriesNames.indexOf(rowFieldNames[i]);
			if (index >= 0) {
				indexMapping[i] = index;
			} else {
				throw new IllegalArgumentException(rowFieldNames[i] + " is not found in selected series.");
			}
		} else {
			// marked as timestamp field.
			indexMapping[i] = -1;
		}
	}
	return new RowRowRecordParser(indexMapping, outputRowTypeInfo);
}
 
Example 5
Source File: CsvRowSchemaConverter.java    From Flink-CEPplus with Apache License 2.0
/**
 * Convert {@link RowTypeInfo} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowTypeInfo rowType) {
	final Builder builder = new CsvSchema.Builder();
	final String[] fields = rowType.getFieldNames();
	final TypeInformation<?>[] types = rowType.getFieldTypes();
	for (int i = 0; i < rowType.getArity(); i++) {
		builder.addColumn(new Column(i, fields[i], convertType(fields[i], types[i])));
	}
	return builder.build();
}
 
Example 6
Source File: HiveTableSinkTest.java    From flink with Apache License 2.0
@Test
public void testWriteNestedComplexType() throws Exception {
	String dbName = "default";
	String tblName = "dest";
	ObjectPath tablePath = new ObjectPath(dbName, tblName);

	// nested complex types
	TableSchema.Builder builder = new TableSchema.Builder();
	// array of rows
	builder.fields(new String[]{"a"}, new DataType[]{DataTypes.ARRAY(
			DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING())))});
	RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, builder.build(), 0);
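	// build a source row with one field per column of the destination table (here a single ARRAY<ROW> column)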
	Row row = new Row(rowTypeInfo.getArity());
	Object[] array = new Object[3];
	row.setField(0, array);
	for (int i = 0; i < array.length; i++) {
		Row struct = new Row(2);
		struct.setField(0, 1 + i);
		struct.setField(1, String.valueOf((char) ('a' + i)));
		array[i] = struct;
	}
	List<Row> toWrite = new ArrayList<>();
	toWrite.add(row);

	TableEnvironment tableEnv = HiveTestUtils.createTableEnv();

	Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
	tableEnv.registerTable("nestedSrc", src);
	tableEnv.registerCatalog("hive", hiveCatalog);
	tableEnv.sqlQuery("select * from nestedSrc").insertInto("hive", "default", "dest");
	tableEnv.execute("mytest");

	List<String> result = hiveShell.executeQuery("select * from " + tblName);
	assertEquals(1, result.size());
	assertEquals("[{\"f1\":1,\"f2\":\"a\"},{\"f1\":2,\"f2\":\"b\"},{\"f1\":3,\"f2\":\"c\"}]", result.get(0));
	hiveCatalog.dropTable(tablePath, false);
}
 
Example 7
Source File: CsvRowSchemaConverter.java    From flink with Apache License 2.0
/**
 * Convert {@link RowTypeInfo} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowTypeInfo rowType) {
	final Builder builder = new CsvSchema.Builder();
	final String[] fields = rowType.getFieldNames();
	final TypeInformation<?>[] types = rowType.getFieldTypes();
	for (int i = 0; i < rowType.getArity(); i++) {
		builder.addColumn(new Column(i, fields[i], convertType(fields[i], types[i])));
	}
	return builder.build();
}
 
Example 8
Source File: MysqlSideFunction.java    From alchemy with Apache License 2.0
private Map<Integer, String> getIndexFields(RowTypeInfo sideType) {
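    // getArity() pre-sizes the map: one entry per field, keyed by the field's index in the row type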
    Map<Integer, String> indexFields = new HashMap<>(sideType.getArity());
    String[] fieldNames = sideType.getFieldNames();
    for (String field : fieldNames) {
        indexFields.put(sideType.getFieldIndex(field), field);
    }
    return indexFields;
}
 
Example 9
Source File: HBaseTableFactory.java    From flink with Apache License 2.0
private HBaseTableSchema validateTableSchema(TableSchema schema) {
	HBaseTableSchema hbaseSchema = new HBaseTableSchema();
	String[] fieldNames = schema.getFieldNames();
	TypeInformation[] fieldTypes = schema.getFieldTypes();
	for (int i = 0; i < fieldNames.length; i++) {
		String name = fieldNames[i];
		TypeInformation<?> type = fieldTypes[i];
		if (type instanceof RowTypeInfo) {
			RowTypeInfo familyType = (RowTypeInfo) type;
			String[] qualifierNames = familyType.getFieldNames();
			TypeInformation[] qualifierTypes = familyType.getFieldTypes();
			for (int j = 0; j < familyType.getArity(); j++) {
				// HBase connector doesn't support LocalDateTime
				// use Timestamp as conversion class for now.
				Class clazz = qualifierTypes[j].getTypeClass();
				if (LocalDateTime.class.equals(clazz)) {
					clazz = Timestamp.class;
				} else if (LocalDate.class.equals(clazz)) {
					clazz = Date.class;
				} else if (LocalTime.class.equals(clazz)) {
					clazz = Time.class;
				}
				hbaseSchema.addColumn(name, qualifierNames[j], clazz);
			}
		} else {
			hbaseSchema.setRowKey(name, type.getTypeClass());
		}
	}
	return hbaseSchema;
}
 
Example 10
Source File: HiveTableSinkITCase.java    From flink with Apache License 2.0
@Test
public void testWriteNestedComplexType() throws Exception {
	String dbName = "default";
	String tblName = "dest";
	ObjectPath tablePath = new ObjectPath(dbName, tblName);

	// nested complex types
	TableSchema.Builder builder = new TableSchema.Builder();
	// array of rows
	builder.fields(new String[]{"a"}, new DataType[]{DataTypes.ARRAY(
			DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING())))});
	RowTypeInfo rowTypeInfo = createHiveDestTable(dbName, tblName, builder.build(), 0);
	Row row = new Row(rowTypeInfo.getArity());
	Object[] array = new Object[3];
	row.setField(0, array);
	for (int i = 0; i < array.length; i++) {
		Row struct = new Row(2);
		struct.setField(0, 1 + i);
		struct.setField(1, String.valueOf((char) ('a' + i)));
		array[i] = struct;
	}
	List<Row> toWrite = new ArrayList<>();
	toWrite.add(row);

	TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();

	Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
	tableEnv.registerTable("nestedSrc", src);
	tableEnv.registerCatalog("hive", hiveCatalog);
	TableEnvUtil.execInsertTableAndWaitResult(tableEnv.sqlQuery("select * from nestedSrc"), "hive.`default`.dest");

	List<String> result = hiveShell.executeQuery("select * from " + tblName);
	assertEquals(1, result.size());
	assertEquals("[{\"f1\":1,\"f2\":\"a\"},{\"f1\":2,\"f2\":\"b\"},{\"f1\":3,\"f2\":\"c\"}]", result.get(0));
	hiveCatalog.dropTable(tablePath, false);
}
 
Example 11
Source File: HiveTableSinkTest.java    From flink with Apache License 2.0
@Test
public void testWriteComplexType() throws Exception {
	String dbName = "default";
	String tblName = "dest";
	ObjectPath tablePath = new ObjectPath(dbName, tblName);

	TableSchema.Builder builder = new TableSchema.Builder();
	builder.fields(new String[]{"a", "m", "s"}, new DataType[]{
			DataTypes.ARRAY(DataTypes.INT()),
			DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
			DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING()))});

	RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, builder.build(), 0);
	List<Row> toWrite = new ArrayList<>();
	Row row = new Row(rowTypeInfo.getArity());
	Object[] array = new Object[]{1, 2, 3};
	Map<Integer, String> map = new HashMap<Integer, String>() {{
		put(1, "a");
		put(2, "b");
	}};
	Row struct = new Row(2);
	struct.setField(0, 3);
	struct.setField(1, "c");

	row.setField(0, array);
	row.setField(1, map);
	row.setField(2, struct);
	toWrite.add(row);

	TableEnvironment tableEnv = HiveTestUtils.createTableEnv();
	Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
	tableEnv.registerTable("complexSrc", src);

	tableEnv.registerCatalog("hive", hiveCatalog);
	tableEnv.sqlQuery("select * from complexSrc").insertInto("hive", "default", "dest");
	tableEnv.execute("mytest");

	List<String> result = hiveShell.executeQuery("select * from " + tblName);
	assertEquals(1, result.size());
	assertEquals("[1,2,3]\t{1:\"a\",2:\"b\"}\t{\"f1\":3,\"f2\":\"c\"}", result.get(0));

	hiveCatalog.dropTable(tablePath, false);
}
 
Example 12
Source File: HiveTableSinkITCase.java    From flink with Apache License 2.0
@Test
public void testWriteComplexType() throws Exception {
	String dbName = "default";
	String tblName = "dest";
	ObjectPath tablePath = new ObjectPath(dbName, tblName);

	TableSchema.Builder builder = new TableSchema.Builder();
	builder.fields(new String[]{"a", "m", "s"}, new DataType[]{
			DataTypes.ARRAY(DataTypes.INT()),
			DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
			DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING()))});

	RowTypeInfo rowTypeInfo = createHiveDestTable(dbName, tblName, builder.build(), 0);
	List<Row> toWrite = new ArrayList<>();
	Row row = new Row(rowTypeInfo.getArity());
	Object[] array = new Object[]{1, 2, 3};
	Map<Integer, String> map = new HashMap<Integer, String>() {{
		put(1, "a");
		put(2, "b");
	}};
	Row struct = new Row(2);
	struct.setField(0, 3);
	struct.setField(1, "c");

	row.setField(0, array);
	row.setField(1, map);
	row.setField(2, struct);
	toWrite.add(row);

	TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
	Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
	tableEnv.registerTable("complexSrc", src);

	tableEnv.registerCatalog("hive", hiveCatalog);
	TableEnvUtil.execInsertTableAndWaitResult(tableEnv.sqlQuery("select * from complexSrc"), "hive.`default`.dest");

	List<String> result = hiveShell.executeQuery("select * from " + tblName);
	assertEquals(1, result.size());
	assertEquals("[1,2,3]\t{1:\"a\",2:\"b\"}\t{\"f1\":3,\"f2\":\"c\"}", result.get(0));

	hiveCatalog.dropTable(tablePath, false);
}