Java Code Examples for org.apache.flink.table.types.logical.RowType#getFieldCount()

The following examples show how to use org.apache.flink.table.types.logical.RowType#getFieldCount(). Each example is taken from the Apache Flink source file noted above its code.
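Before the project examples, here is a minimal, self-contained sketch of the method itself. It is not taken from Flink; the class name RowTypeArityDemo is made up for illustration, and only public RowType API is used. It builds a RowType and reads its arity with getFieldCount():

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class RowTypeArityDemo {
	public static void main(String[] args) {
		// a row with two fields; RowType.of(...) generates the field names (f0, f1)
		RowType rowType = RowType.of(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
		// getFieldCount() returns the arity of the row, here 2
		System.out.println(rowType.getFieldCount());
	}
}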
Example 1
Source File: CsvRowDataSerializationSchema.java    From Flink, Apache License 2.0
private SerializationRuntimeConverter createRowConverter(RowType type) {
	LogicalType[] fieldTypes = type.getFields().stream()
		.map(RowType.RowField::getType)
		.toArray(LogicalType[]::new);
	final String[] fieldNames = type.getFieldNames().toArray(new String[0]);
	final RowFieldConverter[] fieldConverters = Arrays.stream(fieldTypes)
		.map(this::createNullableRowFieldConverter)
		.toArray(RowFieldConverter[]::new);
	final int rowArity = type.getFieldCount();
	return (csvMapper, container, row) -> {
		// top level reuses the object node container
		final ObjectNode objectNode = (ObjectNode) container;
		for (int i = 0; i < rowArity; i++) {
			objectNode.set(
				fieldNames[i],
				fieldConverters[i].convert(csvMapper, container, row, i));
		}
		return objectNode;
	};
}
 
Example 2
Source File: CsvRowDataSerializationSchema.java    From Flink, Apache License 2.0
private RowFieldConverter createRowRowFieldConverter(RowType type) {
	LogicalType[] fieldTypes = type.getFields().stream()
		.map(RowType.RowField::getType)
		.toArray(LogicalType[]::new);
	final RowFieldConverter[] fieldConverters = Arrays.stream(fieldTypes)
		.map(this::createNullableRowFieldConverter)
		.toArray(RowFieldConverter[]::new);
	final int rowArity = type.getFieldCount();
	return (csvMapper, container, row, pos) -> {
		final RowData value = row.getRow(pos, rowArity);
		// nested rows use array node container
		final ArrayNode arrayNode = csvMapper.createArrayNode();
		for (int i = 0; i < rowArity; i++) {
			arrayNode.add(fieldConverters[i].convert(csvMapper, arrayNode, value, i));
		}
		return arrayNode;
	};
}
 
Example 3
Source File: AvroRowDataDeserializationSchema.java    From Flink, Apache License 2.0
static DeserializationRuntimeConverter createRowConverter(RowType rowType) {
	final DeserializationRuntimeConverter[] fieldConverters = rowType.getFields().stream()
		.map(RowType.RowField::getType)
		.map(AvroRowDataDeserializationSchema::createNullableConverter)
		.toArray(DeserializationRuntimeConverter[]::new);
	final int arity = rowType.getFieldCount();

	return avroObject -> {
		IndexedRecord record = (IndexedRecord) avroObject;
		GenericRowData row = new GenericRowData(arity);
		for (int i = 0; i < arity; ++i) {
			row.setField(i, fieldConverters[i].convert(record.get(i)));
		}
		return row;
	};
}
 
Example 4
Source File: CanalJsonDeserializationSchema.java    From Flink, Apache License 2.0
public CanalJsonDeserializationSchema(
		RowType rowType,
		TypeInformation<RowData> resultTypeInfo,
		boolean ignoreParseErrors,
		TimestampFormat timestampFormatOption) {
	this.resultTypeInfo = resultTypeInfo;
	this.ignoreParseErrors = ignoreParseErrors;
	this.fieldCount = rowType.getFieldCount();
	this.jsonDeserializer = new JsonRowDataDeserializationSchema(
		createJsonRowType(fromLogicalToDataType(rowType)),
		// the result type is never used, so it's fine to pass in Canal's result type
		resultTypeInfo,
		false, // ignoreParseErrors already contains the functionality of failOnMissingField
		ignoreParseErrors,
		timestampFormatOption);
}
 
Example 5
Source File: ArrowUtils.java    From Flink, Apache License 2.0
private static Field toArrowField(String fieldName, LogicalType logicalType) {
	FieldType fieldType = new FieldType(
		logicalType.isNullable(),
		logicalType.accept(LogicalTypeToArrowTypeConverter.INSTANCE),
		null);
	List<Field> children = null;
	if (logicalType instanceof ArrayType) {
		children = Collections.singletonList(toArrowField(
			"element", ((ArrayType) logicalType).getElementType()));
	} else if (logicalType instanceof RowType) {
		RowType rowType = (RowType) logicalType;
		children = new ArrayList<>(rowType.getFieldCount());
		for (RowType.RowField field : rowType.getFields()) {
			children.add(toArrowField(field.getName(), field.getType()));
		}
	}
	return new Field(fieldName, fieldType, children);
}
 
Example 6
Source File: AbstractJdbcRowConverter.java    From Flink, Apache License 2.0
public AbstractJdbcRowConverter(RowType rowType) {
	this.rowType = checkNotNull(rowType);
	this.fieldTypes = rowType.getFields().stream()
		.map(RowType.RowField::getType)
		.toArray(LogicalType[]::new);
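	// one internal and one external converter per field, indexed by field position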
	this.toInternalConverters = new JdbcDeserializationConverter[rowType.getFieldCount()];
	this.toExternalConverters = new JdbcSerializationConverter[rowType.getFieldCount()];
	for (int i = 0; i < rowType.getFieldCount(); i++) {
		toInternalConverters[i] = createNullableInternalConverter(rowType.getTypeAt(i));
		toExternalConverters[i] = createNullableExternalConverter(fieldTypes[i]);
	}
}
 
Example 7
Source File: CsvRowSchemaConverter.java    From Flink, Apache License 2.0
/**
 * Convert {@link RowType} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowType rowType) {
	Builder builder = new CsvSchema.Builder();
	List<RowType.RowField> fields = rowType.getFields();
	for (int i = 0; i < rowType.getFieldCount(); i++) {
		String fieldName = fields.get(i).getName();
		LogicalType fieldType = fields.get(i).getType();
		builder.addColumn(new Column(i, fieldName, convertType(fieldName, fieldType)));
	}
	return builder.build();
}
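As a hedged usage sketch of the converter above (assuming the CsvSchema class from jackson-dataformat-csv that the CSV format builds on is on the classpath), a call site could look like this:

	RowType rowType = RowType.of(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
	// produces a CsvSchema with one column per row field, in field order
	CsvSchema csvSchema = CsvRowSchemaConverter.convert(rowType);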
 
Example 8
Source File: ParquetRowDataWriter.java    From Flink, Apache License 2.0
public ParquetRowDataWriter(
		RecordConsumer recordConsumer,
		RowType rowType,
		GroupType schema,
		boolean utcTimestamp) {
	this.recordConsumer = recordConsumer;
	this.utcTimestamp = utcTimestamp;

	this.filedWriters = new FieldWriter[rowType.getFieldCount()];
	this.fieldNames = rowType.getFieldNames().toArray(new String[0]);
	for (int i = 0; i < rowType.getFieldCount(); i++) {
		this.filedWriters[i] = createWriter(rowType.getTypeAt(i), schema.getType(i));
	}
}
 
Example 9
Source File: ParquetSchemaConverter.java    From Flink, Apache License 2.0
public static MessageType convertToParquetMessageType(String name, RowType rowType) {
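	// one Parquet type per row field, preserving the field order and names of the RowType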
	Type[] types = new Type[rowType.getFieldCount()];
	for (int i = 0; i < rowType.getFieldCount(); i++) {
		types[i] = convertToParquetType(
				rowType.getFieldNames().get(i), rowType.getTypeAt(i));
	}
	return new MessageType(name, types);
}
 
Example 10
Source File: LogicalTypeChecks.java    From Flink, Apache License 2.0
@Override
public Integer visit(RowType rowType) {
	return rowType.getFieldCount();
}
 
Example 11
Source File: OrcSplitReaderUtil.java    From Flink, Apache License 2.0
/**
 * See {@code org.apache.flink.table.catalog.hive.util.HiveTypeUtil}.
 */
public static TypeDescription logicalTypeToOrcType(LogicalType type) {
	type = type.copy(true);
	switch (type.getTypeRoot()) {
		case CHAR:
			return TypeDescription.createChar().withMaxLength(((CharType) type).getLength());
		case VARCHAR:
			int len = ((VarCharType) type).getLength();
			if (len == VarCharType.MAX_LENGTH) {
				return TypeDescription.createString();
			} else {
				return TypeDescription.createVarchar().withMaxLength(len);
			}
		case BOOLEAN:
			return TypeDescription.createBoolean();
		case VARBINARY:
			if (type.equals(DataTypes.BYTES().getLogicalType())) {
				return TypeDescription.createBinary();
			} else {
				throw new UnsupportedOperationException(
						"Not support other binary type: " + type);
			}
		case DECIMAL:
			DecimalType decimalType = (DecimalType) type;
			return TypeDescription.createDecimal()
					.withScale(decimalType.getScale())
					.withPrecision(decimalType.getPrecision());
		case TINYINT:
			return TypeDescription.createByte();
		case SMALLINT:
			return TypeDescription.createShort();
		case INTEGER:
			return TypeDescription.createInt();
		case BIGINT:
			return TypeDescription.createLong();
		case FLOAT:
			return TypeDescription.createFloat();
		case DOUBLE:
			return TypeDescription.createDouble();
		case DATE:
			return TypeDescription.createDate();
		case TIMESTAMP_WITHOUT_TIME_ZONE:
			return TypeDescription.createTimestamp();
		case ARRAY:
			ArrayType arrayType = (ArrayType) type;
			return TypeDescription.createList(logicalTypeToOrcType(arrayType.getElementType()));
		case MAP:
			MapType mapType = (MapType) type;
			return TypeDescription.createMap(
					logicalTypeToOrcType(mapType.getKeyType()),
					logicalTypeToOrcType(mapType.getValueType()));
		case ROW:
			RowType rowType = (RowType) type;
			TypeDescription struct = TypeDescription.createStruct();
			for (int i = 0; i < rowType.getFieldCount(); i++) {
				struct.addField(
						rowType.getFieldNames().get(i),
						logicalTypeToOrcType(rowType.getChildren().get(i)));
			}
			return struct;
		default:
			throw new UnsupportedOperationException("Unsupported type: " + type);
	}
}
 
Example 12
Source File: ArrowUtils.java    From Flink, Apache License 2.0
private static ArrowFieldWriter<Row> createRowArrowFieldWriter(ValueVector vector, LogicalType fieldType) {
	if (vector instanceof TinyIntVector) {
		return new RowTinyIntWriter((TinyIntVector) vector);
	} else if (vector instanceof SmallIntVector) {
		return new RowSmallIntWriter((SmallIntVector) vector);
	} else if (vector instanceof IntVector) {
		return new RowIntWriter((IntVector) vector);
	} else if (vector instanceof BigIntVector) {
		return new RowBigIntWriter((BigIntVector) vector);
	} else if (vector instanceof BitVector) {
		return new RowBooleanWriter((BitVector) vector);
	} else if (vector instanceof Float4Vector) {
		return new RowFloatWriter((Float4Vector) vector);
	} else if (vector instanceof Float8Vector) {
		return new RowDoubleWriter((Float8Vector) vector);
	} else if (vector instanceof VarCharVector) {
		return new RowVarCharWriter((VarCharVector) vector);
	} else if (vector instanceof VarBinaryVector) {
		return new RowVarBinaryWriter((VarBinaryVector) vector);
	} else if (vector instanceof DecimalVector) {
		DecimalVector decimalVector = (DecimalVector) vector;
		return new RowDecimalWriter(decimalVector, getPrecision(decimalVector), decimalVector.getScale());
	} else if (vector instanceof DateDayVector) {
		return new RowDateWriter((DateDayVector) vector);
	} else if (vector instanceof TimeSecVector || vector instanceof TimeMilliVector ||
		vector instanceof TimeMicroVector || vector instanceof TimeNanoVector) {
		return new RowTimeWriter(vector);
	} else if (vector instanceof TimeStampVector && ((ArrowType.Timestamp) vector.getField().getType()).getTimezone() == null) {
		return new RowTimestampWriter(vector);
	} else if (vector instanceof ListVector) {
		ListVector listVector = (ListVector) vector;
		LogicalType elementType = ((ArrayType) fieldType).getElementType();
		return new RowArrayWriter(listVector, createRowArrowFieldWriter(listVector.getDataVector(), elementType));
	} else if (vector instanceof StructVector) {
		RowType rowType = (RowType) fieldType;
		ArrowFieldWriter<Row>[] fieldsWriters = new ArrowFieldWriter[rowType.getFieldCount()];
		for (int i = 0; i < fieldsWriters.length; i++) {
			fieldsWriters[i] = createRowArrowFieldWriter(
				((StructVector) vector).getVectorById(i),
				rowType.getTypeAt(i));
		}
		return new RowRowWriter((StructVector) vector, fieldsWriters);
	} else {
		throw new UnsupportedOperationException(String.format(
			"Unsupported type %s.", fieldType));
	}
}
 
Example 13
Source File: ArrowUtils.java    From Flink, Apache License 2.0
private static ArrowFieldWriter<RowData> createArrowFieldWriterForRow(ValueVector vector, LogicalType fieldType) {
	if (vector instanceof TinyIntVector) {
		return TinyIntWriter.forRow((TinyIntVector) vector);
	} else if (vector instanceof SmallIntVector) {
		return SmallIntWriter.forRow((SmallIntVector) vector);
	} else if (vector instanceof IntVector) {
		return IntWriter.forRow((IntVector) vector);
	} else if (vector instanceof BigIntVector) {
		return BigIntWriter.forRow((BigIntVector) vector);
	} else if (vector instanceof BitVector) {
		return BooleanWriter.forRow((BitVector) vector);
	} else if (vector instanceof Float4Vector) {
		return FloatWriter.forRow((Float4Vector) vector);
	} else if (vector instanceof Float8Vector) {
		return DoubleWriter.forRow((Float8Vector) vector);
	} else if (vector instanceof VarCharVector) {
		return VarCharWriter.forRow((VarCharVector) vector);
	} else if (vector instanceof VarBinaryVector) {
		return VarBinaryWriter.forRow((VarBinaryVector) vector);
	} else if (vector instanceof DecimalVector) {
		DecimalVector decimalVector = (DecimalVector) vector;
		return DecimalWriter.forRow(decimalVector, getPrecision(decimalVector), decimalVector.getScale());
	} else if (vector instanceof DateDayVector) {
		return DateWriter.forRow((DateDayVector) vector);
	} else if (vector instanceof TimeSecVector || vector instanceof TimeMilliVector ||
		vector instanceof TimeMicroVector || vector instanceof TimeNanoVector) {
		return TimeWriter.forRow(vector);
	} else if (vector instanceof TimeStampVector && ((ArrowType.Timestamp) vector.getField().getType()).getTimezone() == null) {
		int precision;
		if (fieldType instanceof LocalZonedTimestampType) {
			precision = ((LocalZonedTimestampType) fieldType).getPrecision();
		} else {
			precision = ((TimestampType) fieldType).getPrecision();
		}
		return TimestampWriter.forRow(vector, precision);
	} else if (vector instanceof ListVector) {
		ListVector listVector = (ListVector) vector;
		LogicalType elementType = ((ArrayType) fieldType).getElementType();
		return ArrayWriter.forRow(listVector, createArrowFieldWriterForArray(listVector.getDataVector(), elementType));
	} else if (vector instanceof StructVector) {
		RowType rowType = (RowType) fieldType;
		ArrowFieldWriter<RowData>[] fieldsWriters = new ArrowFieldWriter[rowType.getFieldCount()];
		for (int i = 0; i < fieldsWriters.length; i++) {
			fieldsWriters[i] = createArrowFieldWriterForRow(
				((StructVector) vector).getVectorById(i),
				rowType.getTypeAt(i));
		}
		return RowWriter.forRow((StructVector) vector, fieldsWriters);
	} else {
		throw new UnsupportedOperationException(String.format(
			"Unsupported type %s.", fieldType));
	}
}
 
Example 14
Source File: ArrowUtils.java    From Flink, Apache License 2.0
private static ArrowFieldWriter<ArrayData> createArrowFieldWriterForArray(ValueVector vector, LogicalType fieldType) {
	if (vector instanceof TinyIntVector) {
		return TinyIntWriter.forArray((TinyIntVector) vector);
	} else if (vector instanceof SmallIntVector) {
		return SmallIntWriter.forArray((SmallIntVector) vector);
	} else if (vector instanceof IntVector) {
		return IntWriter.forArray((IntVector) vector);
	} else if (vector instanceof BigIntVector) {
		return BigIntWriter.forArray((BigIntVector) vector);
	} else if (vector instanceof BitVector) {
		return BooleanWriter.forArray((BitVector) vector);
	} else if (vector instanceof Float4Vector) {
		return FloatWriter.forArray((Float4Vector) vector);
	} else if (vector instanceof Float8Vector) {
		return DoubleWriter.forArray((Float8Vector) vector);
	} else if (vector instanceof VarCharVector) {
		return VarCharWriter.forArray((VarCharVector) vector);
	} else if (vector instanceof VarBinaryVector) {
		return VarBinaryWriter.forArray((VarBinaryVector) vector);
	} else if (vector instanceof DecimalVector) {
		DecimalVector decimalVector = (DecimalVector) vector;
		return DecimalWriter.forArray(decimalVector, getPrecision(decimalVector), decimalVector.getScale());
	} else if (vector instanceof DateDayVector) {
		return DateWriter.forArray((DateDayVector) vector);
	} else if (vector instanceof TimeSecVector || vector instanceof TimeMilliVector ||
		vector instanceof TimeMicroVector || vector instanceof TimeNanoVector) {
		return TimeWriter.forArray(vector);
	} else if (vector instanceof TimeStampVector && ((ArrowType.Timestamp) vector.getField().getType()).getTimezone() == null) {
		int precision;
		if (fieldType instanceof LocalZonedTimestampType) {
			precision = ((LocalZonedTimestampType) fieldType).getPrecision();
		} else {
			precision = ((TimestampType) fieldType).getPrecision();
		}
		return TimestampWriter.forArray(vector, precision);
	} else if (vector instanceof ListVector) {
		ListVector listVector = (ListVector) vector;
		LogicalType elementType = ((ArrayType) fieldType).getElementType();
		return ArrayWriter.forArray(listVector, createArrowFieldWriterForArray(listVector.getDataVector(), elementType));
	} else if (vector instanceof StructVector) {
		RowType rowType = (RowType) fieldType;
		ArrowFieldWriter<RowData>[] fieldsWriters = new ArrowFieldWriter[rowType.getFieldCount()];
		for (int i = 0; i < fieldsWriters.length; i++) {
			fieldsWriters[i] = createArrowFieldWriterForRow(
				((StructVector) vector).getVectorById(i),
				rowType.getTypeAt(i));
		}
		return RowWriter.forArray((StructVector) vector, fieldsWriters);
	} else {
		throw new UnsupportedOperationException(String.format(
			"Unsupported type %s.", fieldType));
	}
}