Java Code Examples for org.apache.flink.api.common.typeinfo.TypeInformation#getArity()

The following examples show how to use org.apache.flink.api.common.typeinfo.TypeInformation#getArity(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: Keys.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the tuple field at the given position can serve as a sort key.
 *
 * @param fieldPos zero-based field position within the tuple
 * @param type the element type; must be a tuple {@link CompositeType}
 * @return true if the selected field's type supports sorting
 * @throws InvalidProgramException if the type is not a non-empty tuple type
 * @throws IndexOutOfBoundsException if {@code fieldPos} is outside {@code [0, arity)}
 */
public static boolean isSortKey(int fieldPos, TypeInformation<?> type) {

	// Field-position keys are only defined for tuple-like composite types.
	if (!type.isTupleType() || !(type instanceof CompositeType)) {
		throw new InvalidProgramException("Specifying keys via field positions is only valid " +
			"for tuple data types. Type: " + type);
	}

	final int arity = type.getArity();
	if (arity == 0) {
		throw new InvalidProgramException("Tuple size must be greater than 0. Size: " + arity);
	}
	if (fieldPos < 0 || fieldPos >= arity) {
		throw new IndexOutOfBoundsException("Tuple position is out of range: " + fieldPos);
	}

	// Delegate the decision to the selected field's own type information.
	final CompositeType<?> compositeType = (CompositeType<?>) type;
	return compositeType.getTypeAt(fieldPos).isSortKeyType();
}
 
Example 2
Source File: Keys.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the tuple field at the given position can serve as a sort key.
 *
 * @param fieldPos zero-based field position within the tuple
 * @param type the element type; must be a tuple {@link CompositeType}
 * @return true if the selected field's type supports sorting
 * @throws InvalidProgramException if the type is not a non-empty tuple type
 * @throws IndexOutOfBoundsException if {@code fieldPos} is outside {@code [0, arity)}
 */
public static boolean isSortKey(int fieldPos, TypeInformation<?> type) {

			// Field-position keys are only defined for tuple-like composite types.
			if (!type.isTupleType() || !(type instanceof CompositeType)) {
				throw new InvalidProgramException("Specifying keys via field positions is only valid " +
					"for tuple data types. Type: " + type);
			}
			if (type.getArity() == 0) {
				throw new InvalidProgramException("Tuple size must be greater than 0. Size: " + type.getArity());
			}

			if(fieldPos < 0 || fieldPos >= type.getArity()) {
				throw new IndexOutOfBoundsException("Tuple position is out of range: " + fieldPos);
			}

			// Delegate the decision to the selected field's own type information.
			TypeInformation<?> sortKeyType = ((CompositeType<?>)type).getTypeAt(fieldPos);
			return sortKeyType.isSortKeyType();
		}
 
Example 3
Source File: CsvTableSource.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Opens the function and eagerly reads the entire CSV input into memory:
 * every record is keyed via {@code getTargetKey} and appended to
 * {@code dataMap}, presumably so later lookups can be served from memory
 * (NOTE(review): confirm dataMap's role against the enclosing class).
 *
 * @param context the runtime context of the function
 * @throws Exception if creating, opening, or reading the input splits fails
 */
@Override
public void open(FunctionContext context) throws Exception {
	super.open(context);
	// Row type of the produced records; its arity sizes the reusable row.
	TypeInformation<Row> rowType = getResultType();

	RowCsvInputFormat inputFormat = config.createInputFormat();
	// One split is requested, but the format may still return several.
	FileInputSplit[] inputSplits = inputFormat.createInputSplits(1);
	for (FileInputSplit split : inputSplits) {
		inputFormat.open(split);
		// Reused as a mutable buffer by nextRecord; copies are stored instead.
		Row row = new Row(rowType.getArity());
		while (true) {
			Row r = inputFormat.nextRecord(row);
			if (r == null) {
				// End of this split.
				break;
			} else {
				Object key = getTargetKey(r);
				List<Row> rows = dataMap.computeIfAbsent(key, k -> new ArrayList<>());
				// Copy because 'row' is overwritten on the next iteration.
				rows.add(Row.copy(r));
			}
		}
		inputFormat.close();
	}
}
 
Example 4
Source File: CsvTableSource.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Opens the function and eagerly reads the entire CSV input into memory:
 * every record is keyed via {@code getTargetKey} and appended to
 * {@code dataMap}, presumably so later lookups can be served from memory
 * (NOTE(review): confirm dataMap's role against the enclosing class).
 *
 * @param context the runtime context of the function
 * @throws Exception if creating, opening, or reading the input splits fails
 */
@Override
public void open(FunctionContext context) throws Exception {
	super.open(context);
	// Row type of the produced records; its arity sizes the reusable row.
	TypeInformation<Row> rowType = getResultType();

	RowCsvInputFormat inputFormat = config.createInputFormat();
	// One split is requested, but the format may still return several.
	FileInputSplit[] inputSplits = inputFormat.createInputSplits(1);
	for (FileInputSplit split : inputSplits) {
		inputFormat.open(split);
		// Reused as a mutable buffer by nextRecord; copies are stored instead.
		Row row = new Row(rowType.getArity());
		while (true) {
			Row r = inputFormat.nextRecord(row);
			if (r == null) {
				// End of this split.
				break;
			} else {
				Object key = getTargetKey(r);
				List<Row> rows = dataMap.computeIfAbsent(key, k -> new ArrayList<>());
				// Copy because 'row' is overwritten on the next iteration.
				rows.add(Row.copy(r));
			}
		}
		inputFormat.close();
	}
}
 
Example 5
Source File: Keys.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the tuple field at the given position can serve as a sort key.
 *
 * @param fieldPos zero-based field position within the tuple
 * @param type the element type; must be a tuple {@link CompositeType}
 * @return true if the selected field's type supports sorting
 * @throws InvalidProgramException if the type is not a non-empty tuple type
 * @throws IndexOutOfBoundsException if {@code fieldPos} is outside {@code [0, arity)}
 */
public static boolean isSortKey(int fieldPos, TypeInformation<?> type) {

			// Field-position keys are only defined for tuple-like composite types.
			if (!type.isTupleType() || !(type instanceof CompositeType)) {
				throw new InvalidProgramException("Specifying keys via field positions is only valid " +
					"for tuple data types. Type: " + type);
			}
			if (type.getArity() == 0) {
				throw new InvalidProgramException("Tuple size must be greater than 0. Size: " + type.getArity());
			}

			if(fieldPos < 0 || fieldPos >= type.getArity()) {
				throw new IndexOutOfBoundsException("Tuple position is out of range: " + fieldPos);
			}

			// Delegate the decision to the selected field's own type information.
			TypeInformation<?> sortKeyType = ((CompositeType<?>)type).getTypeAt(fieldPos);
			return sortKeyType.isSortKeyType();
		}
 
Example 6
Source File: TypeExtractor.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the type information of the POJO field whose name equals the given
 * reflection field's name, or {@code null} if the POJO type has no such field.
 *
 * @param pojoInfo type information of the POJO (expected to be a {@link PojoTypeInfo})
 * @param field the reflection field whose name is looked up
 * @return the matching field's type information, or null when absent
 */
private static TypeInformation<?> getTypeOfPojoField(TypeInformation<?> pojoInfo, Field field) {
	// Hoist the loop-invariant name out of the loop instead of recomputing it
	// on every comparison.
	final String fieldName = field.getName();
	for (int j = 0; j < pojoInfo.getArity(); j++) {
		PojoField pf = ((PojoTypeInfo<?>) pojoInfo).getPojoFieldAt(j);
		if (pf.getField().getName().equals(fieldName)) {
			return pf.getTypeInformation();
		}
	}
	return null;
}
 
Example 7
Source File: TypeExtractor.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Looks up the type information of the POJO field whose name matches the given
 * reflection field, returning {@code null} when no field of that name exists.
 *
 * @param pojoInfo type information of the POJO (expected to be a {@link PojoTypeInfo})
 * @param field the reflection field whose name is matched
 * @return the matching field's type information, or null when absent
 */
private static TypeInformation<?> getTypeOfPojoField(TypeInformation<?> pojoInfo, Field field) {
	int index = 0;
	while (index < pojoInfo.getArity()) {
		final PojoField candidate = ((PojoTypeInfo<?>) pojoInfo).getPojoFieldAt(index);
		if (candidate.getField().getName().equals(field.getName())) {
			return candidate.getTypeInformation();
		}
		index++;
	}
	return null;
}
 
Example 8
Source File: RowConverter.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a converter for a Parquet {@link GroupType} row that writes its
 * converted value into the parent holder at the given position.
 *
 * @param schema the Parquet group schema of this row; its field count sizes
 *               the child converter array
 * @param typeInfo the Flink type of the row; child converters are only created
 *                 when this is a {@link CompositeType} with at least one field
 * @param parent the holder that receives this row's converted value
 * @param pos the position of this row within the parent row
 */
public RowConverter(GroupType schema, TypeInformation<?> typeInfo, ParentDataHolder parent, int pos) {
	this.typeInfo = typeInfo;
	this.parentDataHolder = parent;
	this.posInParentRow = pos;
	this.converters = new Converter[schema.getFieldCount()];

	int i = 0;
	// NOTE(review): assumes the schema's fields align one-to-one, in order,
	// with the composite type's fields — confirm with the schema construction.
	if (typeInfo.getArity() >= 1 && (typeInfo instanceof CompositeType)) {
		for (Type field : schema.getFields()) {
			converters[i] = createConverter(field, i, ((CompositeType<?>) typeInfo).getTypeAt(i), this);
			i++;
		}
	}
}
 
Example 9
Source File: FieldInfoUtils.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns field types for a given {@link TypeInformation}.
 *
 * <p>Composite types contribute one entry per field; atomic types are treated
 * as a single implicit field.
 *
 * @param inputType The TypeInformation to extract field types from.
 * @return An array holding the field types.
 */
public static TypeInformation<?>[] getFieldTypes(TypeInformation<?> inputType) {
	validateInputTypeInfo(inputType);

	if (!(inputType instanceof CompositeType)) {
		// Atomic type: a single-element array holding the type itself.
		return new TypeInformation[]{inputType};
	}

	final CompositeType<?> compositeType = (CompositeType<?>) inputType;
	final TypeInformation<?>[] fieldTypes = new TypeInformation[inputType.getArity()];
	for (int fieldIndex = 0; fieldIndex < fieldTypes.length; fieldIndex++) {
		fieldTypes[fieldIndex] = compositeType.getTypeAt(fieldIndex);
	}
	return fieldTypes;
}
 
Example 10
Source File: KeyedStream.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Validates that a given type of element (as encoded by the provided {@link TypeInformation}) can be
 * used as a key in the {@code DataStream.keyBy()} operation. This is done by searching depth-first the
 * key type and checking if each of the composite types satisfies the required conditions
 * (see {@link #validateKeyTypeIsHashable(TypeInformation)}).
 *
 * @param keyType The {@link TypeInformation} of the key.
 * @return the given key type, unchanged, when it is valid
 * @throws InvalidProgramException if any contained type cannot be used as a key
 */
private TypeInformation<KEY> validateKeyType(TypeInformation<KEY> keyType) {
	Stack<TypeInformation<?>> stack = new Stack<>();
	stack.push(keyType);

	List<TypeInformation<?>> unsupportedTypes = new ArrayList<>();

	// Depth-first traversal of the (possibly nested) key type.
	while (!stack.isEmpty()) {
		TypeInformation<?> typeInfo = stack.pop();

		if (!validateKeyTypeIsHashable(typeInfo)) {
			unsupportedTypes.add(typeInfo);
		}

		if (typeInfo instanceof TupleTypeInfoBase) {
			// Recurse into every field of a composite tuple type.
			// Wildcard cast instead of a raw type avoids an unchecked warning.
			for (int i = 0; i < typeInfo.getArity(); i++) {
				stack.push(((TupleTypeInfoBase<?>) typeInfo).getTypeAt(i));
			}
		}
	}

	if (!unsupportedTypes.isEmpty()) {
		throw new InvalidProgramException("Type " + keyType + " cannot be used as key. Contained " +
				"UNSUPPORTED key types: " + StringUtils.join(unsupportedTypes, ", ") + ". Look " +
				"at the keyBy() documentation for the conditions a type has to satisfy in order to be " +
				"eligible for a key.");
	}

	return keyType;
}
 
Example 11
Source File: TypeExtractor.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the type information of the POJO field whose name equals the given
 * reflection field's name, or {@code null} if the POJO type has no such field.
 *
 * @param pojoInfo type information of the POJO (expected to be a {@link PojoTypeInfo})
 * @param field the reflection field whose name is looked up
 * @return the matching field's type information, or null when absent
 */
private static TypeInformation<?> getTypeOfPojoField(TypeInformation<?> pojoInfo, Field field) {
	for (int j = 0; j < pojoInfo.getArity(); j++) {
		PojoField pf = ((PojoTypeInfo<?>) pojoInfo).getPojoFieldAt(j);
		// Match POJO fields by name against the reflection field.
		if (pf.getField().getName().equals(field.getName())) {
			return pf.getTypeInformation();
		}
	}
	return null;
}
 
Example 12
Source File: RowConverter.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a converter for a Parquet {@link GroupType} row that writes its
 * converted value into the parent holder at the given position.
 *
 * @param schema the Parquet group schema of this row; its field count sizes
 *               the child converter array
 * @param typeInfo the Flink type of the row; child converters are only created
 *                 when this is a {@link CompositeType} with at least one field
 * @param parent the holder that receives this row's converted value
 * @param pos the position of this row within the parent row
 */
public RowConverter(GroupType schema, TypeInformation<?> typeInfo, ParentDataHolder parent, int pos) {
	this.typeInfo = typeInfo;
	this.parentDataHolder = parent;
	this.posInParentRow = pos;
	this.converters = new Converter[schema.getFieldCount()];

	// Only composite types with at least one field get child converters.
	if ((typeInfo instanceof CompositeType) && typeInfo.getArity() >= 1) {
		final CompositeType<?> compositeType = (CompositeType<?>) typeInfo;
		int fieldIndex = 0;
		for (Type schemaField : schema.getFields()) {
			converters[fieldIndex] =
				createConverter(schemaField, fieldIndex, compositeType.getTypeAt(fieldIndex), this);
			fieldIndex++;
		}
	}
}
 
Example 13
Source File: FieldInfoUtils.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns field types for a given {@link TypeInformation}.
 *
 * <p>Composite types contribute one entry per field; atomic types are treated
 * as a single implicit field.
 *
 * @param inputType The TypeInformation to extract field types from.
 * @return An array holding the field types.
 */
public static TypeInformation<?>[] getFieldTypes(TypeInformation<?> inputType) {
	validateInputTypeInfo(inputType);

	final TypeInformation<?>[] fieldTypes;
	if (inputType instanceof CompositeType) {
		int arity = inputType.getArity();
		// Wildcard-parameterized cast instead of a raw CompositeType avoids
		// an unchecked raw-type usage.
		CompositeType<?> ct = (CompositeType<?>) inputType;
		fieldTypes = IntStream.range(0, arity).mapToObj(ct::getTypeAt).toArray(TypeInformation[]::new);
	} else {
		// Atomic type: a single-element array holding the type itself.
		fieldTypes = new TypeInformation[]{inputType};
	}

	return fieldTypes;
}
 
Example 14
Source File: KeyedStream.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Validates that a given type of element (as encoded by the provided {@link TypeInformation}) can be
 * used as a key in the {@code DataStream.keyBy()} operation. This is done by searching depth-first the
 * key type and checking if each of the composite types satisfies the required conditions
 * (see {@link #validateKeyTypeIsHashable(TypeInformation)}).
 *
 * @param keyType The {@link TypeInformation} of the key.
 * @return the given key type, unchanged, when it is valid
 * @throws InvalidProgramException if any contained type cannot be used as a key
 */
private TypeInformation<KEY> validateKeyType(TypeInformation<KEY> keyType) {
	Stack<TypeInformation<?>> stack = new Stack<>();
	stack.push(keyType);

	List<TypeInformation<?>> unsupportedTypes = new ArrayList<>();

	// Depth-first traversal of the (possibly nested) key type.
	while (!stack.isEmpty()) {
		TypeInformation<?> typeInfo = stack.pop();

		if (!validateKeyTypeIsHashable(typeInfo)) {
			unsupportedTypes.add(typeInfo);
		}

		if (typeInfo instanceof TupleTypeInfoBase) {
			// Recurse into every field of a composite tuple type.
			for (int i = 0; i < typeInfo.getArity(); i++) {
				stack.push(((TupleTypeInfoBase) typeInfo).getTypeAt(i));
			}
		}
	}

	if (!unsupportedTypes.isEmpty()) {
		throw new InvalidProgramException("Type " + keyType + " cannot be used as key. Contained " +
				"UNSUPPORTED key types: " + StringUtils.join(unsupportedTypes, ", ") + ". Look " +
				"at the keyBy() documentation for the conditions a type has to satisfy in order to be " +
				"eligible for a key.");
	}

	return keyType;
}
 
Example 15
Source File: Keys.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Create int-based (non-nested) field position keys on a tuple type.
 *
 * @param keyPositions zero-based tuple field positions to key on; if null or
 *        empty (and {@code allowEmpty} is true) all tuple fields become keys
 * @param type the element type; must be a tuple {@link CompositeType} with arity > 0
 * @param allowEmpty whether a null/empty key position array is permitted
 * @throws InvalidProgramException if the type is not a non-empty tuple type, or a
 *         selected (nested) flat field's type cannot be used as a key
 * @throws IllegalArgumentException if key positions are empty but not allowed
 */
public ExpressionKeys(int[] keyPositions, TypeInformation<T> type, boolean allowEmpty) {

	if (!type.isTupleType() || !(type instanceof CompositeType)) {
		throw new InvalidProgramException("Specifying keys via field positions is only valid " +
				"for tuple data types. Type: " + type);
	}
	if (type.getArity() == 0) {
		throw new InvalidProgramException("Tuple size must be greater than 0. Size: " + type.getArity());
	}
	if (!allowEmpty && (keyPositions == null || keyPositions.length == 0)) {
		throw new IllegalArgumentException("The grouping fields must not be empty.");
	}

	this.keyFields = new ArrayList<>();

	if (keyPositions == null || keyPositions.length == 0) {
		// use all tuple fields as key fields
		keyPositions = createIncrIntArray(type.getArity());
	} else {
		// validate positions against the inclusive upper bound (arity - 1)
		rangeCheckFields(keyPositions, type.getArity() - 1);
	}

	checkArgument(keyPositions.length > 0, "Grouping fields can not be empty at this point");

	// extract key field types
	CompositeType<T> cType = (CompositeType<T>)type;
	this.keyFields = new ArrayList<>(type.getTotalFields());

	// for each key position, find all (nested) field types
	String[] fieldNames = cType.getFieldNames();
	this.originalKeyTypes = new TypeInformation<?>[keyPositions.length];
	ArrayList<FlatFieldDescriptor> tmpList = new ArrayList<>();
	for (int i = 0; i < keyPositions.length; i++) {
		int keyPos = keyPositions[i];
		// tmpList is reused across positions; its contents are copied into keyFields below
		tmpList.clear();
		// get all flat fields
		this.originalKeyTypes[i] = cType.getTypeAt(keyPos);
		cType.getFlatFields(fieldNames[keyPos], 0, tmpList);
		// check if fields are of key type
		for(FlatFieldDescriptor ffd : tmpList) {
			if(!ffd.getType().isKeyType()) {
				throw new InvalidProgramException("This type (" + ffd.getType() + ") cannot be used as key.");
			}
		}
		this.keyFields.addAll(tmpList);
	}
}
 
Example 16
Source File: SemanticPropUtil.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Creates the semantic (forwarded-field) properties of a projection over two inputs.
 *
 * <p>Each entry of {@code fields} selects either one tuple field of an input or, with
 * the value -1, an entire input. All flat (atomic) fields covered by an entry are
 * recorded as forwarded unchanged to consecutive positions of the output.
 *
 * @param fields projected tuple field positions, or -1 for a whole input
 * @param isFromFirst for each entry of {@code fields}, true if it refers to the first input
 * @param inType1 type of the first input
 * @param inType2 type of the second input
 * @return semantic properties holding all forwarded-field mappings
 */
public static DualInputSemanticProperties createProjectionPropertiesDual(
	int[] fields, boolean[] isFromFirst, TypeInformation<?> inType1, TypeInformation<?> inType2) {
	DualInputSemanticProperties dsp = new DualInputSemanticProperties();

	// Flat-field offsets of the first input: sourceOffsets1[i] is the number of
	// atomic fields that precede tuple field i.
	int[] sourceOffsets1;
	if (inType1 instanceof TupleTypeInfo<?>) {
		sourceOffsets1 = new int[inType1.getArity()];
		sourceOffsets1[0] = 0;
		for (int i = 1; i < inType1.getArity(); i++) {
			sourceOffsets1[i] = ((TupleTypeInfo<?>) inType1).getTypeAt(i - 1).getTotalFields() + sourceOffsets1[i - 1];
		}
	} else {
		// Atomic type: a single flat field at offset 0.
		sourceOffsets1 = new int[]{0};
	}

	// Same flat-field offsets for the second input.
	int[] sourceOffsets2;
	if (inType2 instanceof TupleTypeInfo<?>) {
		sourceOffsets2 = new int[inType2.getArity()];
		sourceOffsets2[0] = 0;
		for (int i = 1; i < inType2.getArity(); i++) {
			sourceOffsets2[i] = ((TupleTypeInfo<?>) inType2).getTypeAt(i - 1).getTotalFields() + sourceOffsets2[i - 1];
		}
	} else {
		sourceOffsets2 = new int[]{0};
	}

	int targetOffset = 0;
	for (int i = 0; i < fields.length; i++) {
		int sourceOffset;
		int numFieldsToCopy;
		int input;
		if (isFromFirst[i]) {
			input = 0;
			if (fields[i] == -1) {
				// -1 selects the entire first input.
				sourceOffset = 0;
				numFieldsToCopy = inType1.getTotalFields();
			} else {
				sourceOffset = sourceOffsets1[fields[i]];
				numFieldsToCopy = ((TupleTypeInfo<?>) inType1).getTypeAt(fields[i]).getTotalFields();
			}
		} else {
			input = 1;
			if (fields[i] == -1) {
				// -1 selects the entire second input.
				sourceOffset = 0;
				numFieldsToCopy = inType2.getTotalFields();
			} else {
				sourceOffset = sourceOffsets2[fields[i]];
				numFieldsToCopy = ((TupleTypeInfo<?>) inType2).getTypeAt(fields[i]).getTotalFields();
			}
		}

		// Register every covered flat field as forwarded to its target position.
		for (int j = 0; j < numFieldsToCopy; j++) {
			dsp.addForwardedField(input, sourceOffset + j, targetOffset + j);
		}
		targetOffset += numFieldsToCopy;
	}

	return dsp;
}
 
Example 17
Source File: SemanticPropUtil.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Creates the semantic (forwarded-field) properties of a projection over two inputs.
 *
 * <p>Each entry of {@code fields} selects either one tuple field of an input or, with
 * the value -1, an entire input. All flat (atomic) fields covered by an entry are
 * recorded as forwarded unchanged to consecutive positions of the output.
 *
 * @param fields projected tuple field positions, or -1 for a whole input
 * @param isFromFirst for each entry of {@code fields}, true if it refers to the first input
 * @param inType1 type of the first input
 * @param inType2 type of the second input
 * @return semantic properties holding all forwarded-field mappings
 */
public static DualInputSemanticProperties createProjectionPropertiesDual(
	int[] fields, boolean[] isFromFirst, TypeInformation<?> inType1, TypeInformation<?> inType2) {
	DualInputSemanticProperties dsp = new DualInputSemanticProperties();

	// Flat-field offsets of the first input: sourceOffsets1[i] is the number of
	// atomic fields that precede tuple field i.
	int[] sourceOffsets1;
	if (inType1 instanceof TupleTypeInfo<?>) {
		sourceOffsets1 = new int[inType1.getArity()];
		sourceOffsets1[0] = 0;
		for (int i = 1; i < inType1.getArity(); i++) {
			sourceOffsets1[i] = ((TupleTypeInfo<?>) inType1).getTypeAt(i - 1).getTotalFields() + sourceOffsets1[i - 1];
		}
	} else {
		// Atomic type: a single flat field at offset 0.
		sourceOffsets1 = new int[]{0};
	}

	// Same flat-field offsets for the second input.
	int[] sourceOffsets2;
	if (inType2 instanceof TupleTypeInfo<?>) {
		sourceOffsets2 = new int[inType2.getArity()];
		sourceOffsets2[0] = 0;
		for (int i = 1; i < inType2.getArity(); i++) {
			sourceOffsets2[i] = ((TupleTypeInfo<?>) inType2).getTypeAt(i - 1).getTotalFields() + sourceOffsets2[i - 1];
		}
	} else {
		sourceOffsets2 = new int[]{0};
	}

	int targetOffset = 0;
	for (int i = 0; i < fields.length; i++) {
		int sourceOffset;
		int numFieldsToCopy;
		int input;
		if (isFromFirst[i]) {
			input = 0;
			if (fields[i] == -1) {
				// -1 selects the entire first input.
				sourceOffset = 0;
				numFieldsToCopy = inType1.getTotalFields();
			} else {
				sourceOffset = sourceOffsets1[fields[i]];
				numFieldsToCopy = ((TupleTypeInfo<?>) inType1).getTypeAt(fields[i]).getTotalFields();
			}
		} else {
			input = 1;
			if (fields[i] == -1) {
				// -1 selects the entire second input.
				sourceOffset = 0;
				numFieldsToCopy = inType2.getTotalFields();
			} else {
				sourceOffset = sourceOffsets2[fields[i]];
				numFieldsToCopy = ((TupleTypeInfo<?>) inType2).getTypeAt(fields[i]).getTotalFields();
			}
		}

		// Register every covered flat field as forwarded to its target position.
		for (int j = 0; j < numFieldsToCopy; j++) {
			dsp.addForwardedField(input, sourceOffset + j, targetOffset + j);
		}
		targetOffset += numFieldsToCopy;
	}

	return dsp;
}
 
Example 18
Source File: Keys.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Create int-based (non-nested) field position keys on a tuple type.
 *
 * @param keyPositions zero-based tuple field positions to key on; if null or
 *        empty (and {@code allowEmpty} is true) all tuple fields become keys
 * @param type the element type; must be a tuple {@link CompositeType} with arity > 0
 * @param allowEmpty whether a null/empty key position array is permitted
 * @throws InvalidProgramException if the type is not a non-empty tuple type, or a
 *         selected (nested) flat field's type cannot be used as a key
 * @throws IllegalArgumentException if key positions are empty but not allowed
 */
public ExpressionKeys(int[] keyPositions, TypeInformation<T> type, boolean allowEmpty) {

	if (!type.isTupleType() || !(type instanceof CompositeType)) {
		throw new InvalidProgramException("Specifying keys via field positions is only valid " +
				"for tuple data types. Type: " + type);
	}
	if (type.getArity() == 0) {
		throw new InvalidProgramException("Tuple size must be greater than 0. Size: " + type.getArity());
	}
	if (!allowEmpty && (keyPositions == null || keyPositions.length == 0)) {
		throw new IllegalArgumentException("The grouping fields must not be empty.");
	}

	this.keyFields = new ArrayList<>();

	if (keyPositions == null || keyPositions.length == 0) {
		// use all tuple fields as key fields
		keyPositions = createIncrIntArray(type.getArity());
	} else {
		// validate positions against the inclusive upper bound (arity - 1)
		rangeCheckFields(keyPositions, type.getArity() - 1);
	}

	checkArgument(keyPositions.length > 0, "Grouping fields can not be empty at this point");

	// extract key field types
	CompositeType<T> cType = (CompositeType<T>)type;
	this.keyFields = new ArrayList<>(type.getTotalFields());

	// for each key position, find all (nested) field types
	String[] fieldNames = cType.getFieldNames();
	this.originalKeyTypes = new TypeInformation<?>[keyPositions.length];
	ArrayList<FlatFieldDescriptor> tmpList = new ArrayList<>();
	for (int i = 0; i < keyPositions.length; i++) {
		int keyPos = keyPositions[i];
		// tmpList is reused across positions; its contents are copied into keyFields below
		tmpList.clear();
		// get all flat fields
		this.originalKeyTypes[i] = cType.getTypeAt(keyPos);
		cType.getFlatFields(fieldNames[keyPos], 0, tmpList);
		// check if fields are of key type
		for(FlatFieldDescriptor ffd : tmpList) {
			if(!ffd.getType().isKeyType()) {
				throw new InvalidProgramException("This type (" + ffd.getType() + ") cannot be used as key.");
			}
		}
		this.keyFields.addAll(tmpList);
	}
}
 
Example 19
Source File: Keys.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Create int-based (non-nested) field position keys on a tuple type.
 *
 * @param keyPositions zero-based tuple field positions to key on; if null or
 *        empty (and {@code allowEmpty} is true) all tuple fields become keys
 * @param type the element type; must be a tuple {@link CompositeType} with arity > 0
 * @param allowEmpty whether a null/empty key position array is permitted
 * @throws InvalidProgramException if the type is not a non-empty tuple type, or a
 *         selected (nested) flat field's type cannot be used as a key
 * @throws IllegalArgumentException if key positions are empty but not allowed
 */
public ExpressionKeys(int[] keyPositions, TypeInformation<T> type, boolean allowEmpty) {

	if (!type.isTupleType() || !(type instanceof CompositeType)) {
		throw new InvalidProgramException("Specifying keys via field positions is only valid " +
				"for tuple data types. Type: " + type);
	}
	if (type.getArity() == 0) {
		throw new InvalidProgramException("Tuple size must be greater than 0. Size: " + type.getArity());
	}
	if (!allowEmpty && (keyPositions == null || keyPositions.length == 0)) {
		throw new IllegalArgumentException("The grouping fields must not be empty.");
	}

	this.keyFields = new ArrayList<>();

	if (keyPositions == null || keyPositions.length == 0) {
		// use all tuple fields as key fields
		keyPositions = createIncrIntArray(type.getArity());
	} else {
		// validate positions against the inclusive upper bound (arity - 1)
		rangeCheckFields(keyPositions, type.getArity() - 1);
	}

	checkArgument(keyPositions.length > 0, "Grouping fields can not be empty at this point");

	// extract key field types
	CompositeType<T> cType = (CompositeType<T>)type;
	this.keyFields = new ArrayList<>(type.getTotalFields());

	// for each key position, find all (nested) field types
	String[] fieldNames = cType.getFieldNames();
	this.originalKeyTypes = new TypeInformation<?>[keyPositions.length];
	ArrayList<FlatFieldDescriptor> tmpList = new ArrayList<>();
	for (int i = 0; i < keyPositions.length; i++) {
		int keyPos = keyPositions[i];
		// tmpList is reused across positions; its contents are copied into keyFields below
		tmpList.clear();
		// get all flat fields
		this.originalKeyTypes[i] = cType.getTypeAt(keyPos);
		cType.getFlatFields(fieldNames[keyPos], 0, tmpList);
		// check if fields are of key type
		for(FlatFieldDescriptor ffd : tmpList) {
			if(!ffd.getType().isKeyType()) {
				throw new InvalidProgramException("This type (" + ffd.getType() + ") cannot be used as key.");
			}
		}
		this.keyFields.addAll(tmpList);
	}
}
 
Example 20
Source File: SemanticPropUtil.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Creates the semantic (forwarded-field) properties of a projection over two inputs.
 *
 * <p>Each entry of {@code fields} selects either one tuple field of an input or, with
 * the value -1, an entire input. All flat (atomic) fields covered by an entry are
 * recorded as forwarded unchanged to consecutive positions of the output.
 *
 * @param fields projected tuple field positions, or -1 for a whole input
 * @param isFromFirst for each entry of {@code fields}, true if it refers to the first input
 * @param inType1 type of the first input
 * @param inType2 type of the second input
 * @return semantic properties holding all forwarded-field mappings
 */
public static DualInputSemanticProperties createProjectionPropertiesDual(
	int[] fields, boolean[] isFromFirst, TypeInformation<?> inType1, TypeInformation<?> inType2) {
	DualInputSemanticProperties dsp = new DualInputSemanticProperties();

	// Flat-field offsets of the first input: sourceOffsets1[i] is the number of
	// atomic fields that precede tuple field i.
	int[] sourceOffsets1;
	if (inType1 instanceof TupleTypeInfo<?>) {
		sourceOffsets1 = new int[inType1.getArity()];
		sourceOffsets1[0] = 0;
		for (int i = 1; i < inType1.getArity(); i++) {
			sourceOffsets1[i] = ((TupleTypeInfo<?>) inType1).getTypeAt(i - 1).getTotalFields() + sourceOffsets1[i - 1];
		}
	} else {
		// Atomic type: a single flat field at offset 0.
		sourceOffsets1 = new int[]{0};
	}

	// Same flat-field offsets for the second input.
	int[] sourceOffsets2;
	if (inType2 instanceof TupleTypeInfo<?>) {
		sourceOffsets2 = new int[inType2.getArity()];
		sourceOffsets2[0] = 0;
		for (int i = 1; i < inType2.getArity(); i++) {
			sourceOffsets2[i] = ((TupleTypeInfo<?>) inType2).getTypeAt(i - 1).getTotalFields() + sourceOffsets2[i - 1];
		}
	} else {
		sourceOffsets2 = new int[]{0};
	}

	int targetOffset = 0;
	for (int i = 0; i < fields.length; i++) {
		int sourceOffset;
		int numFieldsToCopy;
		int input;
		if (isFromFirst[i]) {
			input = 0;
			if (fields[i] == -1) {
				// -1 selects the entire first input.
				sourceOffset = 0;
				numFieldsToCopy = inType1.getTotalFields();
			} else {
				sourceOffset = sourceOffsets1[fields[i]];
				numFieldsToCopy = ((TupleTypeInfo<?>) inType1).getTypeAt(fields[i]).getTotalFields();
			}
		} else {
			input = 1;
			if (fields[i] == -1) {
				// -1 selects the entire second input.
				sourceOffset = 0;
				numFieldsToCopy = inType2.getTotalFields();
			} else {
				sourceOffset = sourceOffsets2[fields[i]];
				numFieldsToCopy = ((TupleTypeInfo<?>) inType2).getTypeAt(fields[i]).getTotalFields();
			}
		}

		// Register every covered flat field as forwarded to its target position.
		for (int j = 0; j < numFieldsToCopy; j++) {
			dsp.addForwardedField(input, sourceOffset + j, targetOffset + j);
		}
		targetOffset += numFieldsToCopy;
	}

	return dsp;
}