Java Code Examples for org.apache.flink.api.common.typeutils.TypeComparator#duplicate()

The following examples show how to use org.apache.flink.api.common.typeutils.TypeComparator#duplicate(). They are taken from open-source projects; the source file and project are listed above each example.
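
TypeComparator instances are stateful: calls such as setReference() store a record inside the comparator for later equalToReference() and compareToReference() checks. Whenever a comparator must be used in more than one place at the same time, for example by a spilling thread, a sorter, or a merge iterator as in the examples below, the code obtains an independent copy with duplicate() instead of sharing a single instance. The following minimal sketch is not taken from any of the projects below; the class name and the key values are illustrative. It only demonstrates that a duplicated comparator keeps its own reference state:

import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.base.IntComparator;

public class DuplicateSketch {

	public static void main(String[] args) {
		// An ascending comparator for Integer keys; IntComparator also appears in the test example below.
		TypeComparator<Integer> original = new IntComparator(true);

		// duplicate() returns an independent comparator of the same type;
		// it shares no mutable reference state with the original.
		TypeComparator<Integer> copy = original.duplicate();

		original.setReference(42);
		copy.setReference(7);

		System.out.println(original.equalToReference(42)); // true
		System.out.println(copy.equalToReference(42));     // false, the copy references 7
		System.out.println(copy.compare(7, 42) < 0);       // true, ascending order
	}
}
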
Example 1
Source File: CombiningUnilateralSortMerger.java    From Flink-CEPplus with Apache License 2.0
public CombiningSpillingThread(ExceptionHandler<IOException> exceptionHandler, CircularQueues<E> queues,
		AbstractInvokable parentTask, MemoryManager memManager, IOManager ioManager, 
		TypeSerializer<E> serializer, TypeComparator<E> comparator, 
		List<MemorySegment> sortReadMemory, List<MemorySegment> writeMemory, int maxNumFileHandles,
		boolean objectReuseEnabled)
{
	super(exceptionHandler, queues, parentTask, memManager, ioManager, serializer, comparator, 
		sortReadMemory, writeMemory, maxNumFileHandles);
	
	this.comparator2 = comparator.duplicate();
	this.objectReuseEnabled = objectReuseEnabled;
}
 
Example 2
Source File: SynchronousChainedCombineDriver.java    From Flink-CEPplus with Apache License 2.0
@Override
public void openTask() throws Exception {
	// open the stub first
	final Configuration stubConfig = this.config.getStubParameters();
	BatchTask.openUserCode(this.combiner, stubConfig);

	// ----------------- Set up the sorter -------------------------

	// instantiate the serializer / comparator
	final TypeSerializerFactory<IN> serializerFactory = this.config.getInputSerializer(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> sortingComparatorFactory = this.config.getDriverComparator(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> groupingComparatorFactory = this.config.getDriverComparator(1, this.userCodeClassLoader);
	
	this.serializer = serializerFactory.getSerializer();

	TypeComparator<IN> sortingComparator = sortingComparatorFactory.createComparator();
	this.groupingComparator = groupingComparatorFactory.createComparator();
	
	MemoryManager memManager = this.parent.getEnvironment().getMemoryManager();
	final int numMemoryPages = memManager.computeNumberOfPages(this.config.getRelativeMemoryDriver());
	this.memory = memManager.allocatePages(this.parent, numMemoryPages);

	// instantiate a fixed-length in-place sorter if possible, otherwise the out-of-place sorter
	if (sortingComparator.supportsSerializationWithKeyNormalization() &&
		this.serializer.getLength() > 0 && this.serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING)
	{
		this.sorter = new FixedLengthRecordSorter<IN>(this.serializer, sortingComparator.duplicate(), this.memory);
	} else {
		this.sorter = new NormalizedKeySorter<IN>(this.serializer, sortingComparator.duplicate(), this.memory);
	}

	if (LOG.isDebugEnabled()) {
		LOG.debug("SynchronousChainedCombineDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
	}
}
 
Example 3
Source File: GroupCombineChainedDriver.java    From Flink-CEPplus with Apache License 2.0
@Override
public void openTask() throws Exception {
	// open the stub first
	final Configuration stubConfig = this.config.getStubParameters();
	BatchTask.openUserCode(this.reducer, stubConfig);

	// ----------------- Set up the sorter -------------------------

	// instantiate the serializer / comparator
	final TypeSerializerFactory<IN> serializerFactory = this.config.getInputSerializer(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> sortingComparatorFactory = this.config.getDriverComparator(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> groupingComparatorFactory = this.config.getDriverComparator(1, this.userCodeClassLoader);
	this.serializer = serializerFactory.getSerializer();
	
	TypeComparator<IN> sortingComparator = sortingComparatorFactory.createComparator();
	this.groupingComparator = groupingComparatorFactory.createComparator();

	MemoryManager memManager = this.parent.getEnvironment().getMemoryManager();
	final int numMemoryPages = memManager.computeNumberOfPages(this.config.getRelativeMemoryDriver());
	this.memory = memManager.allocatePages(this.parent, numMemoryPages);

	// instantiate a fixed-length in-place sorter if possible, otherwise the out-of-place sorter
	if (sortingComparator.supportsSerializationWithKeyNormalization() &&
		this.serializer.getLength() > 0 && this.serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING)
	{
		this.sorter = new FixedLengthRecordSorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	} else {
		this.sorter = new NormalizedKeySorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	}

	if (LOG.isDebugEnabled()) {
		LOG.debug("SynchronousChainedCombineDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
	}
}
 
Example 4
Source File: GroupReduceCombineDriver.java    From Flink-CEPplus with Apache License 2.0
@Override
public void prepare() throws Exception {
	final DriverStrategy driverStrategy = this.taskContext.getTaskConfig().getDriverStrategy();
	if (driverStrategy != DriverStrategy.SORTED_GROUP_COMBINE) {
		throw new Exception("Invalid strategy " + driverStrategy + " for group reduce combiner.");
	}

	final TypeSerializerFactory<IN> serializerFactory = this.taskContext.getInputSerializer(0);
	this.serializer = serializerFactory.getSerializer();

	final TypeComparator<IN> sortingComparator = this.taskContext.getDriverComparator(0);
	
	this.groupingComparator = this.taskContext.getDriverComparator(1);
	this.combiner = this.taskContext.getStub();
	this.output = this.taskContext.getOutputCollector();

	MemoryManager memManager = this.taskContext.getMemoryManager();
	final int numMemoryPages = memManager.computeNumberOfPages(this.taskContext.getTaskConfig().getRelativeMemoryDriver());
	this.memory = memManager.allocatePages(this.taskContext.getContainingTask(), numMemoryPages);

	// instantiate a fixed-length in-place sorter if possible, otherwise the out-of-place sorter
	if (sortingComparator.supportsSerializationWithKeyNormalization() &&
			this.serializer.getLength() > 0 && this.serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING)
	{
		this.sorter = new FixedLengthRecordSorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	} else {
		this.sorter = new NormalizedKeySorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	}

	ExecutionConfig executionConfig = taskContext.getExecutionConfig();
	this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

	if (LOG.isDebugEnabled()) {
		LOG.debug("GroupReduceCombineDriver object reuse: {}.", (this.objectReuseEnabled ? "ENABLED" : "DISABLED"));
	}
}
 
Example 5
Source File: CombiningUnilateralSortMerger.java    From flink with Apache License 2.0
public CombiningSpillingThread(ExceptionHandler<IOException> exceptionHandler, CircularQueues<E> queues,
		AbstractInvokable parentTask, MemoryManager memManager, IOManager ioManager, 
		TypeSerializer<E> serializer, TypeComparator<E> comparator, 
		List<MemorySegment> sortReadMemory, List<MemorySegment> writeMemory, int maxNumFileHandles,
		boolean objectReuseEnabled)
{
	super(exceptionHandler, queues, parentTask, memManager, ioManager, serializer, comparator, 
		sortReadMemory, writeMemory, maxNumFileHandles);
	
	this.comparator2 = comparator.duplicate();
	this.objectReuseEnabled = objectReuseEnabled;
}
 
Example 6
Source File: SynchronousChainedCombineDriver.java    From flink with Apache License 2.0
@Override
public void openTask() throws Exception {
	// open the stub first
	final Configuration stubConfig = this.config.getStubParameters();
	BatchTask.openUserCode(this.combiner, stubConfig);

	// ----------------- Set up the sorter -------------------------

	// instantiate the serializer / comparator
	final TypeSerializerFactory<IN> serializerFactory = this.config.getInputSerializer(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> sortingComparatorFactory = this.config.getDriverComparator(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> groupingComparatorFactory = this.config.getDriverComparator(1, this.userCodeClassLoader);
	
	this.serializer = serializerFactory.getSerializer();

	TypeComparator<IN> sortingComparator = sortingComparatorFactory.createComparator();
	this.groupingComparator = groupingComparatorFactory.createComparator();
	
	MemoryManager memManager = this.parent.getEnvironment().getMemoryManager();
	final int numMemoryPages = memManager.computeNumberOfPages(this.config.getRelativeMemoryDriver());
	this.memory = memManager.allocatePages(this.parent, numMemoryPages);

	// instantiate a fixed-length in-place sorter if possible, otherwise the out-of-place sorter
	if (sortingComparator.supportsSerializationWithKeyNormalization() &&
		this.serializer.getLength() > 0 && this.serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING)
	{
		this.sorter = new FixedLengthRecordSorter<IN>(this.serializer, sortingComparator.duplicate(), this.memory);
	} else {
		this.sorter = new NormalizedKeySorter<IN>(this.serializer, sortingComparator.duplicate(), this.memory);
	}

	if (LOG.isDebugEnabled()) {
		LOG.debug("SynchronousChainedCombineDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
	}
}
 
Example 7
Source File: GroupCombineChainedDriver.java    From flink with Apache License 2.0
@Override
public void openTask() throws Exception {
	// open the stub first
	final Configuration stubConfig = this.config.getStubParameters();
	BatchTask.openUserCode(this.reducer, stubConfig);

	// ----------------- Set up the sorter -------------------------

	// instantiate the serializer / comparator
	final TypeSerializerFactory<IN> serializerFactory = this.config.getInputSerializer(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> sortingComparatorFactory = this.config.getDriverComparator(0, this.userCodeClassLoader);
	final TypeComparatorFactory<IN> groupingComparatorFactory = this.config.getDriverComparator(1, this.userCodeClassLoader);
	this.serializer = serializerFactory.getSerializer();
	
	TypeComparator<IN> sortingComparator = sortingComparatorFactory.createComparator();
	this.groupingComparator = groupingComparatorFactory.createComparator();

	MemoryManager memManager = this.parent.getEnvironment().getMemoryManager();
	final int numMemoryPages = memManager.computeNumberOfPages(this.config.getRelativeMemoryDriver());
	this.memory = memManager.allocatePages(this.parent, numMemoryPages);

	// instantiate a fixed-length in-place sorter if possible, otherwise the out-of-place sorter
	if (sortingComparator.supportsSerializationWithKeyNormalization() &&
		this.serializer.getLength() > 0 && this.serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING)
	{
		this.sorter = new FixedLengthRecordSorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	} else {
		this.sorter = new NormalizedKeySorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	}

	if (LOG.isDebugEnabled()) {
		LOG.debug("SynchronousChainedCombineDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
	}
}
 
Example 8
Source File: GroupReduceCombineDriver.java    From flink with Apache License 2.0
@Override
public void prepare() throws Exception {
	final DriverStrategy driverStrategy = this.taskContext.getTaskConfig().getDriverStrategy();
	if (driverStrategy != DriverStrategy.SORTED_GROUP_COMBINE) {
		throw new Exception("Invalid strategy " + driverStrategy + " for group reduce combiner.");
	}

	final TypeSerializerFactory<IN> serializerFactory = this.taskContext.getInputSerializer(0);
	this.serializer = serializerFactory.getSerializer();

	final TypeComparator<IN> sortingComparator = this.taskContext.getDriverComparator(0);
	
	this.groupingComparator = this.taskContext.getDriverComparator(1);
	this.combiner = this.taskContext.getStub();
	this.output = this.taskContext.getOutputCollector();

	MemoryManager memManager = this.taskContext.getMemoryManager();
	final int numMemoryPages = memManager.computeNumberOfPages(this.taskContext.getTaskConfig().getRelativeMemoryDriver());
	this.memory = memManager.allocatePages(this.taskContext.getContainingTask(), numMemoryPages);

	// instantiate a fixed-length in-place sorter if possible, otherwise the out-of-place sorter
	if (sortingComparator.supportsSerializationWithKeyNormalization() &&
			this.serializer.getLength() > 0 && this.serializer.getLength() <= THRESHOLD_FOR_IN_PLACE_SORTING)
	{
		this.sorter = new FixedLengthRecordSorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	} else {
		this.sorter = new NormalizedKeySorter<IN>(this.serializer, sortingComparator.duplicate(), memory);
	}

	ExecutionConfig executionConfig = taskContext.getExecutionConfig();
	this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

	if (LOG.isDebugEnabled()) {
		LOG.debug("GroupReduceCombineDriver object reuse: {}.", (this.objectReuseEnabled ? "ENABLED" : "DISABLED"));
	}
}
 
Example 9
Source File: RandomSortMergeOuterJoinTest.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked, rawtypes")
protected void testOuterJoinWithHighNumberOfCommonKeys(
		FlinkJoinType outerJoinType, int input1Size, int input1Duplicates, int input1ValueLength,
		float input1KeyDensity, int input2Size, int input2Duplicates, int input2ValueLength,
		float input2KeyDensity) {
	TypeComparator<Tuple2<Integer, String>> comparator1 = new TupleComparator<>(
			new int[]{0},
			new TypeComparator<?>[]{new IntComparator(true)},
			new TypeSerializer<?>[]{IntSerializer.INSTANCE}
	);
	TypeComparator<Tuple2<Integer, String>> comparator2 = new TupleComparator<>(
			new int[]{0},
			new TypeComparator<?>[]{new IntComparator(true)},
			new TypeSerializer<?>[]{IntSerializer.INSTANCE}
	);

	final int duplicateKey = 13;

	try {
		final TupleGenerator generator1 = new TupleGenerator(SEED1, 500, input1KeyDensity, input1ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);
		final TupleGenerator generator2 = new TupleGenerator(SEED2, 500, input2KeyDensity, input2ValueLength, KeyMode.SORTED_SPARSE, ValueMode.RANDOM_LENGTH, null);

		final TupleGeneratorIterator gen1Iter = new TupleGeneratorIterator(generator1, input1Size);
		final TupleGeneratorIterator gen2Iter = new TupleGeneratorIterator(generator2, input2Size);

		final TupleConstantValueIterator const1Iter = new TupleConstantValueIterator(duplicateKey, "LEFT String for Duplicate Keys", input1Duplicates);
		final TupleConstantValueIterator const2Iter = new TupleConstantValueIterator(duplicateKey, "RIGHT String for Duplicate Keys", input2Duplicates);

		final List<MutableObjectIterator<Tuple2<Integer, String>>> inList1 = new ArrayList<>();
		inList1.add(gen1Iter);
		inList1.add(const1Iter);

		final List<MutableObjectIterator<Tuple2<Integer, String>>> inList2 = new ArrayList<>();
		inList2.add(gen2Iter);
		inList2.add(const2Iter);

		MutableObjectIterator<Tuple2<Integer, String>> input1 = new MergeIterator<>(inList1, comparator1.duplicate());
		MutableObjectIterator<Tuple2<Integer, String>> input2 = new MergeIterator<>(inList2, comparator2.duplicate());

		// collect expected data
		final Map<Integer, Collection<Match>> expectedMatchesMap = joinValues(
				RandomSortMergeInnerJoinTest.collectData(input1),
				RandomSortMergeInnerJoinTest.collectData(input2),
				outerJoinType);

		// re-create the whole thing for actual processing

		// reset the generators and iterators
		generator1.reset();
		generator2.reset();
		const1Iter.reset();
		const2Iter.reset();
		gen1Iter.reset();
		gen2Iter.reset();

		inList1.clear();
		inList1.add(gen1Iter);
		inList1.add(const1Iter);

		inList2.clear();
		inList2.add(gen2Iter);
		inList2.add(const2Iter);

		input1 = new MergeIterator<>(inList1, comparator1.duplicate());
		input2 = new MergeIterator<>(inList2, comparator2.duplicate());

		StreamOperator operator = getOperator(outerJoinType);
		RandomSortMergeInnerJoinTest.match(expectedMatchesMap,
				RandomSortMergeInnerJoinTest.transformToBinary(myJoin(operator, input1, input2)));

		// assert that each expected match was seen
		for (Entry<Integer, Collection<Match>> entry : expectedMatchesMap.entrySet()) {
			if (!entry.getValue().isEmpty()) {
				Assert.fail("Collection for key " + entry.getKey() + " is not empty");
			}
		}
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("An exception occurred during the test: " + e.getMessage());
	}
}
 