org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView Java Examples

The following examples show how to use org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView. Each example lists the project and source file it was taken from.
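Before the individual examples, a minimal end-to-end sketch may help: it spills integers through a ChannelWriterOutputView backed by an IOManagerAsync and reads them back with a ChannelReaderInputView, mirroring the ChannelViewsTest examples below. The page size, the segment allocation via MemorySegmentFactory, and the try-with-resources around IOManagerAsync (older Flink releases shut the IOManager down via shutdown() instead) are assumptions, not part of the examples; a real operator would obtain its segments from the task's MemoryManager.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.runtime.io.disk.iomanager.BlockChannelReader;
import org.apache.flink.runtime.io.disk.iomanager.BlockChannelWriter;
import org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView;
import org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView;
import org.apache.flink.runtime.io.disk.iomanager.FileIOChannel;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;

public class ChannelWriterOutputViewSketch {

	public static void main(String[] args) throws Exception {
		final int pageSize = 32 * 1024;
		final int numPages = 4;
		final int numRecords = 100_000;

		try (IOManagerAsync ioManager = new IOManagerAsync()) {
			// write buffers; a real task would request these from its MemoryManager
			List<MemorySegment> writeMemory = new ArrayList<>();
			for (int i = 0; i < numPages; i++) {
				writeMemory.add(MemorySegmentFactory.allocateUnpooledSegment(pageSize));
			}

			// create a spill channel and wrap its block writer in a ChannelWriterOutputView
			FileIOChannel.ID channel = ioManager.createChannel();
			BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
			ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, writeMemory, pageSize);

			// the view is a DataOutputView, so records can be written directly or through a TypeSerializer
			for (int i = 0; i < numRecords; i++) {
				outView.writeInt(i);
			}
			outView.close(); // flushes the last block and returns the memory segments

			// read everything back, bounded by the number of blocks that were written
			List<MemorySegment> readMemory = new ArrayList<>();
			for (int i = 0; i < numPages; i++) {
				readMemory.add(MemorySegmentFactory.allocateUnpooledSegment(pageSize));
			}
			BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
			ChannelReaderInputView inView = new ChannelReaderInputView(reader, readMemory, outView.getBlockCount(), true);
			for (int i = 0; i < numRecords; i++) {
				if (inView.readInt() != i) {
					throw new IllegalStateException("read back an unexpected value");
				}
			}
			inView.close();
			reader.deleteChannel(); // removes the temporary spill file
		}
	}
}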
Example #1
Source File: HashPartition.java    From Flink-CEPplus with Apache License 2.0
public void finalizeBuildPhase(IOManager ioAccess, FileIOChannel.Enumerator probeChannelEnumerator,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue)
throws IOException
{
	this.finalBufferLimit = this.buildSideWriteBuffer.getCurrentPositionInSegment();
	this.partitionBuffers = this.buildSideWriteBuffer.close();
	
	if (!isInMemory()) {
		// close the channel. note that in the spilled case, the build-side-buffer will have sent off
		// the last segment and it will be returned to the write-behind-buffer queue.
		this.buildSideChannel.close();
		
		// create the channel for the probe side and claim one buffer for it
		this.probeSideChannel = ioAccess.createBlockChannelWriter(probeChannelEnumerator.next(), bufferReturnQueue);
		// creating the ChannelWriterOutputView without memory will cause it to draw one segment from the
		// write behind queue, which is the spare segment we had above.
		this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);
	}
}
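Note the constructor variant used here: the probe-side view is created with only a segment size, so it pulls its working segment from the write-behind queue attached to the block channel writer, exactly as the comment above describes. The test examples further down instead hand the view an explicit list of memory segments. A short side-by-side sketch for contrast (variable names taken from the surrounding examples):

	// variant used above: no memory list, the view draws one spare segment
	// from the writer's return (write-behind) queue
	this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);

	// variant used in the ChannelViewsTest examples below: the caller supplies
	// the memory segments the view cycles through while writing blocks
	ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);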
 
Example #2
Source File: HashPartition.java    From flink with Apache License 2.0
public void finalizeBuildPhase(IOManager ioAccess, FileIOChannel.Enumerator probeChannelEnumerator,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue)
throws IOException
{
	this.finalBufferLimit = this.buildSideWriteBuffer.getCurrentPositionInSegment();
	this.partitionBuffers = this.buildSideWriteBuffer.close();
	
	if (!isInMemory()) {
		// close the channel. note that in the spilled case, the build-side-buffer will have sent off
		// the last segment and it will be returned to the write-behind-buffer queue.
		this.buildSideChannel.close();
		
		// create the channel for the probe side and claim one buffer for it
		this.probeSideChannel = ioAccess.createBlockChannelWriter(probeChannelEnumerator.next(), bufferReturnQueue);
		// creating the ChannelWriterOutputView without memory will cause it to draw one segment from the
		// write behind queue, which is the spare segment we had above.
		this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);
	}
}
 
Example #3
Source File: HashPartition.java    From flink with Apache License 2.0
public void finalizeBuildPhase(IOManager ioAccess, FileIOChannel.Enumerator probeChannelEnumerator,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue)
throws IOException
{
	this.finalBufferLimit = this.buildSideWriteBuffer.getCurrentPositionInSegment();
	this.partitionBuffers = this.buildSideWriteBuffer.close();
	
	if (!isInMemory()) {
		// close the channel. note that in the spilled case, the build-side-buffer will have sent off
		// the last segment and it will be returned to the write-behind-buffer queue.
		this.buildSideChannel.close();
		
		// create the channel for the probe side and claim one buffer for it
		this.probeSideChannel = ioAccess.createBlockChannelWriter(probeChannelEnumerator.next(), bufferReturnQueue);
		// creating the ChannelWriterOutputView without memory will cause it to draw one segment from the
		// write behind queue, which is the spare segment we had above.
		this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);
	}
}
 
Example #4
Source File: HashPartition.java    From Flink-CEPplus with Apache License 2.0
public void prepareProbePhase(IOManager ioAccess, FileIOChannel.Enumerator probeChannelEnumerator,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue) throws IOException {
	if (isInMemory()) {
		return;
	}
	// ATTENTION: The following lines are duplicated code from finalizeBuildPhase
	this.probeSideChannel = ioAccess.createBlockChannelWriter(probeChannelEnumerator.next(), bufferReturnQueue);
	this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);
}
 
Example #5
Source File: FixedLengthRecordSorter.java    From Flink-CEPplus with Apache License 2.0
/**
 * Writes a subset of the records in this buffer in their logical order to the given output.
 * 
 * @param output The output view to write the records to.
 * @param start The logical start position of the subset.
 * @param num The number of elements to write.
 * @throws IOException Thrown, if an I/O exception occurred writing to the output view.
 */
@Override
public void writeToOutput(final ChannelWriterOutputView output, final int start, int num) throws IOException {
	final TypeComparator<T> comparator = this.comparator;
	final TypeSerializer<T> serializer = this.serializer;
	T record = this.recordInstance;
	
	final SingleSegmentInputView inView = this.inView;
	
	final int recordsPerSegment = this.recordsPerSegment;
	int currentMemSeg = start / recordsPerSegment;
	int offset = (start % recordsPerSegment) * this.recordSize;
	
	while (num > 0) {
		final MemorySegment currentIndexSegment = this.sortBuffer.get(currentMemSeg++);
		inView.set(currentIndexSegment, offset);
		
		// check whether we have a full or partially full segment
		if (num >= recordsPerSegment && offset == 0) {
			// full segment
			for (int numInMemSeg = 0; numInMemSeg < recordsPerSegment; numInMemSeg++) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
			num -= recordsPerSegment;
		} else {
			// partially filled segment
			for (; num > 0 && offset <= this.lastEntryOffset; num--, offset += this.recordSize) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
		}

		offset = 0;
	}
}
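As a quick worked illustration of the index arithmetic at the top of this method (numbers chosen only for the example): with recordsPerSegment = 128 and recordSize = 8 bytes, a logical start position of 300 maps to sort-buffer segment 300 / 128 = 2 and byte offset (300 % 128) * 8 = 352 inside it; after that first, partially consumed segment, offset is reset to 0 so every following segment is read from its beginning.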
 
Example #6
Source File: FixedLengthRecordSorter.java    From Flink-CEPplus with Apache License 2.0
/**
 * Writes the records in this buffer in their logical order to the given output.
 * 
 * @param output The output view to write the records to.
 * @throws IOException Thrown, if an I/O exception occurred writing to the output view.
 */
@Override
public void writeToOutput(final ChannelWriterOutputView output) throws IOException {
	final TypeComparator<T> comparator = this.comparator;
	final TypeSerializer<T> serializer = this.serializer;
	T record = this.recordInstance;
	
	final SingleSegmentInputView inView = this.inView;
	
	final int recordsPerSegment = this.recordsPerSegment;
	int recordsLeft = this.numRecords;
	int currentMemSeg = 0;
	
	while (recordsLeft > 0) {
		final MemorySegment currentIndexSegment = this.sortBuffer.get(currentMemSeg++);
		inView.set(currentIndexSegment, 0);
		
		// check whether we have a full or partially full segment
		if (recordsLeft >= recordsPerSegment) {
			// full segment
			for (int numInMemSeg = 0; numInMemSeg < recordsPerSegment; numInMemSeg++) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
			recordsLeft -= recordsPerSegment;
		} else {
			// partially filled segment
			for (; recordsLeft > 0; recordsLeft--) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
		}
	}
}
 
Example #7
Source File: FixedLengthRecordSorter.java    From flink with Apache License 2.0
/**
 * Writes a subset of the records in this buffer in their logical order to the given output.
 * 
 * @param output The output view to write the records to.
 * @param start The logical start position of the subset.
 * @param num The number of elements to write.
 * @throws IOException Thrown, if an I/O exception occurred writing to the output view.
 */
@Override
public void writeToOutput(final ChannelWriterOutputView output, final int start, int num) throws IOException {
	final TypeComparator<T> comparator = this.comparator;
	final TypeSerializer<T> serializer = this.serializer;
	T record = this.recordInstance;
	
	final SingleSegmentInputView inView = this.inView;
	
	final int recordsPerSegment = this.recordsPerSegment;
	int currentMemSeg = start / recordsPerSegment;
	int offset = (start % recordsPerSegment) * this.recordSize;
	
	while (num > 0) {
		final MemorySegment currentIndexSegment = this.sortBuffer.get(currentMemSeg++);
		inView.set(currentIndexSegment, offset);
		
		// check whether we have a full or partially full segment
		if (num >= recordsPerSegment && offset == 0) {
			// full segment
			for (int numInMemSeg = 0; numInMemSeg < recordsPerSegment; numInMemSeg++) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
			num -= recordsPerSegment;
		} else {
			// partially filled segment
			for (; num > 0 && offset <= this.lastEntryOffset; num--, offset += this.recordSize) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
		}

		offset = 0;
	}
}
 
Example #8
Source File: FixedLengthRecordSorter.java    From flink with Apache License 2.0
/**
 * Writes the records in this buffer in their logical order to the given output.
 * 
 * @param output The output view to write the records to.
 * @throws IOException Thrown, if an I/O exception occurred writing to the output view.
 */
@Override
public void writeToOutput(final ChannelWriterOutputView output) throws IOException {
	final TypeComparator<T> comparator = this.comparator;
	final TypeSerializer<T> serializer = this.serializer;
	T record = this.recordInstance;
	
	final SingleSegmentInputView inView = this.inView;
	
	final int recordsPerSegment = this.recordsPerSegment;
	int recordsLeft = this.numRecords;
	int currentMemSeg = 0;
	
	while (recordsLeft > 0) {
		final MemorySegment currentIndexSegment = this.sortBuffer.get(currentMemSeg++);
		inView.set(currentIndexSegment, 0);
		
		// check whether we have a full or partially full segment
		if (recordsLeft >= recordsPerSegment) {
			// full segment
			for (int numInMemSeg = 0; numInMemSeg < recordsPerSegment; numInMemSeg++) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
			recordsLeft -= recordsPerSegment;
		} else {
			// partially filled segment
			for (; recordsLeft > 0; recordsLeft--) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
		}
	}
}
 
Example #9
Source File: HashPartition.java    From flink with Apache License 2.0
public void prepareProbePhase(IOManager ioAccess, FileIOChannel.Enumerator probeChannelEnumerator,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue) throws IOException {
	if (isInMemory()) {
		return;
	}
	// ATTENTION: The following lines are duplicated code from finalizeBuildPhase
	this.probeSideChannel = ioAccess.createBlockChannelWriter(probeChannelEnumerator.next(), bufferReturnQueue);
	this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);
}
 
Example #10
Source File: HashPartition.java    From flink with Apache License 2.0
public void prepareProbePhase(IOManager ioAccess, FileIOChannel.Enumerator probeChannelEnumerator,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue) throws IOException {
	if (isInMemory()) {
		return;
	}
	// ATTENTION: The following lines are duplicated code from finalizeBuildPhase
	this.probeSideChannel = ioAccess.createBlockChannelWriter(probeChannelEnumerator.next(), bufferReturnQueue);
	this.probeSideBuffer = new ChannelWriterOutputView(this.probeSideChannel, this.memorySegmentSize);
}
 
Example #11
Source File: FixedLengthRecordSorter.java    From flink with Apache License 2.0
/**
 * Writes a subset of the records in this buffer in their logical order to the given output.
 * 
 * @param output The output view to write the records to.
 * @param start The logical start position of the subset.
 * @param num The number of elements to write.
 * @throws IOException Thrown, if an I/O exception occurred writing to the output view.
 */
@Override
public void writeToOutput(final ChannelWriterOutputView output, final int start, int num) throws IOException {
	final TypeComparator<T> comparator = this.comparator;
	final TypeSerializer<T> serializer = this.serializer;
	T record = this.recordInstance;
	
	final SingleSegmentInputView inView = this.inView;
	
	final int recordsPerSegment = this.recordsPerSegment;
	int currentMemSeg = start / recordsPerSegment;
	int offset = (start % recordsPerSegment) * this.recordSize;
	
	while (num > 0) {
		final MemorySegment currentIndexSegment = this.sortBuffer.get(currentMemSeg++);
		inView.set(currentIndexSegment, offset);
		
		// check whether we have a full or partially full segment
		if (num >= recordsPerSegment && offset == 0) {
			// full segment
			for (int numInMemSeg = 0; numInMemSeg < recordsPerSegment; numInMemSeg++) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
			num -= recordsPerSegment;
		} else {
			// partially filled segment
			for (; num > 0 && offset <= this.lastEntryOffset; num--, offset += this.recordSize) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
		}

		offset = 0;
	}
}
 
Example #12
Source File: FixedLengthRecordSorter.java    From flink with Apache License 2.0
/**
 * Writes the records in this buffer in their logical order to the given output.
 * 
 * @param output The output view to write the records to.
 * @throws IOException Thrown, if an I/O exception occurred writing to the output view.
 */
@Override
public void writeToOutput(final ChannelWriterOutputView output) throws IOException {
	final TypeComparator<T> comparator = this.comparator;
	final TypeSerializer<T> serializer = this.serializer;
	T record = this.recordInstance;
	
	final SingleSegmentInputView inView = this.inView;
	
	final int recordsPerSegment = this.recordsPerSegment;
	int recordsLeft = this.numRecords;
	int currentMemSeg = 0;
	
	while (recordsLeft > 0) {
		final MemorySegment currentIndexSegment = this.sortBuffer.get(currentMemSeg++);
		inView.set(currentIndexSegment, 0);
		
		// check whether we have a full or partially full segment
		if (recordsLeft >= recordsPerSegment) {
			// full segment
			for (int numInMemSeg = 0; numInMemSeg < recordsPerSegment; numInMemSeg++) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
			recordsLeft -= recordsPerSegment;
		} else {
			// partially filled segment
			for (; recordsLeft > 0; recordsLeft--) {
				record = comparator.readWithKeyDenormalization(record, inView);
				serializer.serialize(record, output);
			}
		}
	}
}
 
Example #13
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testReadTooMany() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());

	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();

	// read and re-generate all records and compare them
	try {
		final Tuple2<Integer, String> readRec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT + 1; i++) {
			generator.next(rec);
			serializer.deserialize(readRec, inView);
			final int k1 = rec.f0;
			final String v1 = rec.f1;
			final int k2 = readRec.f0;
			final String v2 = readRec.f1;
			Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
		}
		Assert.fail("Expected an EOFException which did not occur.");
	}
	catch (EOFException eofex) {
		// expected
	}
	catch (Throwable t) {
		// unexpected
		Assert.fail("Unexpected Exception: " + t.getMessage());
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
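A brief note on the pattern above: the reader is constructed with outView.getBlockCount(), so the ChannelReaderInputView serves exactly the blocks that were written, and attempting to deserialize one record more than was spilled surfaces as an EOFException instead of reading past the end of the file, which is precisely what this test asserts.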
 
Example #14
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testWriteReadOneBufferOnly() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, 1);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, 1);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #15
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testWriteReadNotAll() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT / 2; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #16
Source File: NormalizedKeySorter.java    From flink with Apache License 2.0
@Override
public void writeToOutput(ChannelWriterOutputView output, LargeRecordHandler<T> largeRecordsOutput)
		throws IOException
{
	if (LOG.isDebugEnabled()) {
		if (largeRecordsOutput == null) {
			LOG.debug("Spilling sort buffer without large record handling.");
		} else {
			LOG.debug("Spilling sort buffer with large record handling.");
		}
	}
	
	final int numRecords = this.numRecords;
	int currentMemSeg = 0;
	int currentRecord = 0;
	
	while (currentRecord < numRecords) {
		final MemorySegment currentIndexSegment = this.sortIndex.get(currentMemSeg++);

		// go through all records in the memory segment
		for (int offset = 0; currentRecord < numRecords && offset <= this.lastIndexEntryOffset; currentRecord++, offset += this.indexEntrySize) {
			final long pointer = currentIndexSegment.getLong(offset);
			
			// small records go into the regular spill file, large records into the special code path
			if (pointer >= 0 || largeRecordsOutput == null) {
				this.recordBuffer.setReadPosition(pointer);
				this.serializer.copy(this.recordBuffer, output);
			}
			else {
				
				if (LOG.isDebugEnabled()) {
					LOG.debug("Spilling large record to large record fetch file.");
				}
				
				this.recordBuffer.setReadPosition(pointer & POINTER_MASK);
				T record = this.serializer.deserialize(this.recordBuffer);
				largeRecordsOutput.addRecord(record);
			}
		}
	}
}
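As the comments in this example indicate, the sort index encodes the large-record flag in the pointer itself: a non-negative pointer is copied along the regular spill path into the ChannelWriterOutputView, while a negative pointer marks a large record whose actual read position is recovered by masking with POINTER_MASK before the deserialized record is handed to the LargeRecordHandler.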
 
Example #17
Source File: FixedLengthRecordSorter.java    From flink with Apache License 2.0
@Override
public void writeToOutput(ChannelWriterOutputView output, LargeRecordHandler<T> largeRecordsOutput)
		throws IOException
{
	writeToOutput(output);
}
 
Example #18
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushPartialMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 1, NUM_RECORDS - 1);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 1;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #19
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushFullMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 0, NUM_RECORDS);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 0;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #20
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testWriteReadSmallRecords() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #21
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testReadTooMany() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());

	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();

	// read and re-generate all records and compare them
	try {
		final Tuple2<Integer, String> readRec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT + 1; i++) {
			generator.next(rec);
			serializer.deserialize(readRec, inView);
			final int k1 = rec.f0;
			final String v1 = rec.f1;
			final int k2 = readRec.f0;
			final String v2 = readRec.f1;
			Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
		}
		Assert.fail("Expected an EOFException which did not occur.");
	}
	catch (EOFException eofex) {
		// expected
	}
	catch (Throwable t) {
		// unexpected
		Assert.fail("Unexpected Exception: " + t.getMessage());
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #22
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testReadWithoutKnownBlockCount() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #23
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testWriteAndReadLongRecords() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LONG_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_LONG; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_LONG; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		final int k1 = rec.f0;
		final String v1 = rec.f1;
		final int k2 = readRec.f0;
		final String v2 = readRec.f1;
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #24
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testWriteReadSmallRecords() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #25
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushPartialMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 1, NUM_RECORDS - 1);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 1;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #26
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushFullMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 0, NUM_RECORDS);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 0;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #27
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testWriteReadOneBufferOnly() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, 1);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, 1);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #28
Source File: FixedLengthRecordSorter.java    From flink with Apache License 2.0
@Override
public void writeToOutput(ChannelWriterOutputView output, LargeRecordHandler<T> largeRecordsOutput)
		throws IOException
{
	writeToOutput(output);
}
 
Example #29
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testWriteReadNotAll() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT / 2; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}
 
Example #30
Source File: ChannelViewsTest.java    From flink with Apache License 2.0
@Test
public void testReadWithoutKnownBlockCount() throws Exception
{
	final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();
	
	// create the writer output view
	List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Tuple2<Integer, String> rec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.serialize(rec, outView);
	}
	this.memoryManager.release(outView.close());
	
	// create the reader input view
	memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
	final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
	final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, true);
	generator.reset();
	
	// read and re-generate all records and compare them
	final Tuple2<Integer, String> readRec = new Tuple2<>();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(rec);
		serializer.deserialize(readRec, inView);
		
		int k1 = rec.f0;
		String v1 = rec.f1;
		
		int k2 = readRec.f0;
		String v2 = readRec.f1;
		
		Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
	}
	
	this.memoryManager.release(inView.close());
	reader.deleteChannel();
}