org.apache.flink.runtime.io.disk.ChannelReaderInputViewIterator Java Examples

The following examples show how to use org.apache.flink.runtime.io.disk.ChannelReaderInputViewIterator. Each example is taken from an open-source project; the source file and project it comes from are noted above the example, so you can go back to the original code for full context.
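Before the project examples, here is a minimal usage sketch of the class itself: a BlockChannelReader over a spilled channel is wrapped in a ChannelReaderInputView, so that records spanning block boundaries are deserialized transparently, and the view is then wrapped in a ChannelReaderInputViewIterator that returns records one at a time. The helper name drainChannel is illustrative, and the reader, read memory, and serializer are assumed to be supplied by the caller; treat this as a sketch, not Flink code.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.io.disk.ChannelReaderInputViewIterator;
import org.apache.flink.runtime.io.disk.iomanager.BlockChannelReader;
import org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView;

public final class ChannelDrainSketch {

	/** Reads all records of a spilled channel back into a list (illustrative helper). */
	static <T> List<T> drainChannel(
			BlockChannelReader<MemorySegment> reader,
			List<MemorySegment> readMemory,
			TypeSerializer<T> serializer) throws IOException {

		// Wrap the block reader as an input view; 'false' = do not block waiting for the first block.
		ChannelReaderInputView view = new ChannelReaderInputView(reader, readMemory, false);

		// The iterator deserializes one record per next() call, reusing the given record object.
		ChannelReaderInputViewIterator<T> iterator =
				new ChannelReaderInputViewIterator<>(view, null, serializer);

		List<T> result = new ArrayList<>();
		T reuse = serializer.createInstance();
		T record;
		while ((record = iterator.next(reuse)) != null) {
			// Copy the record, because 'reuse' is overwritten by the next call.
			result.add(serializer.copy(record));
			reuse = record;
		}
		return result;
	}

	private ChannelDrainSketch() {}
}

When the iterator runs out of records it closes the underlying view; passing a list as the second constructor argument (instead of null) collects the freed memory segments there.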
Example #1
Source File: UnilateralSortMerger.java    From Flink-CEPplus with Apache License 2.0
/**
 * Returns an iterator that iterates over the merged result from all given channels.
 * 
 * @param channelIDs The channels that are to be merged and returned.
 * @param inputSegments The buffers to be used for reading. The list contains for each channel one
 *                      list of input segments. The size of the <code>inputSegments</code> list must be equal to
 *                      that of the <code>channelIDs</code> list.
 * @return An iterator over the merged records of the input channels.
 * @throws IOException Thrown, if the readers encounter an I/O problem.
 */
protected final MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs,
		final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords)
	throws IOException
{
	// create one iterator per channel id
	if (LOG.isDebugEnabled()) {
		LOG.debug("Performing merge of " + channelIDs.size() + " sorted streams.");
	}
	
	final List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(channelIDs.size() + 1);
	
	for (int i = 0; i < channelIDs.size(); i++) {
		final ChannelWithBlockCount channel = channelIDs.get(i);
		final List<MemorySegment> segsForChannel = inputSegments.get(i);
		
		// create a reader. if there are multiple segments for the reader, issue multiple together per I/O request
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel());
			
		readerList.add(reader);
		registerOpenChannelToBeRemovedAtShudown(reader);
		unregisterChannelToBeRemovedAtShudown(channel.getChannel());
		
		// wrap channel reader as a view, to get block spanning record deserialization
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, 
																	channel.getBlockCount(), false);
		iterators.add(new ChannelReaderInputViewIterator<E>(inView, null, this.serializer));
	}
	
	if (largeRecords != null) {
		iterators.add(largeRecords);
	}

	return new MergeIterator<E>(iterators, this.comparator);
}
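Each spilled channel above gets its own ChannelReaderInputViewIterator, and the MergeIterator performs the k-way merge over them using the type's comparator (the optional largeRecords iterator is simply merged in as one more input). As a hedged sketch of how such a merged stream is then consumed, assuming a MutableObjectIterator<E> and a TypeSerializer<E> are available to the caller (the method name consumeMerged is illustrative):

import java.io.IOException;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.util.MutableObjectIterator;

final class MergedStreamSketch {

	/** Drains a merged record stream, e.g. the result of getMergingIterator(...), and counts the records. */
	static <E> long consumeMerged(MutableObjectIterator<E> merged, TypeSerializer<E> serializer)
			throws IOException {
		long count = 0;
		E reuse = serializer.createInstance();
		E record;
		while ((record = merged.next(reuse)) != null) {
			// 'record' is reused on the next call; copy it with serializer.copy(record) if it must be kept.
			count++;
			reuse = record;
		}
		return count;
	}

	private MergedStreamSketch() {}
}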
 
Example #2
Source File: UnilateralSortMerger.java    From flink with Apache License 2.0
/**
 * Returns an iterator that iterates over the merged result from all given channels.
 * 
 * @param channelIDs The channels that are to be merged and returned.
 * @param inputSegments The buffers to be used for reading. The list contains for each channel one
 *                      list of input segments. The size of the <code>inputSegments</code> list must be equal to
 *                      that of the <code>channelIDs</code> list.
 * @return An iterator over the merged records of the input channels.
 * @throws IOException Thrown, if the readers encounter an I/O problem.
 */
protected final MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs,
		final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords)
	throws IOException
{
	// create one iterator per channel id
	if (LOG.isDebugEnabled()) {
		LOG.debug("Performing merge of " + channelIDs.size() + " sorted streams.");
	}
	
	final List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(channelIDs.size() + 1);
	
	for (int i = 0; i < channelIDs.size(); i++) {
		final ChannelWithBlockCount channel = channelIDs.get(i);
		final List<MemorySegment> segsForChannel = inputSegments.get(i);
		
		// create a reader. if there are multiple segments for the reader, issue multiple together per I/O request
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel());
			
		readerList.add(reader);
		registerOpenChannelToBeRemovedAtShudown(reader);
		unregisterChannelToBeRemovedAtShudown(channel.getChannel());
		
		// wrap channel reader as a view, to get block spanning record deserialization
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, 
																	channel.getBlockCount(), false);
		iterators.add(new ChannelReaderInputViewIterator<E>(inView, null, this.serializer));
	}
	
	if (largeRecords != null) {
		iterators.add(largeRecords);
	}

	return new MergeIterator<E>(iterators, this.comparator);
}
 
Example #3
Source File: UnilateralSortMerger.java    From flink with Apache License 2.0
/**
 * Returns an iterator that iterates over the merged result from all given channels.
 * 
 * @param channelIDs The channels that are to be merged and returned.
 * @param inputSegments The buffers to be used for reading. The list contains for each channel one
 *                      list of input segments. The size of the <code>inputSegments</code> list must be equal to
 *                      that of the <code>channelIDs</code> list.
 * @return An iterator over the merged records of the input channels.
 * @throws IOException Thrown, if the readers encounter an I/O problem.
 */
protected final MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs,
		final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords)
	throws IOException
{
	// create one iterator per channel id
	if (LOG.isDebugEnabled()) {
		LOG.debug("Performing merge of " + channelIDs.size() + " sorted streams.");
	}
	
	final List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(channelIDs.size() + 1);
	
	for (int i = 0; i < channelIDs.size(); i++) {
		final ChannelWithBlockCount channel = channelIDs.get(i);
		final List<MemorySegment> segsForChannel = inputSegments.get(i);
		
		// create a reader. if there are multiple segments for the reader, issue multiple together per I/O request
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel());
			
		readerList.add(reader);
		registerOpenChannelToBeRemovedAtShudown(reader);
		unregisterChannelToBeRemovedAtShudown(channel.getChannel());
		
		// wrap channel reader as a view, to get block spanning record deserialization
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, 
																	channel.getBlockCount(), false);
		iterators.add(new ChannelReaderInputViewIterator<E>(inView, null, this.serializer));
	}
	
	if (largeRecords != null) {
		iterators.add(largeRecords);
	}

	return new MergeIterator<E>(iterators, this.comparator);
}
 
Example #4
Source File: FixedLengthRecordSorterTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testFlushFullMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 0, NUM_RECORDS);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 0;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
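The two sorter tests above and their variants below follow the same round trip: spill the sorted records through a ChannelWriterOutputView, then read them back through a ChannelReaderInputView wrapped in a ChannelReaderInputViewIterator (exactly the drain pattern sketched at the top of this page). The write half of that round trip, condensed into a hedged helper (spill is an illustrative name; the IOManager, write memory, and serializer are assumed to be provided by the caller):

import java.io.IOException;
import java.util.List;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.io.disk.iomanager.BlockChannelWriter;
import org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView;
import org.apache.flink.runtime.io.disk.iomanager.FileIOChannel;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;

final class SpillSketch {

	/** Serializes the given records into a new channel and returns the channel ID. */
	static <T> FileIOChannel.ID spill(
			IOManager ioManager,
			List<MemorySegment> writeMemory,
			TypeSerializer<T> serializer,
			Iterable<T> records) throws IOException {

		FileIOChannel.ID channel = ioManager.createChannelEnumerator().next();
		BlockChannelWriter<MemorySegment> writer = ioManager.createBlockChannelWriter(channel);
		ChannelWriterOutputView out =
				new ChannelWriterOutputView(writer, writeMemory, writeMemory.get(0).size());

		for (T record : records) {
			// Records may span block boundaries; the output view handles the paging.
			serializer.serialize(record, out);
		}
		out.close();   // flushes the last block and hands the write memory back to the caller
		return channel;
	}

	private SpillSketch() {}
}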
 
Example #5
Source File: FixedLengthRecordSorterTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testFlushPartialMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 1, NUM_RECORDS - 1);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 1;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #6
Source File: BinaryExternalMerger.java    From flink with Apache License 2.0
@Override
protected MutableObjectIterator<BinaryRow> channelReaderInputViewIterator(AbstractChannelReaderInputView inView) {
	return new ChannelReaderInputViewIterator<>(inView, null, serializer.duplicate());
}
 
Example #7
Source File: ProbeIterator.java    From flink with Apache License 2.0
public void set(ChannelReaderInputViewIterator<BinaryRow> source) {
	this.source = source;
}
 
Example #8
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushFullMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 0, NUM_RECORDS);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 0;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #9
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushPartialMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 1, NUM_RECORDS - 1);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 1;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #10
Source File: BinaryExternalMerger.java    From flink with Apache License 2.0
@Override
protected MutableObjectIterator<BinaryRowData> channelReaderInputViewIterator(AbstractChannelReaderInputView inView) {
	return new ChannelReaderInputViewIterator<>(inView, null, serializer.duplicate());
}
 
Example #11
Source File: ProbeIterator.java    From flink with Apache License 2.0
public void set(ChannelReaderInputViewIterator<BinaryRowData> source) {
	this.source = source;
}
 
Example #12
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushFullMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 0, NUM_RECORDS);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 0;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
 
Example #13
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0
@Test
public void testFlushPartialMemoryPage() throws Exception {
	// Insert IntPair which would fill 2 memory pages.
	final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

	FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
	UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

	// write the records
	IntPair record = new IntPair();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record) && num < NUM_RECORDS);

	FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
	BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
	final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

	sorter.writeToOutput(outputView, 1, NUM_RECORDS - 1);

	this.memoryManager.release(outputView.close());

	BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
	final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
	final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
	ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator<>(readerInputView, dataBuffer, this.serializer);

	record = iterator.next(record);
	int i = 1;
	while (record != null) {
		Assert.assertEquals(i, record.getKey());
		record = iterator.next(record);
		i++;
	}

	Assert.assertEquals(NUM_RECORDS, i);

	this.memoryManager.release(dataBuffer);
	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}