org.apache.flink.runtime.io.disk.iomanager.BlockChannelReader Java Examples

The following examples show how to use org.apache.flink.runtime.io.disk.iomanager.BlockChannelReader. You can vote up the examples you like or vote down the ones you don't, and go to the original project or source file by following the links above each example. You can also check out the related API usage in the sidebar.
Example #1
Source File: BaseHybridHashTable.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Reads back all {@code blockCount} blocks that were spilled to the given channel
 * and returns them as memory segments. The channel is closed and its file deleted
 * afterwards — also when a read request fails, so the spill file cannot leak.
 *
 * @param id the spill channel to read from
 * @param blockCount the number of blocks that were written to the channel
 * @return the segments holding the read blocks, in block order
 * @throws IOException if creating the reader or issuing a read request fails
 */
protected List<MemorySegment> readAllBuffers(FileIOChannel.ID id, int blockCount) throws IOException {
	// we are guaranteed to stay in memory
	ensureNumBuffersReturned(blockCount);

	LinkedBlockingQueue<MemorySegment> retSegments = new LinkedBlockingQueue<>();
	BlockChannelReader<MemorySegment> reader = FileChannelUtil.createBlockChannelReader(
			ioManager, id, retSegments,
			compressionEnable, compressionCodecFactory, compressionBlockSize, segmentSize);
	try {
		for (int i = 0; i < blockCount; i++) {
			reader.readBlock(internalPool.nextSegment());
		}
	} finally {
		// close (and delete the file) even when a readBlock call above failed;
		// previously an exception here leaked the open channel and its file
		reader.closeAndDelete();
	}

	// presized: exactly blockCount segments are drained back
	final List<MemorySegment> buffers = new ArrayList<>(blockCount);
	retSegments.drainTo(buffers);
	return buffers;
}
 
Example #2
Source File: BaseHybridHashTable.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Reads back all {@code blockCount} blocks that were spilled to the given channel
 * and returns them as memory segments. The channel is closed and its file deleted
 * afterwards — also when a read request fails, so the spill file cannot leak.
 *
 * @param id the spill channel to read from
 * @param blockCount the number of blocks that were written to the channel
 * @return the segments holding the read blocks, in block order
 * @throws IOException if creating the reader or issuing a read request fails
 */
protected List<MemorySegment> readAllBuffers(FileIOChannel.ID id, int blockCount) throws IOException {
	// we are guaranteed to stay in memory
	ensureNumBuffersReturned(blockCount);

	LinkedBlockingQueue<MemorySegment> retSegments = new LinkedBlockingQueue<>();
	BlockChannelReader<MemorySegment> reader = FileChannelUtil.createBlockChannelReader(
			ioManager, id, retSegments,
			compressionEnable, compressionCodecFactory, compressionBlockSize, segmentSize);
	try {
		for (int i = 0; i < blockCount; i++) {
			// take target segments from the tail of the free-memory list
			reader.readBlock(availableMemory.remove(availableMemory.size() - 1));
		}
	} finally {
		// close (and delete the file) even when a readBlock call above failed;
		// previously an exception here leaked the open channel and its file
		reader.closeAndDelete();
	}

	// presized: exactly blockCount segments are drained back
	final List<MemorySegment> buffers = new ArrayList<>(blockCount);
	retSegments.drainTo(buffers);
	return buffers;
}
 
Example #3
Source File: FileChannelUtil.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a block channel reader for the given channel, honoring the table's
 * compression settings.
 *
 * @param ioManager the IO manager that owns the channel
 * @param channel the channel to read from
 * @param bufferReturnQueue queue that finished read segments are handed back through
 * @param compressionEnable whether the channel's blocks were written compressed
 * @param compressionCodecFactory codec used for decompression (compressed case only)
 * @param compressionBlockSize block size used by the compression codec
 * @param segmentSize size of the memory segments read into
 * @return a plain or decompressing block channel reader
 * @throws IOException if the reader cannot be created
 */
public static BlockChannelReader<MemorySegment> createBlockChannelReader(
		IOManager ioManager,
		FileIOChannel.ID channel,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue,
		boolean compressionEnable,
		BlockCompressionFactory compressionCodecFactory,
		int compressionBlockSize,
		int segmentSize) throws IOException {
	// Plain (uncompressed) channels are read directly through the IO manager.
	if (!compressionEnable) {
		return ioManager.createBlockChannelReader(channel, bufferReturnQueue);
	}
	// Compressed channels need a decompressing reader wrapped around the file.
	return new CompressedBlockChannelReader(
			ioManager, channel, bufferReturnQueue,
			compressionCodecFactory, compressionBlockSize, segmentSize);
}
 
Example #4
Source File: SerializedUpdateBuffer.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the read end over the in-memory and spilled parts of the update buffer,
 * eagerly issuing one asynchronous read request per available empty buffer so the
 * spilled blocks stream in while the view is being consumed.
 *
 * @param firstMemSegment first segment to read; the int at offset 0 is passed to the
 *                        superclass as its limit — presumably the number of valid bytes, confirm
 * @param emptyBufferTarget queue that consumed buffers are returned to
 * @param fullBufferSource in-memory buffers that were filled but never spilled
 * @param spilledBufferSource reader over the blocks spilled to disk
 * @param emptyBuffers buffers used for the initial read requests (drained from the tail)
 * @param numBuffersSpilled total number of blocks residing in the spilled channel
 * @throws IOException if issuing a read request fails
 */
private ReadEnd(MemorySegment firstMemSegment, LinkedBlockingQueue<MemorySegment> emptyBufferTarget,
								Deque<MemorySegment> fullBufferSource, BlockChannelReader<MemorySegment> spilledBufferSource,
								List<MemorySegment> emptyBuffers, int numBuffersSpilled)
	throws IOException {
	super(firstMemSegment, firstMemSegment.getInt(0), HEADER_LENGTH);

	this.emptyBufferTarget = emptyBufferTarget;
	this.fullBufferSource = fullBufferSource;

	this.spilledBufferSource = spilledBufferSource;

	requestsRemaining = numBuffersSpilled;
	this.spilledBuffersRemaining = numBuffersSpilled;

	// send the first requests
	// (prefetch: one request per free buffer, taken from the end of the list)
	while (requestsRemaining > 0 && emptyBuffers.size() > 0) {
		this.spilledBufferSource.readBlock(emptyBuffers.remove(emptyBuffers.size() - 1));
		requestsRemaining--;
	}
}
 
Example #5
Source File: SerializedUpdateBuffer.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the read end over the in-memory and spilled parts of the update buffer,
 * eagerly issuing one asynchronous read request per available empty buffer so the
 * spilled blocks stream in while the view is being consumed.
 *
 * @param firstMemSegment first segment to read; the int at offset 0 is passed to the
 *                        superclass as its limit — presumably the number of valid bytes, confirm
 * @param emptyBufferTarget queue that consumed buffers are returned to
 * @param fullBufferSource in-memory buffers that were filled but never spilled
 * @param spilledBufferSource reader over the blocks spilled to disk
 * @param emptyBuffers buffers used for the initial read requests (drained from the tail)
 * @param numBuffersSpilled total number of blocks residing in the spilled channel
 * @throws IOException if issuing a read request fails
 */
private ReadEnd(MemorySegment firstMemSegment, LinkedBlockingQueue<MemorySegment> emptyBufferTarget,
								Deque<MemorySegment> fullBufferSource, BlockChannelReader<MemorySegment> spilledBufferSource,
								List<MemorySegment> emptyBuffers, int numBuffersSpilled)
	throws IOException {
	super(firstMemSegment, firstMemSegment.getInt(0), HEADER_LENGTH);

	this.emptyBufferTarget = emptyBufferTarget;
	this.fullBufferSource = fullBufferSource;

	this.spilledBufferSource = spilledBufferSource;

	requestsRemaining = numBuffersSpilled;
	this.spilledBuffersRemaining = numBuffersSpilled;

	// send the first requests
	// (prefetch: one request per free buffer, taken from the end of the list)
	while (requestsRemaining > 0 && emptyBuffers.size() > 0) {
		this.spilledBufferSource.readBlock(emptyBuffers.remove(emptyBuffers.size() - 1));
		requestsRemaining--;
	}
}
 
Example #6
Source File: FileChannelUtil.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a block channel reader for the given channel, honoring the table's
 * compression settings.
 *
 * @param ioManager the IO manager that owns the channel
 * @param channel the channel to read from
 * @param bufferReturnQueue queue that finished read segments are handed back through
 * @param compressionEnable whether the channel's blocks were written compressed
 * @param compressionCodecFactory codec used for decompression (compressed case only)
 * @param compressionBlockSize block size used by the compression codec
 * @param segmentSize size of the memory segments read into
 * @return a plain or decompressing block channel reader
 * @throws IOException if the reader cannot be created
 */
public static BlockChannelReader<MemorySegment> createBlockChannelReader(
		IOManager ioManager,
		FileIOChannel.ID channel,
		LinkedBlockingQueue<MemorySegment> bufferReturnQueue,
		boolean compressionEnable,
		BlockCompressionFactory compressionCodecFactory,
		int compressionBlockSize,
		int segmentSize) throws IOException {
	// Plain (uncompressed) channels are read directly through the IO manager.
	if (!compressionEnable) {
		return ioManager.createBlockChannelReader(channel, bufferReturnQueue);
	}
	// Compressed channels need a decompressing reader wrapped around the file.
	return new CompressedBlockChannelReader(
			ioManager, channel, bufferReturnQueue,
			compressionCodecFactory, compressionBlockSize, segmentSize);
}
 
Example #7
Source File: SerializedUpdateBuffer.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the read end over the in-memory and spilled parts of the update buffer,
 * eagerly issuing one asynchronous read request per available empty buffer so the
 * spilled blocks stream in while the view is being consumed.
 *
 * @param firstMemSegment first segment to read; the int at offset 0 is passed to the
 *                        superclass as its limit — presumably the number of valid bytes, confirm
 * @param emptyBufferTarget queue that consumed buffers are returned to
 * @param fullBufferSource in-memory buffers that were filled but never spilled
 * @param spilledBufferSource reader over the blocks spilled to disk
 * @param emptyBuffers buffers used for the initial read requests (drained from the tail)
 * @param numBuffersSpilled total number of blocks residing in the spilled channel
 * @throws IOException if issuing a read request fails
 */
private ReadEnd(MemorySegment firstMemSegment, LinkedBlockingQueue<MemorySegment> emptyBufferTarget,
								Deque<MemorySegment> fullBufferSource, BlockChannelReader<MemorySegment> spilledBufferSource,
								List<MemorySegment> emptyBuffers, int numBuffersSpilled)
	throws IOException {
	super(firstMemSegment, firstMemSegment.getInt(0), HEADER_LENGTH);

	this.emptyBufferTarget = emptyBufferTarget;
	this.fullBufferSource = fullBufferSource;

	this.spilledBufferSource = spilledBufferSource;

	requestsRemaining = numBuffersSpilled;
	this.spilledBuffersRemaining = numBuffersSpilled;

	// send the first requests
	// (prefetch: one request per free buffer, taken from the end of the list)
	while (requestsRemaining > 0 && emptyBuffers.size() > 0) {
		this.spilledBufferSource.readBlock(emptyBuffers.remove(emptyBuffers.size() - 1));
		requestsRemaining--;
	}
}
 
Example #8
Source File: FileChannelUtil.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a reader input view over the given channel, honoring the compression
 * settings, and registers the underlying reader in {@code channels} so the caller
 * can close it later.
 *
 * @param ioManager the IO manager that owns the channel
 * @param channel the channel (with block count / last-block metadata) to read
 * @param channels collection the opened reader is added to for later cleanup
 * @param compressionEnable whether the channel was written compressed
 * @param compressionCodecFactory codec used for decompression (compressed case only)
 * @param compressionBlockSize block size used by the compression codec
 * @param segmentSize size of the unpooled read buffers allocated here
 * @return an input view over the channel's data
 * @throws IOException if the reader or view cannot be created
 */
public static AbstractChannelReaderInputView createInputView(
		IOManager ioManager,
		ChannelWithMeta channel,
		List<FileIOChannel> channels,
		boolean compressionEnable,
		BlockCompressionFactory compressionCodecFactory,
		int compressionBlockSize,
		int segmentSize) throws IOException {
	// Uncompressed path: headerless view over a plain block reader with two read buffers.
	if (!compressionEnable) {
		BlockChannelReader<MemorySegment> blockReader =
				ioManager.createBlockChannelReader(channel.getChannel());
		channels.add(blockReader);
		List<MemorySegment> readBuffers = Arrays.asList(
				allocateUnpooledSegment(segmentSize),
				allocateUnpooledSegment(segmentSize));
		return new HeaderlessChannelReaderInputView(
				blockReader,
				readBuffers,
				channel.getBlockCount(),
				channel.getNumBytesInLastBlock(), false);
	}

	// Compressed path: the view creates its own reader; register that reader for cleanup.
	CompressedHeaderlessChannelReaderInputView compressedView =
			new CompressedHeaderlessChannelReaderInputView(
					channel.getChannel(),
					ioManager,
					compressionCodecFactory,
					compressionBlockSize,
					channel.getBlockCount());
	channels.add(compressedView.getReader());
	return compressedView;
}
 
Example #9
Source File: FileChannelInputView.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Creates an input view over the given channel, using the provided segments as
 * read buffers. Immediately issues one asynchronous read request per segment and
 * advances to the first block, so the view is readable when the constructor returns.
 *
 * @param reader open block reader for the channel (must not be closed)
 * @param memManager memory manager that owns {@code memory}; pages are released on failure
 * @param memory non-empty list of segments used as read buffers
 * @param sizeOfLastBlock number of valid bytes in the channel's final block
 * @throws IOException if issuing the read requests or reading the first block fails
 */
public FileChannelInputView(BlockChannelReader<MemorySegment> reader, MemoryManager memManager, List<MemorySegment> memory, int sizeOfLastBlock) throws IOException {
	super(0);
	
	checkNotNull(reader);
	checkNotNull(memManager);
	checkNotNull(memory);
	checkArgument(!reader.isClosed());
	checkArgument(memory.size() > 0);
	
	this.reader = reader;
	this.memManager = memManager;
	this.memory = memory;
	this.sizeOfLastBlock = sizeOfLastBlock;
	
	try {
		final long channelLength = reader.getSize();
		final int segmentSize = memManager.getPageSize();
		
		// number of full blocks, plus one if the channel ends in a partial block
		this.numBlocksRemaining = MathUtils.checkedDownCast(channelLength / segmentSize);
		if (channelLength % segmentSize != 0) {
			this.numBlocksRemaining++;
		}
		
		this.numRequestsRemaining = numBlocksRemaining;
		
		// issue one asynchronous read request per available buffer
		for (int i = 0; i < memory.size(); i++) {
			sendReadRequest(memory.get(i));
		}
		
		// position the view on the first block
		advance();
	}
	catch (IOException e) {
		// hand the pages back so they are not leaked when construction fails
		memManager.release(memory);
		throw e;
	}
}
 
Example #10
Source File: ChannelReaderInputViewIterator.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Creates an iterator that deserializes records of type {@code E} from the
 * given block channel reader.
 *
 * <p>NOTE(review): {@code returnQueue} is not referenced in this body — confirm
 * whether it is consumed by another overload or genuinely unused.
 *
 * @param reader open reader over the spilled channel
 * @param returnQueue queue for returned segments (unused here)
 * @param segments read buffers handed to the input view
 * @param freeMemTarget list that freed segments are collected into
 * @param accessors serializer used to deserialize records
 * @param numBlocks number of blocks to read from the channel
 * @throws IOException if the input view cannot be created
 */
public ChannelReaderInputViewIterator(BlockChannelReader<MemorySegment> reader, LinkedBlockingQueue<MemorySegment> returnQueue,
		List<MemorySegment> segments, List<MemorySegment> freeMemTarget, TypeSerializer<E> accessors, int numBlocks)
throws IOException
{
	this.accessors = accessors;
	this.freeMemTarget = freeMemTarget;
	this.inView = new ChannelReaderInputView(reader, segments, numBlocks, false);
}
 
Example #11
Source File: FileChannelStreamsTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies the close/delete contract of {@code FileChannelInputView}: closing
 * returns all memory to the memory manager, closing a second time is harmless,
 * and {@code closeAndDelete()} removes the backing channel file.
 */
@Test
public void testCloseAndDeleteInputView() {
	final IOManager ioManager = new IOManagerAsync();
	try {
		MemoryManager memMan = new MemoryManager(4 * 16*1024, 1, 16*1024, MemoryType.HEAP, true);
		List<MemorySegment> memory = new ArrayList<MemorySegment>();
		memMan.allocatePages(new DummyInvokable(), memory, 4);
		
		FileIOChannel.ID channel = ioManager.createChannel();
		
		// add some test data
		try (FileWriter wrt = new FileWriter(channel.getPath())) {
			wrt.write("test data");
		}
		
		BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
		FileChannelInputView in = new FileChannelInputView(reader, memMan, memory, 9);
		
		// read just something
		in.readInt();
		
		// close for the first time, make sure all memory returns
		in.close();
		assertTrue(memMan.verifyEmpty());
		
		// close again, should not cause an exception
		in.close();
		
		// delete, make sure file is removed
		in.closeAndDelete();
		assertFalse(new File(channel.getPath()).exists());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
	finally {
		// the IO manager is shut down regardless of test outcome
		ioManager.shutdown();
	}
}
 
Example #12
Source File: FileChannelStreamsTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies the close/delete contract of {@code FileChannelInputView}: closing
 * returns all memory to the memory manager, closing a second time is harmless,
 * and {@code closeAndDelete()} removes the backing channel file.
 */
@Test
public void testCloseAndDeleteInputView() {
	// try-with-resources closes the IO manager regardless of outcome
	try (IOManager ioManager = new IOManagerAsync()) {
		MemoryManager memMan = MemoryManagerBuilder.newBuilder().build();
		List<MemorySegment> memory = new ArrayList<MemorySegment>();
		memMan.allocatePages(new DummyInvokable(), memory, 4);
		
		FileIOChannel.ID channel = ioManager.createChannel();
		
		// add some test data
		try (FileWriter wrt = new FileWriter(channel.getPath())) {
			wrt.write("test data");
		}
		
		BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
		FileChannelInputView in = new FileChannelInputView(reader, memMan, memory, 9);
		
		// read just something
		in.readInt();
		
		// close for the first time, make sure all memory returns
		in.close();
		assertTrue(memMan.verifyEmpty());
		
		// close again, should not cause an exception
		in.close();
		
		// delete, make sure file is removed
		in.closeAndDelete();
		assertFalse(new File(channel.getPath()).exists());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #13
Source File: UnilateralSortMerger.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Returns an iterator that iterates over the merged result from all given channels.
 * 
 * @param channelIDs The channels that are to be merged and returned.
 * @param inputSegments The buffers to be used for reading. The list contains for each channel one
 *                      list of input segments. The size of the <code>inputSegments</code> list must be equal to
 *                      that of the <code>channelIDs</code> list.
 * @param readerList List collecting the readers opened here, so the caller can close them later.
 * @param largeRecords Optional iterator over large records, appended after the channel iterators; may be null.
 * @return An iterator over the merged records of the input channels.
 * @throws IOException Thrown, if the readers encounter an I/O problem.
 */
protected final MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs,
		final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords)
	throws IOException
{
	// create one iterator per channel id
	if (LOG.isDebugEnabled()) {
		LOG.debug("Performing merge of " + channelIDs.size() + " sorted streams.");
	}
	
	final List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(channelIDs.size() + 1);
	
	for (int i = 0; i < channelIDs.size(); i++) {
		final ChannelWithBlockCount channel = channelIDs.get(i);
		final List<MemorySegment> segsForChannel = inputSegments.get(i);
		
		// create a reader. if there are multiple segments for the reader, issue multiple together per I/O request
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel());
			
		readerList.add(reader);
		// shutdown bookkeeping: the open reader replaces the raw channel in the cleanup set
		registerOpenChannelToBeRemovedAtShudown(reader);
		unregisterChannelToBeRemovedAtShudown(channel.getChannel());
		
		// wrap channel reader as a view, to get block spanning record deserialization
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, 
																	channel.getBlockCount(), false);
		iterators.add(new ChannelReaderInputViewIterator<E>(inView, null, this.serializer));
	}
	
	if (largeRecords != null) {
		iterators.add(largeRecords);
	}

	return new MergeIterator<E>(iterators, this.comparator);
}
 
Example #14
Source File: BaseHybridHashTable.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Opens a headerless reader input view over the given spill channel, honoring the
 * table's compression settings.
 *
 * @param id the spill channel to read
 * @param blockCount number of blocks written to the channel
 * @param lastSegmentLimit number of valid bytes in the channel's last block
 * @return a view reading the channel sequentially
 * @throws IOException if the reader cannot be created
 */
protected HeaderlessChannelReaderInputView createInputView(FileIOChannel.ID id, int blockCount, int lastSegmentLimit) throws IOException {
	// reader honoring the table's compression settings; segments return to a private queue
	BlockChannelReader<MemorySegment> blockReader = FileChannelUtil.createBlockChannelReader(
			ioManager, id, new LinkedBlockingQueue<>(),
			compressionEnable, compressionCodecFactory, compressionBlockSize, segmentSize);
	// two unpooled segments serve as the view's read buffers
	List<MemorySegment> readBuffers = Arrays.asList(
			allocateUnpooledSegment(segmentSize),
			allocateUnpooledSegment(segmentSize));
	return new HeaderlessChannelReaderInputView(blockReader, readBuffers, blockCount, lastSegmentLimit, false);
}
 
Example #15
Source File: FileChannelUtil.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a reader input view over the given channel, honoring the compression
 * settings, and registers the underlying reader in {@code channels} so the caller
 * can close it later.
 *
 * @param ioManager the IO manager that owns the channel
 * @param channel the channel (with block count / last-block metadata) to read
 * @param channels collection the opened reader is added to for later cleanup
 * @param compressionEnable whether the channel was written compressed
 * @param compressionCodecFactory codec used for decompression (compressed case only)
 * @param compressionBlockSize block size used by the compression codec
 * @param segmentSize size of the unpooled read buffers allocated here
 * @return an input view over the channel's data
 * @throws IOException if the reader or view cannot be created
 */
public static AbstractChannelReaderInputView createInputView(
		IOManager ioManager,
		ChannelWithMeta channel,
		List<FileIOChannel> channels,
		boolean compressionEnable,
		BlockCompressionFactory compressionCodecFactory,
		int compressionBlockSize,
		int segmentSize) throws IOException {
	// Uncompressed path: headerless view over a plain block reader with two read buffers.
	if (!compressionEnable) {
		BlockChannelReader<MemorySegment> blockReader =
				ioManager.createBlockChannelReader(channel.getChannel());
		channels.add(blockReader);
		List<MemorySegment> readBuffers = Arrays.asList(
				allocateUnpooledSegment(segmentSize),
				allocateUnpooledSegment(segmentSize));
		return new HeaderlessChannelReaderInputView(
				blockReader,
				readBuffers,
				channel.getBlockCount(),
				channel.getNumBytesInLastBlock(), false);
	}

	// Compressed path: the view creates its own reader; register that reader for cleanup.
	CompressedHeaderlessChannelReaderInputView compressedView =
			new CompressedHeaderlessChannelReaderInputView(
					channel.getChannel(),
					ioManager,
					compressionCodecFactory,
					compressionBlockSize,
					channel.getBlockCount());
	channels.add(compressedView.getReader());
	return compressedView;
}
 
Example #16
Source File: BaseHybridHashTable.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Opens a headerless reader input view over the given spill channel, honoring the
 * table's compression settings.
 *
 * @param id the spill channel to read
 * @param blockCount number of blocks written to the channel
 * @param lastSegmentLimit number of valid bytes in the channel's last block
 * @return a view reading the channel sequentially
 * @throws IOException if the reader cannot be created
 */
protected HeaderlessChannelReaderInputView createInputView(FileIOChannel.ID id, int blockCount, int lastSegmentLimit) throws IOException {
	// reader honoring the table's compression settings; segments return to a private queue
	BlockChannelReader<MemorySegment> blockReader = FileChannelUtil.createBlockChannelReader(
			ioManager, id, new LinkedBlockingQueue<>(),
			compressionEnable, compressionCodecFactory, compressionBlockSize, segmentSize);
	// two unpooled segments serve as the view's read buffers
	List<MemorySegment> readBuffers = Arrays.asList(
			allocateUnpooledSegment(segmentSize),
			allocateUnpooledSegment(segmentSize));
	return new HeaderlessChannelReaderInputView(blockReader, readBuffers, blockCount, lastSegmentLimit, false);
}
 
Example #17
Source File: UnilateralSortMerger.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns an iterator that iterates over the merged result from all given channels.
 * 
 * @param channelIDs The channels that are to be merged and returned.
 * @param inputSegments The buffers to be used for reading. The list contains for each channel one
 *                      list of input segments. The size of the <code>inputSegments</code> list must be equal to
 *                      that of the <code>channelIDs</code> list.
 * @param readerList List collecting the readers opened here, so the caller can close them later.
 * @param largeRecords Optional iterator over large records, appended after the channel iterators; may be null.
 * @return An iterator over the merged records of the input channels.
 * @throws IOException Thrown, if the readers encounter an I/O problem.
 */
protected final MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs,
		final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords)
	throws IOException
{
	// create one iterator per channel id
	if (LOG.isDebugEnabled()) {
		LOG.debug("Performing merge of " + channelIDs.size() + " sorted streams.");
	}
	
	final List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(channelIDs.size() + 1);
	
	for (int i = 0; i < channelIDs.size(); i++) {
		final ChannelWithBlockCount channel = channelIDs.get(i);
		final List<MemorySegment> segsForChannel = inputSegments.get(i);
		
		// create a reader. if there are multiple segments for the reader, issue multiple together per I/O request
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel());
			
		readerList.add(reader);
		// shutdown bookkeeping: the open reader replaces the raw channel in the cleanup set
		registerOpenChannelToBeRemovedAtShudown(reader);
		unregisterChannelToBeRemovedAtShudown(channel.getChannel());
		
		// wrap channel reader as a view, to get block spanning record deserialization
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, 
																	channel.getBlockCount(), false);
		iterators.add(new ChannelReaderInputViewIterator<E>(inView, null, this.serializer));
	}
	
	if (largeRecords != null) {
		iterators.add(largeRecords);
	}

	return new MergeIterator<E>(iterators, this.comparator);
}
 
Example #18
Source File: FileChannelInputView.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates an input view over the given channel, using the provided segments as
 * read buffers. Immediately issues one asynchronous read request per segment and
 * advances to the first block, so the view is readable when the constructor returns.
 *
 * @param reader open block reader for the channel (must not be closed)
 * @param memManager memory manager that owns {@code memory}; pages are released on failure
 * @param memory non-empty list of segments used as read buffers
 * @param sizeOfLastBlock number of valid bytes in the channel's final block
 * @throws IOException if issuing the read requests or reading the first block fails
 */
public FileChannelInputView(BlockChannelReader<MemorySegment> reader, MemoryManager memManager, List<MemorySegment> memory, int sizeOfLastBlock) throws IOException {
	super(0);
	
	checkNotNull(reader);
	checkNotNull(memManager);
	checkNotNull(memory);
	checkArgument(!reader.isClosed());
	checkArgument(memory.size() > 0);
	
	this.reader = reader;
	this.memManager = memManager;
	this.memory = memory;
	this.sizeOfLastBlock = sizeOfLastBlock;
	
	try {
		final long channelLength = reader.getSize();
		final int segmentSize = memManager.getPageSize();
		
		// number of full blocks, plus one if the channel ends in a partial block
		this.numBlocksRemaining = MathUtils.checkedDownCast(channelLength / segmentSize);
		if (channelLength % segmentSize != 0) {
			this.numBlocksRemaining++;
		}
		
		this.numRequestsRemaining = numBlocksRemaining;
		
		// issue one asynchronous read request per available buffer
		for (int i = 0; i < memory.size(); i++) {
			sendReadRequest(memory.get(i));
		}
		
		// position the view on the first block
		advance();
	}
	catch (IOException e) {
		// hand the pages back so they are not leaked when construction fails
		memManager.release(memory);
		throw e;
	}
}
 
Example #19
Source File: ChannelReaderInputViewIterator.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates an iterator that deserializes records of type {@code E} from the
 * given block channel reader.
 *
 * <p>NOTE(review): {@code returnQueue} is not referenced in this body — confirm
 * whether it is consumed by another overload or genuinely unused.
 *
 * @param reader open reader over the spilled channel
 * @param returnQueue queue for returned segments (unused here)
 * @param segments read buffers handed to the input view
 * @param freeMemTarget list that freed segments are collected into
 * @param accessors serializer used to deserialize records
 * @param numBlocks number of blocks to read from the channel
 * @throws IOException if the input view cannot be created
 */
public ChannelReaderInputViewIterator(BlockChannelReader<MemorySegment> reader, LinkedBlockingQueue<MemorySegment> returnQueue,
		List<MemorySegment> segments, List<MemorySegment> freeMemTarget, TypeSerializer<E> accessors, int numBlocks)
throws IOException
{
	this.accessors = accessors;
	this.freeMemTarget = freeMemTarget;
	this.inView = new ChannelReaderInputView(reader, segments, numBlocks, false);
}
 
Example #20
Source File: FileChannelInputView.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates an input view over the given channel, using the provided segments as
 * read buffers. Immediately issues one asynchronous read request per segment and
 * advances to the first block, so the view is readable when the constructor returns.
 *
 * @param reader open block reader for the channel (must not be closed)
 * @param memManager memory manager that owns {@code memory}; pages are released on failure
 * @param memory non-empty list of segments used as read buffers
 * @param sizeOfLastBlock number of valid bytes in the channel's final block
 * @throws IOException if issuing the read requests or reading the first block fails
 */
public FileChannelInputView(BlockChannelReader<MemorySegment> reader, MemoryManager memManager, List<MemorySegment> memory, int sizeOfLastBlock) throws IOException {
	super(0);
	
	checkNotNull(reader);
	checkNotNull(memManager);
	checkNotNull(memory);
	checkArgument(!reader.isClosed());
	checkArgument(memory.size() > 0);
	
	this.reader = reader;
	this.memManager = memManager;
	this.memory = memory;
	this.sizeOfLastBlock = sizeOfLastBlock;
	
	try {
		final long channelLength = reader.getSize();
		final int segmentSize = memManager.getPageSize();
		
		// number of full blocks, plus one if the channel ends in a partial block
		this.numBlocksRemaining = MathUtils.checkedDownCast(channelLength / segmentSize);
		if (channelLength % segmentSize != 0) {
			this.numBlocksRemaining++;
		}
		
		this.numRequestsRemaining = numBlocksRemaining;
		
		// issue one asynchronous read request per available buffer
		for (int i = 0; i < memory.size(); i++) {
			sendReadRequest(memory.get(i));
		}
		
		// position the view on the first block
		advance();
	}
	catch (IOException e) {
		// hand the pages back so they are not leaked when construction fails
		memManager.release(memory);
		throw e;
	}
}
 
Example #21
Source File: ChannelReaderInputViewIterator.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates an iterator that deserializes records of type {@code E} from the
 * given block channel reader.
 *
 * <p>NOTE(review): {@code returnQueue} is not referenced in this body — confirm
 * whether it is consumed by another overload or genuinely unused.
 *
 * @param reader open reader over the spilled channel
 * @param returnQueue queue for returned segments (unused here)
 * @param segments read buffers handed to the input view
 * @param freeMemTarget list that freed segments are collected into
 * @param accessors serializer used to deserialize records
 * @param numBlocks number of blocks to read from the channel
 * @throws IOException if the input view cannot be created
 */
public ChannelReaderInputViewIterator(BlockChannelReader<MemorySegment> reader, LinkedBlockingQueue<MemorySegment> returnQueue,
		List<MemorySegment> segments, List<MemorySegment> freeMemTarget, TypeSerializer<E> accessors, int numBlocks)
throws IOException
{
	this.accessors = accessors;
	this.freeMemTarget = freeMemTarget;
	this.inView = new ChannelReaderInputView(reader, segments, numBlocks, false);
}
 
Example #22
Source File: FileChannelStreamsTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies the close/delete contract of {@code FileChannelInputView}: closing
 * returns all memory to the memory manager, closing a second time is harmless,
 * and {@code closeAndDelete()} removes the backing channel file.
 */
@Test
public void testCloseAndDeleteInputView() {
	// try-with-resources closes the IO manager regardless of outcome
	try (IOManager ioManager = new IOManagerAsync()) {
		MemoryManager memMan = new MemoryManager(4 * 16*1024, 1, 16*1024, MemoryType.HEAP, true);
		List<MemorySegment> memory = new ArrayList<MemorySegment>();
		memMan.allocatePages(new DummyInvokable(), memory, 4);
		
		FileIOChannel.ID channel = ioManager.createChannel();
		
		// add some test data
		try (FileWriter wrt = new FileWriter(channel.getPath())) {
			wrt.write("test data");
		}
		
		BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
		FileChannelInputView in = new FileChannelInputView(reader, memMan, memory, 9);
		
		// read just something
		in.readInt();
		
		// close for the first time, make sure all memory returns
		in.close();
		assertTrue(memMan.verifyEmpty());
		
		// close again, should not cause an exception
		in.close();
		
		// delete, make sure file is removed
		in.closeAndDelete();
		assertFalse(new File(channel.getPath()).exists());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #23
Source File: UnilateralSortMerger.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Returns an iterator that iterates over the merged result from all given channels.
 * 
 * @param channelIDs The channels that are to be merged and returned.
 * @param inputSegments The buffers to be used for reading. The list contains for each channel one
 *                      list of input segments. The size of the <code>inputSegments</code> list must be equal to
 *                      that of the <code>channelIDs</code> list.
 * @param readerList List collecting the readers opened here, so the caller can close them later.
 * @param largeRecords Optional iterator over large records, appended after the channel iterators; may be null.
 * @return An iterator over the merged records of the input channels.
 * @throws IOException Thrown, if the readers encounter an I/O problem.
 */
protected final MergeIterator<E> getMergingIterator(final List<ChannelWithBlockCount> channelIDs,
		final List<List<MemorySegment>> inputSegments, List<FileIOChannel> readerList, MutableObjectIterator<E> largeRecords)
	throws IOException
{
	// create one iterator per channel id
	if (LOG.isDebugEnabled()) {
		LOG.debug("Performing merge of " + channelIDs.size() + " sorted streams.");
	}
	
	final List<MutableObjectIterator<E>> iterators = new ArrayList<MutableObjectIterator<E>>(channelIDs.size() + 1);
	
	for (int i = 0; i < channelIDs.size(); i++) {
		final ChannelWithBlockCount channel = channelIDs.get(i);
		final List<MemorySegment> segsForChannel = inputSegments.get(i);
		
		// create a reader. if there are multiple segments for the reader, issue multiple together per I/O request
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel.getChannel());
			
		readerList.add(reader);
		// shutdown bookkeeping: the open reader replaces the raw channel in the cleanup set
		registerOpenChannelToBeRemovedAtShudown(reader);
		unregisterChannelToBeRemovedAtShudown(channel.getChannel());
		
		// wrap channel reader as a view, to get block spanning record deserialization
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, segsForChannel, 
																	channel.getBlockCount(), false);
		iterators.add(new ChannelReaderInputViewIterator<E>(inView, null, this.serializer));
	}
	
	if (largeRecords != null) {
		iterators.add(largeRecords);
	}

	return new MergeIterator<E>(iterators, this.comparator);
}
 
Example #24
Source File: FileChannelStreamsITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Round-trip test with long values: writes {@code NUM_PAIRS_LONG} generated
 * key/value pairs through a {@code FileChannelOutputView}, reads them back through
 * a {@code FileChannelInputView}, and checks each read pair against a re-generated
 * one (the generator is reset to replay the same sequence).
 */
@Test
public void testWriteAndReadLongRecords() {
	try {
		final List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
		
		final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_LONG_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		
		// create the writer output view
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);
		
		// write a number of pairs
		Pair pair = new Pair();
		for (int i = 0; i < NUM_PAIRS_LONG; i++) {
			generator.next(pair);
			pair.write(outView);
		}
		outView.close();
		
		// create the reader input view
		List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
		
		final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
		final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
		// replay the same pseudo-random sequence for comparison
		generator.reset();
		
		// read and re-generate all records and compare them
		Pair readPair = new Pair();
		for (int i = 0; i < NUM_PAIRS_LONG; i++) {
			generator.next(pair);
			readPair.read(inView);
			assertEquals("The re-generated and the read record do not match.", pair, readPair);
		}
		
		inView.close();
		reader.deleteChannel();
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #25
Source File: SpillingBuffer.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Ends the write phase and returns a view for reading back everything written so far.
 *
 * <p>On the first flip the partially filled current segment is finalized: kept in
 * memory if nothing was spilled, otherwise written as the last block of the spill
 * file (collecting the writer's segments back). Writing is then disabled via
 * {@code clear()}. Later flips only rewind the in-memory view or open a fresh
 * reader over the spill file.
 *
 * @return an input view over the buffered data
 * @throws IOException if closing the writer or creating the reader fails
 */
public DataInputView flip() throws IOException {
	// check whether this is the first flip and we need to add the current segment to the full ones
	if (getCurrentSegment() != null) {
		// first flip
		if (this.writer == null) {
			// in memory
			this.fullSegments.add(getCurrentSegment());
			this.numBytesInLastSegment = getCurrentPositionInSegment();
			this.inMemInView = new RandomAccessInputView(this.fullSegments, this.segmentSize, this.numBytesInLastSegment);
		} else {
			// external: write the last segment and collect the memory back
			this.writer.writeBlock(this.getCurrentSegment());
			this.numMemorySegmentsInWriter++;
			
			this.numBytesInLastSegment = getCurrentPositionInSegment();
			this.blockCount++;
			this.writer.close();
			// reclaim every segment still held by the writer for use as read buffers
			for (int i = this.numMemorySegmentsInWriter; i > 0; i--) {
				this.fullSegments.add(this.writer.getNextReturnedBlock());
			}
			this.numMemorySegmentsInWriter = 0;
		}
		
		// make sure we cannot write more
		clear();
	}
	
	if (this.writer == null) {
		// in memory
		this.inMemInView.setReadPosition(0);
		return this.inMemInView;
	} else {
		// recollect memory from a previous view
		if (this.externalInView != null) {
			this.externalInView.close();
		}
		
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(this.writer.getChannelID());
		this.externalInView = new HeaderlessChannelReaderInputView(reader, this.fullSegments, this.blockCount, this.numBytesInLastSegment, false);
		return this.externalInView;
	}
}
 
Example #26
Source File: FileChannelStreamsITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Writes {@code NUM_PAIRS_SHORT} pairs, then attempts to read one record more
 * than was written and verifies that an {@code EOFException} is raised.
 */
@Test
public void testReadTooMany() {
	try {
		final List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
		
		final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		
		// create the writer output view
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);

		// write a number of pairs
		Pair pair = new Pair();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(pair);
			pair.write(outView);
		}
		outView.close();

		// create the reader input view
		List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
		
		final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
		final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
		// replay the same pseudo-random sequence for comparison
		generator.reset();

		// read and re-generate all records and compare them
		try {
			Pair readPair = new Pair();
			// intentionally reads NUM_PAIRS_SHORT + 1 records — one past the end
			for (int i = 0; i < NUM_PAIRS_SHORT + 1; i++) {
				generator.next(pair);
				readPair.read(inView);
				assertEquals("The re-generated and the read record do not match.", pair, readPair);
			}
			fail("Expected an EOFException which did not occur.");
		}
		catch (EOFException eofex) {
			// expected
		}
		
		inView.close();
		reader.deleteChannel();
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #27
Source File: FileChannelStreamsITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Writes {@code NUM_PAIRS_SHORT} pairs but reads back only half of them,
 * verifying that closing the view after partial consumption works cleanly.
 */
@Test
public void testWriteReadNotAll() {
	try {
		final List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
		
		final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		
		// create the writer output view
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);
		
		// write a number of pairs
		Pair pair = new Pair();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(pair);
			pair.write(outView);
		}
		outView.close();
		
		// create the reader input view
		List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
		
		final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
		final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
		// replay the same pseudo-random sequence for comparison
		generator.reset();
		
		// read and re-generate all records and compare them
		// (only half of the written records are consumed before closing)
		Pair readPair = new Pair();
		for (int i = 0; i < NUM_PAIRS_SHORT / 2; i++) {
			generator.next(pair);
			readPair.read(inView);
			assertEquals("The re-generated and the read record do not match.", pair, readPair);
		}
		
		inView.close();
		reader.deleteChannel();
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #28
Source File: FileChannelStreamsITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Round-trip test constrained to a single memory segment for writing and a single
 * one for reading, forcing the views to cycle one buffer through the channel.
 */
@Test
public void testWriteReadOneBufferOnly() {
	try {
		// only one page for the writer
		final List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), 1);
		
		final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		
		// create the writer output view
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);
		
		// write a number of pairs
		Pair pair = new Pair();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(pair);
			pair.write(outView);
		}
		outView.close();
		
		// create the reader input view
		// only one page for the reader as well
		List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), 1);
		
		final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
		final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
		// replay the same pseudo-random sequence for comparison
		generator.reset();
		
		// read and re-generate all records and compare them
		Pair readPair = new Pair();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(pair);
			readPair.read(inView);
			assertEquals("The re-generated and the read record do not match.", pair, readPair);
		}
		
		inView.close();
		reader.deleteChannel();
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #29
Source File: FileChannelStreamsITCase.java — from the Apache Flink project (Apache License 2.0)
@Test
public void testWriteReadOneBufferOnly() throws Exception {
	// Verifies write/read round-trip when both the output and input views
	// are restricted to a single memory page, forcing a block flush/fetch
	// on every page boundary.
	// Note: declaring "throws Exception" instead of catch/printStackTrace/fail
	// preserves the full stack trace in the test report and avoids fail(null)
	// when an exception carries no message.
	final List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), 1);
	
	final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	
	// create the writer output view with a single buffer
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Pair pair = new Pair();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(pair);
		pair.write(outView);
	}
	outView.close();
	
	// create the reader input view with a single buffer
	final List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), 1);
	
	final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
	final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
	generator.reset();
	
	// read and re-generate all records and compare them
	final Pair readPair = new Pair();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(pair);
		readPair.read(inView);
		assertEquals("The re-generated and the read record do not match.", pair, readPair);
	}
	
	inView.close();
	reader.deleteChannel();
}
 
Example #30
Source File: FileChannelStreamsITCase.java — from the Apache Flink project (Apache License 2.0)
@Test
public void testWriteReadNotAll() throws Exception {
	// Verifies that a FileChannelInputView can be closed cleanly after
	// consuming only a prefix (half) of the records that were written.
	// Note: declaring "throws Exception" instead of catch/printStackTrace/fail
	// preserves the full stack trace in the test report and avoids fail(null)
	// when an exception carries no message.
	final List<MemorySegment> memory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
	
	final PairGenerator generator = new PairGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
	final FileIOChannel.ID channel = this.ioManager.createChannel();
	
	// create the writer output view
	final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
	final FileChannelOutputView outView = new FileChannelOutputView(writer, memManager, memory, MEMORY_PAGE_SIZE);
	
	// write a number of pairs
	final Pair pair = new Pair();
	for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
		generator.next(pair);
		pair.write(outView);
	}
	outView.close();
	
	// create the reader input view over the same channel
	final List<MemorySegment> readMemory = memManager.allocatePages(new DummyInvokable(), NUM_MEMORY_SEGMENTS);
	
	final BlockChannelReader<MemorySegment> reader = ioManager.createBlockChannelReader(channel);
	final FileChannelInputView inView = new FileChannelInputView(reader, memManager, readMemory, outView.getBytesInLatestSegment());
	generator.reset();
	
	// re-generate and compare only the first half of the records,
	// then close while unread data remains
	final Pair readPair = new Pair();
	for (int i = 0; i < NUM_PAIRS_SHORT / 2; i++) {
		generator.next(pair);
		readPair.read(inView);
		assertEquals("The re-generated and the read record do not match.", pair, readPair);
	}
	
	inView.close();
	reader.deleteChannel();
}