org.apache.flink.core.fs.FSDataInputStream Java Examples

The following examples show how to use org.apache.flink.core.fs.FSDataInputStream. They are taken from open-source projects; the source file, originating project, and license are listed above each example.
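
Before the individual examples, here is a minimal, self-contained sketch of the core FSDataInputStream calls (open, seek, getPos, read). The class name and the file path below are placeholders chosen for illustration; any file reachable through a Flink FileSystem behaves the same way.

import org.apache.flink.core.fs.FSDataInputStream;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;

import java.io.IOException;

public class FSDataInputStreamSketch {

	public static void main(String[] args) throws IOException {
		// placeholder path; point this at any existing local file
		Path path = new Path("/tmp/example.txt");
		FileSystem fs = FileSystem.getLocalFileSystem();

		try (FSDataInputStream in = fs.open(path)) {
			in.seek(4);                 // jump to an absolute byte offset
			long pos = in.getPos();     // current position, 4 after the seek
			int firstByte = in.read();  // next byte as 0-255, or -1 at end of stream
			System.out.println("pos=" + pos + ", byte=" + firstByte);
		}
	}
}
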
Example #1
Source File: EmptyStreamStateHandle.java    From Flink-CEPplus with Apache License 2.0
@Override
public FSDataInputStream openInputStream() throws IOException {
	// returns an empty stream
	return new FSDataInputStream() {

		@Override
		public void seek(long desired) throws IOException {
			if (desired != 0) {
				throw new IOException("out of bounds");
			}
		}

		@Override
		public long getPos() {
			return 0;
		}

		@Override
		public int read() throws IOException {
			return -1;
		}
	};
}
 
Example #2
Source File: OperatorStateRestoreOperation.java    From flink with Apache License 2.0
private <S> void deserializeOperatorStateValues(
	PartitionableListState<S> stateListForName,
	FSDataInputStream in,
	OperatorStateHandle.StateMetaInfo metaInfo) throws IOException {

	if (null != metaInfo) {
		long[] offsets = metaInfo.getOffsets();
		if (null != offsets) {
			DataInputView div = new DataInputViewStreamWrapper(in);
			TypeSerializer<S> serializer = stateListForName.getStateMetaInfo().getPartitionStateSerializer();
			for (long offset : offsets) {
				in.seek(offset);
				stateListForName.add(serializer.deserialize(div));
			}
		}
	}
}
 
Example #3
Source File: KeyedStateCheckpointOutputStreamTest.java    From flink with Apache License 2.0
@Test
public void testReadWriteMissingKeyGroups() throws Exception {
	final KeyGroupRange keyRange = new KeyGroupRange(0, 2);
	KeyedStateCheckpointOutputStream stream = createStream(keyRange);

	DataOutputView dov = new DataOutputViewStreamWrapper(stream);
	stream.startNewKeyGroup(1);
	dov.writeInt(1);

	KeyGroupsStateHandle fullHandle = stream.closeAndGetHandle();

	int count = 0;
	try (FSDataInputStream in = fullHandle.openInputStream()) {
		DataInputView div = new DataInputViewStreamWrapper(in);
		for (int kg : fullHandle.getKeyGroupRange()) {
			long off = fullHandle.getOffsetForKeyGroup(kg);
			if (off >= 0) {
				in.seek(off);
				Assert.assertEquals(1, div.readInt());
				++count;
			}
		}
	}

	Assert.assertEquals(1, count);
}
 
Example #4
Source File: ByteStreamStateHandleTest.java    From flink with Apache License 2.0
@Test
public void testBulkRead() throws IOException {
	final byte[] data = {34, 25, 22, 66};
	final ByteStreamStateHandle handle = new ByteStreamStateHandle("name", data);
	final int targetLen = 8;

	for (int start = 0; start < data.length; start++) {
		for (int num = 0; num < targetLen; num++) {
			FSDataInputStream in = handle.openInputStream();
			in.seek(start);

			final byte[] target = new byte[targetLen];
			final int read = in.read(target, targetLen - num, num);

			assertEquals(Math.min(num, data.length - start), read);
			for (int i = 0; i < read; i++) {
				assertEquals(data[start + i], target[targetLen - num + i]);
			}

			int newPos = start + read;
			assertEquals(newPos, (int) in.getPos());
			assertEquals(newPos < data.length ? data[newPos] : -1, in.read());
		}
	}
}
 
Example #5
Source File: FlinkLineParser.java    From incubator-retired-mrql with Apache License 2.0 6 votes vote down vote up
@Override
public void open ( FSDataInputStream fsin, long fstart, long fend ) {
    in_memory = false;
    start = fstart;
    end = fend;
    try {
        in = new LineReader(fsin, start, end - start + 1, maxLineLength);
        // note: the skip-first-record branch below is disabled by the 'false &&' guard
        if (false && start != 0) {  // for all but the first data split, skip the first record
            line = in.readLine();
            if (line != null)
                start += line.length;
        }
        pos = start;
    } catch ( IOException e ) {
        System.err.println("*** Cannot parse the data split: " + fsin);
        start = end;
    }
}
 
Example #6
Source File: CheckpointStreamWithResultProviderTest.java    From flink with Apache License 2.0
@Test
public void testCloseAndFinalizeCheckpointStreamResultPrimaryOnly() throws Exception {
	CheckpointStreamFactory primaryFactory = createCheckpointStreamFactory();

	CheckpointStreamWithResultProvider resultProvider =
		CheckpointStreamWithResultProvider.createSimpleStream(CheckpointedStateScope.EXCLUSIVE, primaryFactory);

	SnapshotResult<StreamStateHandle> result = writeCheckpointTestData(resultProvider);

	Assert.assertNotNull(result.getJobManagerOwnedSnapshot());
	Assert.assertNull(result.getTaskLocalSnapshot());

	try (FSDataInputStream inputStream = result.getJobManagerOwnedSnapshot().openInputStream()) {
		Assert.assertEquals(0x42, inputStream.read());
		Assert.assertEquals(-1, inputStream.read());
	}
}
 
Example #7
Source File: LineReader.java    From incubator-retired-mrql with Apache License 2.0 6 votes vote down vote up
public LineReader(final FSDataInputStream strm, final long start, final long length, final int buffersize)
		throws IOException {
	this.stream = strm;
	this.readBuffer = new byte[buffersize];
	this.wrapBuffer = new byte[256];

	this.lengthLeft = length;
	this.readPos = 0;
	this.overLimit = false;

	if (start != 0) {
		strm.seek(start);
		readLine();
	} else {
		fillBuffer();
	}
}
 
Example #8
Source File: OperatorStateOutputCheckpointStreamTest.java    From flink with Apache License 2.0
private static void verifyRead(OperatorStateHandle fullHandle, int numPartitions) throws IOException {
	int count = 0;
	try (FSDataInputStream in = fullHandle.openInputStream()) {
		OperatorStateHandle.StateMetaInfo metaInfo = fullHandle.getStateNameToPartitionOffsets().
				get(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME);

		long[] offsets = metaInfo.getOffsets();

		Assert.assertNotNull(offsets);

		DataInputView div = new DataInputViewStreamWrapper(in);
		for (int i = 0; i < numPartitions; ++i) {
			in.seek(offsets[i]);
			Assert.assertEquals(i, div.readInt());
			++count;
		}
	}

	Assert.assertEquals(numPartitions, count);
}
 
Example #9
Source File: OperatorStateRestoreOperation.java    From Flink-CEPplus with Apache License 2.0
private <S> void deserializeOperatorStateValues(
	PartitionableListState<S> stateListForName,
	FSDataInputStream in,
	OperatorStateHandle.StateMetaInfo metaInfo) throws IOException {

	if (null != metaInfo) {
		long[] offsets = metaInfo.getOffsets();
		if (null != offsets) {
			DataInputView div = new DataInputViewStreamWrapper(in);
			TypeSerializer<S> serializer = stateListForName.getStateMetaInfo().getPartitionStateSerializer();
			for (long offset : offsets) {
				in.seek(offset);
				stateListForName.add(serializer.deserialize(div));
			}
		}
	}
}
 
Example #10
Source File: EmptyStreamStateHandle.java    From flink with Apache License 2.0
@Override
public FSDataInputStream openInputStream() throws IOException {
	// returns an empty stream
	return new FSDataInputStream() {

		@Override
		public void seek(long desired) throws IOException {
			if (desired != 0) {
				throw new IOException("out of bounds");
			}
		}

		@Override
		public long getPos() {
			return 0;
		}

		@Override
		public int read() throws IOException {
			return -1;
		}
	};
}
 
Example #11
Source File: KeyedStateCheckpointOutputStreamTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testReadWriteMissingKeyGroups() throws Exception {
	final KeyGroupRange keyRange = new KeyGroupRange(0, 2);
	KeyedStateCheckpointOutputStream stream = createStream(keyRange);

	DataOutputView dov = new DataOutputViewStreamWrapper(stream);
	stream.startNewKeyGroup(1);
	dov.writeInt(1);

	KeyGroupsStateHandle fullHandle = stream.closeAndGetHandle();

	int count = 0;
	try (FSDataInputStream in = fullHandle.openInputStream()) {
		DataInputView div = new DataInputViewStreamWrapper(in);
		for (int kg : fullHandle.getKeyGroupRange()) {
			long off = fullHandle.getOffsetForKeyGroup(kg);
			if (off >= 0) {
				in.seek(off);
				Assert.assertEquals(1, div.readInt());
				++count;
			}
		}
	}

	Assert.assertEquals(1, count);
}
 
Example #12
Source File: CheckpointStateOutputStreamTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Validates that even empty streams create a file and a file state handle.
 */
@Test
public void testEmptyState() throws Exception {
	final FileSystem fs = FileSystem.getLocalFileSystem();
	final Path folder = baseFolder();
	final String fileName = "myFileName";
	final Path filePath = new Path(folder, fileName);

	final FileStateHandle handle;
	try (FSDataOutputStream stream = createTestStream(fs, folder, fileName)) {
		handle = closeAndGetResult(stream);
	}

	// must have created a handle
	assertNotNull(handle);
	assertEquals(filePath, handle.getFilePath());

	// the path must exist and must not be a directory
	assertTrue(fs.exists(handle.getFilePath()));
	assertFalse(fs.getFileStatus(filePath).isDir());

	// the contents should be empty
	try (FSDataInputStream in = handle.openInputStream()) {
		assertEquals(-1, in.read());
	}
}
 
Example #13
Source File: BlockingFSDataInputStream.java    From Flink-CEPplus with Apache License 2.0
public BlockingFSDataInputStream(
	@Nullable FSDataInputStream delegate,
	@Nullable OneShotLatch waitForBlock,
	@Nullable OneShotLatch triggerUnblock,
	long blockAtPosition) {

	this.delegate = delegate;
	this.triggerUnblock = triggerUnblock;
	this.waitUntilStreamBlocked = waitForBlock;
	this.blockAtPosition = blockAtPosition;
	if (delegate != null) {
		try {
			this.position = delegate.getPos();
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	} else {
		this.position = 0;
	}
	this.closed = new AtomicBoolean(false);
}
 
Example #14
Source File: FileUtils.java    From flink with Apache License 2.0
private static void addToZip(Path fileOrDirectory, FileSystem fs, Path rootDir, ZipOutputStream out) throws IOException {
	String relativePath = fileOrDirectory.getPath().replace(rootDir.getPath() + '/', "");
	if (fs.getFileStatus(fileOrDirectory).isDir()) {
		out.putNextEntry(new ZipEntry(relativePath + '/'));
		for (FileStatus containedFile : fs.listStatus(fileOrDirectory)) {
			addToZip(containedFile.getPath(), fs, rootDir, out);
		}
	} else {
		ZipEntry entry = new ZipEntry(relativePath);
		out.putNextEntry(entry);

		try (FSDataInputStream in = fs.open(fileOrDirectory)) {
			IOUtils.copyBytes(in, out, false);
		}
		out.closeEntry();
	}
}
 
Example #15
Source File: OperatorStateRestoreOperation.java    From Flink-CEPplus with Apache License 2.0
private <K, V> void deserializeBroadcastStateValues(
	final BackendWritableBroadcastState<K, V> broadcastStateForName,
	final FSDataInputStream in,
	final OperatorStateHandle.StateMetaInfo metaInfo) throws Exception {

	if (metaInfo != null) {
		long[] offsets = metaInfo.getOffsets();
		if (offsets != null) {

			TypeSerializer<K> keySerializer = broadcastStateForName.getStateMetaInfo().getKeySerializer();
			TypeSerializer<V> valueSerializer = broadcastStateForName.getStateMetaInfo().getValueSerializer();

			in.seek(offsets[0]);

			DataInputView div = new DataInputViewStreamWrapper(in);
			int size = div.readInt();
			for (int i = 0; i < size; i++) {
				broadcastStateForName.put(keySerializer.deserialize(div), valueSerializer.deserialize(div));
			}
		}
	}
}
 
Example #16
Source File: FileReadFunction.java    From Flink-CEPplus with Apache License 2.0
@Override
public void flatMap(Tuple3<String, Long, Long> value, Collector<String> out) throws Exception {
	FSDataInputStream stream = FileSystem.get(new URI(value.f0)).open(new Path(value.f0));
	stream.seek(value.f1);

	BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
	String line;

	try {
		while ((line = reader.readLine()) != null && (value.f2 == -1L || stream.getPos() <= value.f2)) {
			out.collect(line);
		}
	} finally {
		reader.close();
	}
}
 
Example #17
Source File: DuplicatingCheckpointOutputStreamTest.java    From flink with Apache License 2.0
/**
 * Tests that in case of unaligned stream positions, the secondary stream is closed and the primary still works.
 * This matters because some code relies on seeking to stream offsets in the created state files, and that
 * code could fail if the streams are not aligned.
 */
@Test
public void testUnalignedStreamsException() throws IOException {
	int streamCapacity = 1024 * 1024;
	TestMemoryCheckpointOutputStream primaryStream = new TestMemoryCheckpointOutputStream(streamCapacity);
	TestMemoryCheckpointOutputStream secondaryStream = new TestMemoryCheckpointOutputStream(streamCapacity);

	primaryStream.write(42);

	DuplicatingCheckpointOutputStream stream =
		new DuplicatingCheckpointOutputStream(primaryStream, secondaryStream);

	Assert.assertNotNull(stream.getSecondaryStreamException());
	Assert.assertTrue(secondaryStream.isClosed());

	stream.write(23);

	try {
		stream.closeAndGetSecondaryHandle();
		Assert.fail();
	} catch (IOException ignore) {
		Assert.assertEquals(ignore.getCause(), stream.getSecondaryStreamException());
	}

	StreamStateHandle primaryHandle = stream.closeAndGetPrimaryHandle();

	try (FSDataInputStream inputStream = primaryHandle.openInputStream()) {
		Assert.assertEquals(42, inputStream.read());
		Assert.assertEquals(23, inputStream.read());
		Assert.assertEquals(-1, inputStream.read());
	}
}
 
Example #18
Source File: InterruptSensitiveRestoreTest.java    From Flink-CEPplus with Apache License 2.0
@Override
public FSDataInputStream openInputStream() throws IOException {

	closed = false;

	FSDataInputStream is = new FSDataInputStream() {

		@Override
		public void seek(long desired) {
		}

		@Override
		public long getPos() {
			return 0;
		}

		@Override
		public int read() throws IOException {
			block();
			throw new EOFException();
		}

		@Override
		public void close() throws IOException {
			super.close();
			closed = true;
		}
	};

	return is;
}
 
Example #19
Source File: CheckpointStreamWithResultProviderTest.java    From flink with Apache License 2.0
@Test
public void testCloseAndFinalizeCheckpointStreamResultPrimaryAndSecondary() throws Exception {
	CheckpointStreamFactory primaryFactory = createCheckpointStreamFactory();
	LocalRecoveryDirectoryProvider directoryProvider = createLocalRecoveryDirectoryProvider();

	CheckpointStreamWithResultProvider resultProvider =
		CheckpointStreamWithResultProvider.createDuplicatingStream(
			42L,
			CheckpointedStateScope.EXCLUSIVE,
			primaryFactory,
			directoryProvider);

	SnapshotResult<StreamStateHandle> result = writeCheckpointTestData(resultProvider);

	Assert.assertNotNull(result.getJobManagerOwnedSnapshot());
	Assert.assertNotNull(result.getTaskLocalSnapshot());

	try (FSDataInputStream inputStream = result.getJobManagerOwnedSnapshot().openInputStream()) {
		Assert.assertEquals(0x42, inputStream.read());
		Assert.assertEquals(-1, inputStream.read());
	}

	try (FSDataInputStream inputStream = result.getTaskLocalSnapshot().openInputStream()) {
		Assert.assertEquals(0x42, inputStream.read());
		Assert.assertEquals(-1, inputStream.read());
	}
}
 
Example #20
Source File: HadoopSwiftFileSystemITCase.java    From flink with Apache License 2.0
@Test
public void testSimpleFileWriteAndRead() throws Exception {
	final Configuration conf = createConfiguration();

	final String testLine = "Hello Upload!";

	FileSystem.initialize(conf);

	final Path path = new Path("swift://" + CONTAINER + '.' + SERVICENAME + '/' + TEST_DATA_DIR + "/test.txt");
	final FileSystem fs = path.getFileSystem();

	try {
		try (FSDataOutputStream out = fs.create(path, WriteMode.OVERWRITE);
			OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
			writer.write(testLine);
		}

		try (FSDataInputStream in = fs.open(path);
			InputStreamReader ir = new InputStreamReader(in, StandardCharsets.UTF_8);
			BufferedReader reader = new BufferedReader(ir)) {
			String line = reader.readLine();
			assertEquals(testLine, line);
		}
	}
	finally {
		fs.delete(path, false);
	}
}
 
Example #21
Source File: InterruptSensitiveRestoreTest.java    From flink with Apache License 2.0
@Override
public FSDataInputStream openInputStream() throws IOException {

	closed = false;

	FSDataInputStream is = new FSDataInputStream() {

		@Override
		public void seek(long desired) {
		}

		@Override
		public long getPos() {
			return 0;
		}

		@Override
		public int read() throws IOException {
			block();
			throw new EOFException();
		}

		@Override
		public void close() throws IOException {
			super.close();
			closed = true;
		}
	};

	return is;
}
 
Example #22
Source File: HeapRestoreOperation.java    From flink with Apache License 2.0
private void readStateHandleStateData(
	FSDataInputStream fsDataInputStream,
	DataInputViewStreamWrapper inView,
	KeyGroupRangeOffsets keyGroupOffsets,
	Map<Integer, StateMetaInfoSnapshot> kvStatesById,
	int numStates,
	int readVersion,
	boolean isCompressed) throws IOException {

	final StreamCompressionDecorator streamCompressionDecorator = isCompressed ?
		SnappyStreamCompressionDecorator.INSTANCE : UncompressedStreamCompressionDecorator.INSTANCE;

	for (Tuple2<Integer, Long> groupOffset : keyGroupOffsets) {
		int keyGroupIndex = groupOffset.f0;
		long offset = groupOffset.f1;

		// Check that restored key groups all belong to the backend.
		Preconditions.checkState(keyGroupRange.contains(keyGroupIndex), "The key group must belong to the backend.");

		fsDataInputStream.seek(offset);

		int writtenKeyGroupIndex = inView.readInt();
		Preconditions.checkState(writtenKeyGroupIndex == keyGroupIndex,
			"Unexpected key-group in restore.");

		try (InputStream kgCompressionInStream =
				 streamCompressionDecorator.decorateWithCompression(fsDataInputStream)) {

			readKeyGroupStateData(
				kgCompressionInStream,
				kvStatesById,
				keyGroupIndex,
				numStates,
				readVersion);
		}
	}
}
 
Example #23
Source File: CheckpointStreamWithResultProviderTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCloseAndFinalizeCheckpointStreamResultPrimaryAndSecondary() throws Exception {
	CheckpointStreamFactory primaryFactory = createCheckpointStreamFactory();
	LocalRecoveryDirectoryProvider directoryProvider = createLocalRecoveryDirectoryProvider();

	CheckpointStreamWithResultProvider resultProvider =
		CheckpointStreamWithResultProvider.createDuplicatingStream(
			42L,
			CheckpointedStateScope.EXCLUSIVE,
			primaryFactory,
			directoryProvider);

	SnapshotResult<StreamStateHandle> result = writeCheckpointTestData(resultProvider);

	Assert.assertNotNull(result.getJobManagerOwnedSnapshot());
	Assert.assertNotNull(result.getTaskLocalSnapshot());

	try (FSDataInputStream inputStream = result.getJobManagerOwnedSnapshot().openInputStream()) {
		Assert.assertEquals(0x42, inputStream.read());
		Assert.assertEquals(-1, inputStream.read());
	}

	try (FSDataInputStream inputStream = result.getTaskLocalSnapshot().openInputStream()) {
		Assert.assertEquals(0x42, inputStream.read());
		Assert.assertEquals(-1, inputStream.read());
	}
}
 
Example #24
Source File: CheckpointCoordinatorTestingUtils.java    From flink with Apache License 2.0
static void collectResult(int opIdx, OperatorStateHandle operatorStateHandle, List<String> resultCollector) throws Exception {
	try (FSDataInputStream in = operatorStateHandle.openInputStream()) {
		for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> entry : operatorStateHandle.getStateNameToPartitionOffsets().entrySet()) {
			for (long offset : entry.getValue().getOffsets()) {
				in.seek(offset);
				Integer state = InstantiationUtil.
					deserializeObject(in, Thread.currentThread().getContextClassLoader());
				resultCollector.add(opIdx + " : " + entry.getKey() + " : " + state);
			}
		}
	}
}
 
Example #25
Source File: KeyedStateCheckpointOutputStreamTest.java    From flink with Apache License 2.0
private static void verifyRead(KeyGroupsStateHandle fullHandle, KeyGroupRange keyRange) throws IOException {
	int count = 0;
	try (FSDataInputStream in = fullHandle.openInputStream()) {
		DataInputView div = new DataInputViewStreamWrapper(in);
		for (int kg : fullHandle.getKeyGroupRange()) {
			long off = fullHandle.getOffsetForKeyGroup(kg);
			in.seek(off);
			Assert.assertEquals(kg, div.readInt());
			++count;
		}
	}

	Assert.assertEquals(keyRange.getNumberOfKeyGroups(), count);
}
 
Example #26
Source File: KeyedStateCheckpointOutputStreamTest.java    From Flink-CEPplus with Apache License 2.0
private static void verifyRead(KeyGroupsStateHandle fullHandle, KeyGroupRange keyRange) throws IOException {
	int count = 0;
	try (FSDataInputStream in = fullHandle.openInputStream()) {
		DataInputView div = new DataInputViewStreamWrapper(in);
		for (int kg : fullHandle.getKeyGroupRange()) {
			long off = fullHandle.getOffsetForKeyGroup(kg);
			in.seek(off);
			Assert.assertEquals(kg, div.readInt());
			++count;
		}
	}

	Assert.assertEquals(keyRange.getNumberOfKeyGroups(), count);
}
 
Example #27
Source File: CheckpointCoordinatorTest.java    From Flink-CEPplus with Apache License 2.0
public static void compareKeyedState(
		Collection<KeyGroupsStateHandle> expectPartitionedKeyGroupState,
		Collection<? extends KeyedStateHandle> actualPartitionedKeyGroupState) throws Exception {

	KeyGroupsStateHandle expectedHeadOpKeyGroupStateHandle = expectPartitionedKeyGroupState.iterator().next();
	int expectedTotalKeyGroups = expectedHeadOpKeyGroupStateHandle.getKeyGroupRange().getNumberOfKeyGroups();
	int actualTotalKeyGroups = 0;
	for (KeyedStateHandle keyedStateHandle : actualPartitionedKeyGroupState) {
		assertTrue(keyedStateHandle instanceof KeyGroupsStateHandle);

		actualTotalKeyGroups += keyedStateHandle.getKeyGroupRange().getNumberOfKeyGroups();
	}

	assertEquals(expectedTotalKeyGroups, actualTotalKeyGroups);

	try (FSDataInputStream inputStream = expectedHeadOpKeyGroupStateHandle.openInputStream()) {
		for (int groupId : expectedHeadOpKeyGroupStateHandle.getKeyGroupRange()) {
			long offset = expectedHeadOpKeyGroupStateHandle.getOffsetForKeyGroup(groupId);
			inputStream.seek(offset);
			int expectedKeyGroupState =
					InstantiationUtil.deserializeObject(inputStream, Thread.currentThread().getContextClassLoader());
			for (KeyedStateHandle oneActualKeyedStateHandle : actualPartitionedKeyGroupState) {

				assertTrue(oneActualKeyedStateHandle instanceof KeyGroupsStateHandle);

				KeyGroupsStateHandle oneActualKeyGroupStateHandle = (KeyGroupsStateHandle) oneActualKeyedStateHandle;
				if (oneActualKeyGroupStateHandle.getKeyGroupRange().contains(groupId)) {
					long actualOffset = oneActualKeyGroupStateHandle.getOffsetForKeyGroup(groupId);
					try (FSDataInputStream actualInputStream = oneActualKeyGroupStateHandle.openInputStream()) {
						actualInputStream.seek(actualOffset);
						int actualGroupState = InstantiationUtil.
								deserializeObject(actualInputStream, Thread.currentThread().getContextClassLoader());
						assertEquals(expectedKeyGroupState, actualGroupState);
					}
				}
			}
		}
	}
}