org.apache.flink.runtime.operators.testutils.types.IntPair Java Examples

The following examples show how to use org.apache.flink.runtime.operators.testutils.types.IntPair. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example #1
Source File: MutableHashTableTestBase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
@Test
public void testBuildAndRetrieve() throws Exception {
	// Size the table so all pairs fit into the granted memory pages.
	final int numMemPages = 32 * NUM_PAIRS / PAGE_SIZE;
	AbstractMutableHashTable<IntPair> table = getHashTable(intPairSerializer, intPairComparator, getMemory(numMemPages));

	final Random rnd = new Random(RANDOM_SEED);
	final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);

	table.open();

	// Build phase: insert every generated pair.
	for (IntPair pair : pairs) {
		table.insert(pair);
	}

	// Probe phase: every inserted pair must be retrievable with its value intact.
	AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(intPairComparator, pairComparator);
	IntPair target = new IntPair();

	for (IntPair pair : pairs) {
		assertNotNull(prober.getMatchFor(pair, target));
		assertEquals(pair.getValue(), target.getValue());
	}

	table.close();
	// Closing the table must return every memory page it was given.
	assertEquals("Memory lost", numMemPages, table.getFreeMemory().size());
}
 
Example #2
Source File: ReusingHashJoinIteratorITCase.java    From flink with Apache License 2.0 6 votes vote down vote up
@Override
public void join(IntPair rec1, Tuple2<Integer, String> rec2, Collector<Tuple2<Integer, String>> out) throws Exception
{
	// Pull apart both sides of the candidate match.
	final int pairKey = rec1.getKey();
	final int pairValue = rec1.getValue();
	final Integer tupleKey = rec2.f0;
	final String tupleValue = rec2.f1;

	// Records that were joined must agree on the key.
	Assert.assertTrue("Key does not match for matching IntPair Tuple combination.", pairKey == tupleKey);

	Collection<TupleIntPairMatch> matches = this.toRemoveFrom.get(tupleKey);
	if (matches == null) {
		Assert.fail("Match " + tupleKey + " - " + pairValue + ":" + tupleValue + " is unexpected.");
	}

	// Tick the produced match off the list of still-expected matches.
	Assert.assertTrue("Produced match was not contained: " + tupleKey + " - " + pairValue + ":" + tupleValue,
		matches.remove(new TupleIntPairMatch(pairValue, tupleValue)));

	// Drop the key once all of its expected matches have been seen.
	if (matches.isEmpty()) {
		this.toRemoveFrom.remove(tupleKey);
	}
}
 
Example #3
Source File: MutableHashTableTestBase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Builds {@code num} pairs with the dense key space [0, num), each value being
 * its key shifted by {@code KEY_VALUE_DIFF}, then shuffles them via random swaps.
 */
protected static IntPair[] getRandomizedIntPairs(int num, Random rnd) {
	IntPair[] pairs = new IntPair[num];

	// Create all pairs with dense, consecutive keys.
	for (int i = 0; i < num; i++) {
		pairs[i] = new IntPair(i, i + KEY_VALUE_DIFF);
	}

	// Shuffle by performing 2 * num random position swaps.
	for (int swap = 0; swap < 2 * num; swap++) {
		int a = rnd.nextInt(num);
		int b = rnd.nextInt(num);

		IntPair held = pairs[a];
		pairs[a] = pairs[b];
		pairs[b] = held;
	}

	return pairs;
}
 
Example #4
Source File: MutableHashTableTestBase.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Produces {@code num} shuffled pairs: keys cover [0, num) densely and each
 * value equals its key plus the fixed {@code KEY_VALUE_DIFF} offset.
 */
protected static IntPair[] getRandomizedIntPairs(int num, Random rnd) {
	final IntPair[] result = new IntPair[num];

	// Fill the array with densely keyed pairs first.
	for (int idx = 0; idx < num; idx++) {
		result[idx] = new IntPair(idx, idx + KEY_VALUE_DIFF);
	}

	// Randomize the order with twice as many swaps as there are elements.
	for (int round = 0; round < 2 * num; round++) {
		final int first = rnd.nextInt(num);
		final int second = rnd.nextInt(num);

		final IntPair tmp = result[first];
		result[first] = result[second];
		result[second] = tmp;
	}

	return result;
}
 
Example #5
Source File: MutableHashTableTestBase.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Generates {@code num} pairs whose keys densely cover [0, num) (value = key
 * + {@code KEY_VALUE_DIFF}), randomized in place by repeated element swaps.
 */
protected static IntPair[] getRandomizedIntPairs(int num, Random rnd) {
	final IntPair[] out = new IntPair[num];

	// Dense creation: one pair per key in order.
	for (int k = 0; k < num; k++) {
		out[k] = new IntPair(k, k + KEY_VALUE_DIFF);
	}

	// 2 * num random swaps shuffle the array.
	for (int n = 2 * num; n > 0; n--) {
		final int i = rnd.nextInt(num);
		final int j = rnd.nextInt(num);

		final IntPair t = out[i];
		out[i] = out[j];
		out[j] = t;
	}

	return out;
}
 
Example #6
Source File: NonReusingHashJoinIteratorITCase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Drains the given iterator and groups all pair values by their key.
 *
 * @param iter the (mutable, reusing) iterator of pairs to consume fully
 * @return a map from each key to the collection of all values seen for it
 * @throws Exception if the iterator fails while producing records
 */
public static Map<Integer, Collection<Integer>> collectIntPairData(MutableObjectIterator<IntPair> iter)
throws Exception
{
	Map<Integer, Collection<Integer>> map = new HashMap<>();
	IntPair pair = new IntPair();

	while ((pair = iter.next(pair)) != null) {
		final int key = pair.getKey();
		final int value = pair.getValue();

		// Single map lookup instead of containsKey + put + get.
		map.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
	}

	return map;
}
 
Example #7
Source File: NonReusingHashJoinIteratorITCase.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Exhausts the iterator and collects every value under its pair's key.
 *
 * @param iter the (mutable, reusing) iterator of pairs to consume fully
 * @return a map from each key to all values observed for that key
 * @throws Exception if the iterator fails while producing records
 */
public static Map<Integer, Collection<Integer>> collectIntPairData(MutableObjectIterator<IntPair> iter)
throws Exception
{
	Map<Integer, Collection<Integer>> map = new HashMap<>();
	IntPair pair = new IntPair();

	while ((pair = iter.next(pair)) != null) {
		final int key = pair.getKey();
		final int value = pair.getValue();

		// computeIfAbsent replaces the containsKey/put/get triple lookup.
		map.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
	}

	return map;
}
 
Example #8
Source File: TestData.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
@Override
public void putNormalizedKey(IntPair record, MemorySegment target, int offset, int len) {
	// see IntValue for a documentation of the logic
	final int value = record.getKey() - Integer.MIN_VALUE;

	if (len >= 4) {
		// Full key fits: write it big-endian, then zero-pad any remaining bytes.
		target.putIntBigEndian(offset, value);
		for (int i = 4; i < len; i++) {
			target.put(offset + i, (byte) 0);
		}
	} else {
		// Truncated key (len in [0, 4)): emit only the most significant bytes.
		// For len <= 0 this loop writes nothing, matching the no-op case.
		for (int i = 0; i < len; i++) {
			target.put(offset + i, (byte) ((value >>> ((3 - i) << 3)) & 0xff));
		}
	}
}
 
Example #9
Source File: TestData.java    From flink with Apache License 2.0 6 votes vote down vote up
@Override
public void putNormalizedKey(IntPair record, MemorySegment target, int offset, int len) {
	// see IntValue for a documentation of the logic
	final int normalized = record.getKey() - Integer.MIN_VALUE;

	if (len < 4) {
		// Short (possibly zero-length) slot: write the top bytes only.
		// When len <= 0 the loop body never runs, i.e. nothing is written.
		for (int i = 0; i < len; i++) {
			target.put(offset + i, (byte) ((normalized >>> ((3 - i) << 3)) & 0xff));
		}
	} else {
		// Slot holds the whole int; pad anything beyond 4 bytes with zeros.
		target.putIntBigEndian(offset, normalized);
		for (int i = 4; i < len; i++) {
			target.put(offset + i, (byte) 0);
		}
	}
}
 
Example #10
Source File: MutableHashTableTestBase.java    From flink with Apache License 2.0 6 votes vote down vote up
@Test
public void testBuildAndRetrieve() throws Exception {
	// Enough pages to hold all pairs in the in-memory table.
	final int pageCount = 32 * NUM_PAIRS / PAGE_SIZE;
	AbstractMutableHashTable<IntPair> table = getHashTable(intPairSerializer, intPairComparator, getMemory(pageCount));

	final Random random = new Random(RANDOM_SEED);
	final IntPair[] testPairs = getRandomizedIntPairs(NUM_PAIRS, random);

	table.open();

	// Insert all pairs into the freshly opened table.
	for (int idx = 0; idx < NUM_PAIRS; idx++) {
		table.insert(testPairs[idx]);
	}

	AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(intPairComparator, pairComparator);
	IntPair match = new IntPair();

	// Each pair must be found again, with the stored value unchanged.
	for (int idx = 0; idx < NUM_PAIRS; idx++) {
		assertNotNull(prober.getMatchFor(testPairs[idx], match));
		assertEquals(testPairs[idx].getValue(), match.getValue());
	}

	table.close();
	// All granted pages must be free again after close().
	assertEquals("Memory lost", pageCount, table.getFreeMemory().size());
}
 
Example #11
Source File: MutableHashTableTestBase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@Test
public void testDifferentProbers() {
	final int NUM_MEM_PAGES = 32 * NUM_PAIRS / PAGE_SIZE;
	AbstractMutableHashTable<IntPair> table = getHashTable(intPairSerializer, intPairComparator, getMemory(NUM_MEM_PAGES));

	AbstractHashTableProber<IntPair, IntPair> prober1 = table.getProber(intPairComparator, pairComparator);
	AbstractHashTableProber<IntPair, IntPair> prober2 = table.getProber(intPairComparator, pairComparator);

	// getProber must hand out a fresh prober instance on every call.
	// assertNotSame states the intent and yields a descriptive failure
	// message, unlike the bare assertFalse(prober1 == prober2) identity check.
	assertNotSame("getProber() must return a new prober instance each time", prober1, prober2);

	table.close(); // (This also tests calling close without calling open first.)
	assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
 
Example #12
Source File: RandomIntPairGenerator.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next(IntPair reuse) {
	// Signal end-of-stream once numRecords records have been handed out.
	if (this.count++ >= this.numRecords) {
		return null;
	}
	// Fill the reusable record with a fresh random key/value.
	reuse.setKey(this.rnd.nextInt());
	reuse.setValue(this.rnd.nextInt());
	return reuse;
}
 
Example #13
Source File: MutableHashTableTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testMultipleProbers() throws Exception {
	final int pageCount = SIZE * NUM_LISTS / PAGE_SIZE;
	AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(pageCount));

	final Random rnd = new Random(RANDOM_SEED);
	final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
	final IntPair[] pairs = getRandomizedIntPairs(NUM_LISTS, rnd);

	table.open();
	// Build the table from the list records only.
	for (IntList list : lists) {
		table.insert(list);
	}

	// Two independent probers over the same table: one probes with lists,
	// the other probes with pairs.
	AbstractHashTableProber<IntList, IntList> listProber = table.getProber(comparatorV, pairComparatorV);
	AbstractHashTableProber<IntPair, IntList> pairProber = table.getProber(intPairComparator, pairComparatorPL);

	// Both probers must find every record, and the list values must match.
	IntList target = new IntList();
	for (int i = 0; i < NUM_LISTS; i++) {
		assertNotNull(pairProber.getMatchFor(pairs[i], target));
		assertNotNull(listProber.getMatchFor(lists[i], target));
		assertArrayEquals(lists[i].getValue(), target.getValue());
	}
	table.close();
	assertEquals("Memory lost", pageCount, table.getFreeMemory().size());
}
 
Example #14
Source File: UniformIntPairGenerator.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next(IntPair target) {
	if (repeatKey) {
		// Repeat-key mode: emit every value for one key before moving on.
		if (keyCnt >= numKeys) {
			return null;
		}

		target.setKey(keyCnt);
		target.setValue(valCnt++);

		// Wrap the value counter and advance to the next key.
		if (valCnt == numVals) {
			valCnt = 0;
			keyCnt++;
		}
	} else {
		// Round-robin mode: emit one value for every key before advancing.
		if (valCnt >= numVals) {
			return null;
		}

		target.setKey(keyCnt++);
		target.setValue(valCnt);

		// Wrap the key counter and advance to the next value.
		if (keyCnt == numKeys) {
			keyCnt = 0;
			valCnt++;
		}
	}

	return target;
}
 
Example #15
Source File: UniformIntPairGenerator.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next(IntPair target) {
	if (repeatKey) {
		// All values for the current key are generated before the key changes.
		if (keyCnt >= numKeys) {
			return null;
		}

		target.setKey(keyCnt);
		target.setValue(valCnt++);

		// Value range exhausted: reset it and step to the next key.
		if (valCnt == numVals) {
			valCnt = 0;
			keyCnt++;
		}
	} else {
		// Every key is visited once per value before the value changes.
		if (valCnt >= numVals) {
			return null;
		}

		target.setKey(keyCnt++);
		target.setValue(valCnt);

		// Key range exhausted: reset it and step to the next value.
		if (keyCnt == numKeys) {
			keyCnt = 0;
			valCnt++;
		}
	}

	return target;
}
 
Example #16
Source File: MutableHashTableTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testDifferentProbers() {
	final int pageCount = 32 * NUM_PAIRS / PAGE_SIZE;
	AbstractMutableHashTable<IntPair> table = getHashTable(intPairSerializer, intPairComparator, getMemory(pageCount));

	// Request two probers with identical comparators ...
	AbstractHashTableProber<IntPair, IntPair> first = table.getProber(intPairComparator, pairComparator);
	AbstractHashTableProber<IntPair, IntPair> second = table.getProber(intPairComparator, pairComparator);

	// ... and verify the table hands out distinct instances.
	assertFalse(first == second);

	table.close(); // (This also tests calling close without calling open first.)
	assertEquals("Memory lost", pageCount, table.getFreeMemory().size());
}
 
Example #17
Source File: HashTableITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next(IntPair reuse) {
	// End of stream once the remaining count is used up.
	if (this.numLeft <= 0) {
		return null;
	}
	this.numLeft--;
	// Emit the same constant key/value on every call.
	reuse.setKey(this.key);
	reuse.setValue(this.value);
	return reuse;
}
 
Example #18
Source File: MutableHashTableTestBase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@Test
public void testMultipleProbers() throws Exception {
	final int numPages = SIZE * NUM_LISTS / PAGE_SIZE;
	AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(numPages));

	final Random random = new Random(RANDOM_SEED);
	final IntList[] listRecords = getRandomizedIntLists(NUM_LISTS, random);
	final IntPair[] pairProbes = getRandomizedIntPairs(NUM_LISTS, random);

	table.open();
	// Only the list records are inserted; the pairs serve as probe keys.
	for (int i = 0; i < NUM_LISTS; i++) {
		table.insert(listRecords[i]);
	}

	// Obtain two probers on the same table, keyed by different record types.
	AbstractHashTableProber<IntList, IntList> listProber = table.getProber(comparatorV, pairComparatorV);
	AbstractHashTableProber<IntPair, IntList> pairProber = table.getProber(intPairComparator, pairComparatorPL);

	IntList found = new IntList();
	for (int i = 0; i < NUM_LISTS; i++) {
		// Each record must be locatable through both probers.
		assertNotNull(pairProber.getMatchFor(pairProbes[i], found));
		assertNotNull(listProber.getMatchFor(listRecords[i], found));
		assertArrayEquals(listRecords[i].getValue(), found.getValue());
	}
	table.close();
	assertEquals("Memory lost", numPages, table.getFreeMemory().size());
}
 
Example #19
Source File: RandomIntPairGenerator.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next() {
	if (this.count++ < this.numRecords) {
		return new IntPair(this.rnd.nextInt(), this.rnd.nextInt());
	} else {
		return null;
	}
}
 
Example #20
Source File: RandomIntPairGenerator.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next() {
	// Stop producing once the configured record count is reached.
	if (this.count++ >= this.numRecords) {
		return null;
	}
	// Non-reusing variant: build a brand-new random pair each time.
	return new IntPair(this.rnd.nextInt(), this.rnd.nextInt());
}
 
Example #21
Source File: MutableHashTableTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testDifferentProbers() {
	final int memPages = 32 * NUM_PAIRS / PAGE_SIZE;
	AbstractMutableHashTable<IntPair> table = getHashTable(intPairSerializer, intPairComparator, getMemory(memPages));

	// Two getProber calls with the same comparators must not share an instance.
	AbstractHashTableProber<IntPair, IntPair> proberA = table.getProber(intPairComparator, pairComparator);
	AbstractHashTableProber<IntPair, IntPair> proberB = table.getProber(intPairComparator, pairComparator);

	assertFalse(proberA == proberB);

	table.close(); // (This also tests calling close without calling open first.)
	assertEquals("Memory lost", memPages, table.getFreeMemory().size());
}
 
Example #22
Source File: HashTableITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next(IntPair reuse) {
	if (this.numLeft > 0) {
		this.numLeft--;
		reuse.setKey(this.key);
		reuse.setValue(this.value);
		return reuse;
	}
	else {
		return null;
	}
}
 
Example #23
Source File: HashTableITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next(IntPair reuse) {
	// Exhausted: no records remain to emit.
	if (this.numLeft <= 0) {
		return null;
	}
	this.numLeft--;
	// Reuse the passed-in record, filled with the constant key/value.
	reuse.setKey(this.key);
	reuse.setValue(this.value);
	return reuse;
}
 
Example #24
Source File: RandomIntPairGenerator.java    From flink with Apache License 2.0 5 votes vote down vote up
@Override
public IntPair next() {
	if (this.count++ < this.numRecords) {
		return new IntPair(this.rnd.nextInt(), this.rnd.nextInt());
	} else {
		return null;
	}
}
 
Example #25
Source File: HashTablePerformanceComparison.java    From flink with Apache License 2.0 4 votes vote down vote up
@Test
public void testMutableHashMapPerformance() {
	// Benchmark-style test: build a MutableHashTable from uniformly generated
	// IntPairs, then time three phases — probing, in-place updates, and a
	// second probing run that verifies the updates took effect.
	try (IOManager ioManager = new IOManagerAsync()) {
		final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;
		
		// Build side: NUM_PAIRS distinct keys, one value each.
		MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

		// Probe side for open() is empty (0 keys); probing is driven manually below.
		MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(0, 1, false);
		
		// Separate generators (same sequence) for each manual phase.
		MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
		
		MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

		MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
		
		long start;
		long end;
		
		long first = System.currentTimeMillis();
		
		System.out.println("Creating and filling MutableHashMap...");
		start = System.currentTimeMillis();
		MutableHashTable<IntPair, IntPair> table = new MutableHashTable<IntPair, IntPair>(serializer, serializer, comparator, comparator, pairComparator, getMemory(NUM_MEM_PAGES, PAGE_SIZE), ioManager);				
		table.open(buildInput, probeInput);
		end = System.currentTimeMillis();
		System.out.println("HashMap ready. Time: " + (end-start) + " ms");
		
		System.out.println("Starting first probing run...");
		start = System.currentTimeMillis();
		IntPair compare = new IntPair();
		HashBucketIterator<IntPair, IntPair> iter;
		IntPair target = new IntPair(); 
		// Each probe key must match exactly one record with the original value.
		while(probeTester.next(compare) != null) {
			iter = table.getMatchesFor(compare);
			iter.next(target);
			assertEquals(target.getKey(), compare.getKey());
			assertEquals(target.getValue(), compare.getValue());
			assertTrue(iter.next(target) == null);
		}
		end = System.currentTimeMillis();
		System.out.println("Probing done. Time: " + (end-start) + " ms");

		System.out.println("Starting update...");
		start = System.currentTimeMillis();
		// Increment every record's value in place via writeBack().
		while(updater.next(compare) != null) {
			compare.setValue(compare.getValue() + 1);
			iter = table.getMatchesFor(compare);
			iter.next(target);
			iter.writeBack(compare);
			//assertFalse(iter.next(target));
		}
		end = System.currentTimeMillis();
		System.out.println("Update done. Time: " + (end-start) + " ms");
		
		System.out.println("Starting second probing run...");
		start = System.currentTimeMillis();
		// Re-probe expecting the incremented values written above.
		while(updateTester.next(compare) != null) {
			compare.setValue(compare.getValue() + 1);
			iter = table.getMatchesFor(compare);
			iter.next(target);
			assertEquals(target.getKey(), compare.getKey());
			assertEquals(target.getValue(), compare.getValue());
			assertTrue(iter.next(target) == null);
		}
		end = System.currentTimeMillis();
		System.out.println("Probing done. Time: " + (end-start) + " ms");
		
		table.close();
		
		end = System.currentTimeMillis();
		System.out.println("Overall time: " + (end-first) + " ms");
		
		// Closing must return all memory pages that were handed to the table.
		assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreedMemory().size());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail("Error: " + e.getMessage());
	}
}
 
Example #26
Source File: TestData.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
@Override
public IntPair readWithKeyDenormalization(IntPair reuse, DataInputView source) throws IOException {
	reuse.setKey(source.readInt() + Integer.MIN_VALUE);
	reuse.setValue(source.readInt());
	return reuse;
}
 
Example #27
Source File: CompactingHashTableTest.java    From flink with Apache License 2.0 4 votes vote down vote up
@Test
public void testTripleResize() {
	// Only CompactingHashTable
	// Fills a CompactingHashTable, then forces three successive resizes via
	// the private resizeHashTable() (invoked reflectively through Whitebox),
	// verifying after each resize that every pair is still retrievable.
	try {
		final int NUM_MEM_PAGES = 30 * NUM_PAIRS / PAGE_SIZE;
		final Random rnd = new Random(RANDOM_SEED);
		final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);

		List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
		CompactingHashTable<IntPair> table = new CompactingHashTable<IntPair>(intPairSerializer, intPairComparator, memory);
		table.open();

		for (int i = 0; i < NUM_PAIRS; i++) {
			table.insert(pairs[i]);
		}

		AbstractHashTableProber<IntPair, IntPair> prober =
			table.getProber(intPairComparator, new SameTypePairComparator<>(intPairComparator));
		IntPair target = new IntPair();

		// Baseline check before any resize.
		for (int i = 0; i < NUM_PAIRS; i++) {
			assertNotNull(prober.getMatchFor(pairs[i], target));
			assertEquals(pairs[i].getValue(), target.getValue());
		}

		// make sure there is enough memory for resize
		memory.addAll(getMemory(ADDITIONAL_MEM));
		Boolean b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
		assertTrue(b);

		// First resize: all pairs must still be found.
		for (int i = 0; i < NUM_PAIRS; i++) {
			assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
			assertEquals(pairs[i].getValue(), target.getValue());
		}

		// make sure there is enough memory for resize
		memory.addAll(getMemory(ADDITIONAL_MEM));
		b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
		assertTrue(b);

		// Second resize: all pairs must still be found.
		for (int i = 0; i < NUM_PAIRS; i++) {
			assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
			assertEquals(pairs[i].getValue(), target.getValue());
		}

		// make sure there is enough memory for resize
		memory.addAll(getMemory(2*ADDITIONAL_MEM));
		b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
		assertTrue(b);

		// Third resize (with double the extra memory): all pairs must still be found.
		for (int i = 0; i < NUM_PAIRS; i++) {
			assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
			assertEquals(pairs[i].getValue(), target.getValue());
		}

		table.close();
		// Initial pages + all additional pages (1 + 1 + 2 = 4 batches) must be returned.
		assertEquals("Memory lost", NUM_MEM_PAGES + 4*ADDITIONAL_MEM, table.getFreeMemory().size());
	} catch (Exception e) {
		e.printStackTrace();
		fail("Error: " + e.getMessage());
	}
}
 
Example #28
Source File: CompactingHashTableTest.java    From flink with Apache License 2.0 4 votes vote down vote up
@Test
public void testDoubleResize() {
	// Only CompactingHashTable
	// Fills a CompactingHashTable, then forces two successive resizes via the
	// private resizeHashTable() (invoked reflectively through Whitebox),
	// verifying after each resize that every pair is still retrievable.
	try {
		final int NUM_MEM_PAGES = 30 * NUM_PAIRS / PAGE_SIZE;
		final Random rnd = new Random(RANDOM_SEED);
		final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);

		List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
		CompactingHashTable<IntPair> table = new CompactingHashTable<IntPair>(intPairSerializer, intPairComparator, memory);
		table.open();

		for (int i = 0; i < NUM_PAIRS; i++) {
			table.insert(pairs[i]);
		}

		AbstractHashTableProber<IntPair, IntPair> prober =
			table.getProber(intPairComparator, new SameTypePairComparator<>(intPairComparator));
		IntPair target = new IntPair();

		// Baseline check before any resize.
		for (int i = 0; i < NUM_PAIRS; i++) {
			assertNotNull(prober.getMatchFor(pairs[i], target));
			assertEquals(pairs[i].getValue(), target.getValue());
		}

		// make sure there is enough memory for resize
		memory.addAll(getMemory(ADDITIONAL_MEM));
		Boolean b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
		assertTrue(b);

		// First resize: all pairs must still be found.
		for (int i = 0; i < NUM_PAIRS; i++) {
			assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
			assertEquals(pairs[i].getValue(), target.getValue());
		}

		// make sure there is enough memory for resize
		memory.addAll(getMemory(ADDITIONAL_MEM));
		b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
		assertTrue(b);

		// Second resize: all pairs must still be found.
		for (int i = 0; i < NUM_PAIRS; i++) {
			assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
			assertEquals(pairs[i].getValue(), target.getValue());
		}

		table.close();
		// Initial pages plus both additional batches must be returned on close.
		assertEquals("Memory lost", NUM_MEM_PAGES + ADDITIONAL_MEM + ADDITIONAL_MEM, table.getFreeMemory().size());
	} catch (Exception e) {
		e.printStackTrace();
		fail("Error: " + e.getMessage());
	}
}
 
Example #29
Source File: NonReusingHashJoinIteratorITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
@Override
public boolean equalToReference(IntPair candidate) {
	// A candidate matches when its key equals the stored reference key.
	final int candidateKey = candidate.getKey();
	return candidateKey == this.reference;
}
 
Example #30
Source File: FixedLengthRecordSorterTest.java    From flink with Apache License 2.0 4 votes vote down vote up
@Test
	public void testWriteAndRead() throws Exception {
		// Writes random IntPairs into a FixedLengthRecordSorter until the buffer
		// is full (or a cap is hit), then replays the generator to verify each
		// record can be read back by index with key and value intact.
		final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
		final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);
		
		FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
		RandomIntPairGenerator generator = new RandomIntPairGenerator(SEED);
		
//		long startTime = System.currentTimeMillis();
		// write the records
		IntPair record = new IntPair();
		int num = -1;
		// NOTE(review): 3354624 appears to be a cap on the record count so the
		// test stays bounded — confirm where this constant comes from.
		do {
			generator.next(record);
			num++;
		}
		while (sorter.write(record) && num < 3354624);
//		System.out.println("WRITE TIME " + (System.currentTimeMillis() - startTime));
		
		// re-read the records
		// Resetting the generator reproduces the same random sequence (seeded).
		generator.reset();
		IntPair readTarget = new IntPair();
		
//		startTime = System.currentTimeMillis();
		int i = 0;
		while (i < num) {
			generator.next(record);
			readTarget = sorter.getRecord(readTarget, i++);
			
			int rk = readTarget.getKey();
			int gk = record.getKey();
			
			int rv = readTarget.getValue();
			int gv = record.getValue();
			
			if (gk != rk) {
				Assert.fail("The re-read key is wrong " + i);
			}
			if (gv != rv) {
				Assert.fail("The re-read value is wrong");
			}
		}
//		System.out.println("READ TIME " + (System.currentTimeMillis() - startTime));
//		System.out.println("RECORDS " + num);
		
		// release the memory occupied by the buffers
		sorter.dispose();
		this.memoryManager.release(memory);
	}