Java Code Examples for org.apache.flink.runtime.operators.testutils.UniformStringPairGenerator

The following examples show how to use org.apache.flink.runtime.operators.testutils.UniformStringPairGenerator. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: Flink-CEPplus   Source File: MutableHashTableTestBase.java    License: Apache License 2.0
@Test
public void testVariableLengthStringBuildAndRetrieve() throws IOException {
	// Budget roughly 40 bytes of table memory per generated string pair.
	final int NUM_MEM_PAGES = 40 * NUM_PAIRS / PAGE_SIZE;
	AbstractMutableHashTable<StringPair> table = getHashTable(serializerS, comparatorS, getMemory(NUM_MEM_PAGES));

	// Four identically-configured generators replay the same pair sequence,
	// so each phase below iterates over the same data set.
	MutableObjectIterator<StringPair> buildInput = new UniformStringPairGenerator(NUM_PAIRS, 1, false);
	MutableObjectIterator<StringPair> probeTester = new UniformStringPairGenerator(NUM_PAIRS, 1, false);
	MutableObjectIterator<StringPair> updater = new UniformStringPairGenerator(NUM_PAIRS, 1, false);
	MutableObjectIterator<StringPair> updateTester = new UniformStringPairGenerator(NUM_PAIRS, 1, false);

	table.open();

	// Build phase: insert every generated pair.
	StringPair target = new StringPair();
	while (buildInput.next(target) != null) {
		table.insert(target);
	}

	// Probe phase: every inserted pair must be found with its value intact.
	AbstractHashTableProber<StringPair, StringPair> prober = table.getProber(comparatorS, pairComparatorS);
	StringPair temp = new StringPair();
	while (probeTester.next(target) != null) {
		assertNotNull("" + target.getKey(), prober.getMatchFor(target, temp));
		// JUnit convention: expected value first, actual value second
		// (matches the assertion in the verify phase below).
		assertEquals(target.getValue(), temp.getValue());
	}

	// Update phase: re-insert each record via insertOrReplaceRecord.
	// NOTE(review): setValue(getValue()) leaves the value unchanged, so this
	// exercises the replace path without changing the record length.
	while (updater.next(target) != null) {
		target.setValue(target.getValue());
		table.insertOrReplaceRecord(target);
	}

	// Verify phase: the replaced records must still be retrievable.
	while (updateTester.next(target) != null) {
		assertNotNull(prober.getMatchFor(target, temp));
		assertEquals(target.getValue(), temp.getValue());
	}

	table.close();
	// Closing must hand back every memory page; otherwise pages leaked.
	assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
 
Example 2
Source Project: flink   Source File: MutableHashTableTestBase.java    License: Apache License 2.0
@Test
public void testVariableLengthStringBuildAndRetrieve() throws IOException {
	// Memory budget: about 40 bytes per generated pair.
	final int NUM_MEM_PAGES = 40 * NUM_PAIRS / PAGE_SIZE;

	AbstractMutableHashTable<StringPair> table =
		getHashTable(serializerS, comparatorS, getMemory(NUM_MEM_PAGES));

	// Each phase re-reads the identical pair sequence from its own generator.
	MutableObjectIterator<StringPair> insertSource = new UniformStringPairGenerator(NUM_PAIRS, 1, false);
	MutableObjectIterator<StringPair> probeSource = new UniformStringPairGenerator(NUM_PAIRS, 1, false);
	MutableObjectIterator<StringPair> replaceSource = new UniformStringPairGenerator(NUM_PAIRS, 1, false);
	MutableObjectIterator<StringPair> verifySource = new UniformStringPairGenerator(NUM_PAIRS, 1, false);

	table.open();

	StringPair record = new StringPair();

	// Build: insert all generated pairs.
	while (insertSource.next(record) != null) {
		table.insert(record);
	}

	// Probe: each pair must be present with its original value.
	AbstractHashTableProber<StringPair, StringPair> prober =
		table.getProber(comparatorS, pairComparatorS);
	StringPair found = new StringPair();
	while (probeSource.next(record) != null) {
		assertNotNull("" + record.getKey(), prober.getMatchFor(record, found));
		assertEquals(found.getValue(), record.getValue());
	}

	// Update: overwrite every record through insertOrReplaceRecord.
	while (replaceSource.next(record) != null) {
		record.setValue(record.getValue());
		table.insertOrReplaceRecord(record);
	}

	// Verify: all records remain retrievable after the replacement pass.
	while (verifySource.next(record) != null) {
		assertNotNull(prober.getMatchFor(record, found));
		assertEquals(record.getValue(), found.getValue());
	}

	table.close();
	// All memory pages must be free again after close.
	assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
 
Example 3
Source Project: flink   Source File: MutableHashTableTestBase.java    License: Apache License 2.0
@Test
public void testVariableLengthStringBuildAndRetrieve() throws IOException {
	// Budget roughly 40 bytes of table memory per generated string pair.
	final int NUM_MEM_PAGES = 40 * NUM_PAIRS / PAGE_SIZE;
	AbstractMutableHashTable<StringPair> table = getHashTable(serializerS, comparatorS, getMemory(NUM_MEM_PAGES));

	// Identically-configured generators replay the same pair sequence,
	// so each phase below walks the same data set.
	MutableObjectIterator<StringPair> buildInput = new UniformStringPairGenerator(NUM_PAIRS, 1, false);

	MutableObjectIterator<StringPair> probeTester = new UniformStringPairGenerator(NUM_PAIRS, 1, false);

	MutableObjectIterator<StringPair> updater = new UniformStringPairGenerator(NUM_PAIRS, 1, false);

	MutableObjectIterator<StringPair> updateTester = new UniformStringPairGenerator(NUM_PAIRS, 1, false);

	table.open();

	// Build phase: insert every generated pair.
	StringPair target = new StringPair();
	while(buildInput.next(target) != null) {
           table.insert(target);
       }

	// Probe phase: every inserted pair must be found with its value intact.
	AbstractHashTableProber<StringPair, StringPair> prober = table.getProber(comparatorS, pairComparatorS);
	StringPair temp = new StringPair();
	while(probeTester.next(target) != null) {
           assertNotNull("" + target.getKey(), prober.getMatchFor(target, temp));
           assertEquals(temp.getValue(), target.getValue());
       }

	// Update phase: re-insert each record via insertOrReplaceRecord.
	// NOTE(review): setValue(getValue()) leaves the value unchanged, so this
	// exercises the replace path without changing the record length.
	while(updater.next(target) != null) {
           target.setValue(target.getValue());
           table.insertOrReplaceRecord(target);
       }

	// Verify phase: the replaced records must still be retrievable.
	while (updateTester.next(target) != null) {
           assertNotNull(prober.getMatchFor(target, temp));
           assertEquals(target.getValue(), temp.getValue());
       }

	table.close();
	// Closing must hand back every memory page; otherwise pages leaked.
	assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
 
Example 4
Source Project: Flink-CEPplus   Source File: InPlaceMutableHashTableTest.java    License: Apache License 2.0
@Test
public void testWithLengthChangingReduceFunction() throws Exception {
	// Fixed seed so the shuffle and the interleaved emits are reproducible.
	Random rnd = new Random(RANDOM_SEED);

	final int numKeys = 10000;
	final int numVals = 10;
	final int numRecords = numKeys * numVals;

	StringPairSerializer serializer = new StringPairSerializer();
	StringPairComparator comparator = new StringPairComparator();
	// ConcatReducer concatenates values, so reduced records grow in length
	// (per this test's name) — TODO confirm against ConcatReducer's source.
	ReduceFunction<StringPair> reducer = new ConcatReducer();

	// Create the InPlaceMutableHashTableWithJavaHashMap, which will provide the correct output.
	List<StringPair> expectedOutput = new ArrayList<>();
	InPlaceMutableHashTableWithJavaHashMap<StringPair, String> reference = new InPlaceMutableHashTableWithJavaHashMap<>(
		serializer, comparator, reducer, new CopyingListCollector<>(expectedOutput, serializer));

	// Create the InPlaceMutableHashTable to test
	final int numMemPages = numRecords * 10 / PAGE_SIZE;

	List<StringPair> actualOutput = new ArrayList<>();

	InPlaceMutableHashTable<StringPair> table =
		new InPlaceMutableHashTable<>(serializer, comparator, getMemory(numMemPages, PAGE_SIZE));
	InPlaceMutableHashTable<StringPair>.ReduceFacade reduceFacade =
		table.new ReduceFacade(reducer, new CopyingListCollector<>(actualOutput, serializer), true);

	// The loop is for checking the feature that multiple open / close are possible.
	for(int j = 0; j < 3; j++) {
		table.open();

		// Test emit when table is empty
		reduceFacade.emit();

		// Process some manual stuff
		// Every record is applied to both the reference table and the facade,
		// so their outputs can be compared at the end of the round.
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "bar")), "foo");
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "baz")), "foo");
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("alma", "xyz")), "alma");
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "bar")));
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "baz")));
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("alma", "xyz")));
		// Hitting the same key five times repeatedly reduces into one entry.
		for (int i = 0; i < 5; i++) {
			reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("korte", "abc")));
			reference.updateTableEntryWithReduce(serializer.copy(new StringPair("korte", "abc")), "korte");
		}
		reference.emitAndReset();
		reduceFacade.emitAndReset();

		// Generate some input
		UniformStringPairGenerator gen = new UniformStringPairGenerator(numKeys, numVals, true);
		List<StringPair> input = new ArrayList<>();
		StringPair cur = new StringPair();
		while (gen.next(cur) != null) {
			// copy because the generator reuses the same mutable object
			input.add(serializer.copy(cur));
		}
		Collections.shuffle(input, rnd);

		// Process the generated input
		final int numIntermingledEmits = 5;
		for (StringPair record : input) {
			reference.updateTableEntryWithReduce(serializer.copy(record), record.getKey());
			reduceFacade.updateTableEntryWithReduce(serializer.copy(record));
			if (rnd.nextDouble() < 1.0 / ((double) numRecords / numIntermingledEmits)) {
				// this will fire approx. numIntermingledEmits times
				reference.emitAndReset();
				reduceFacade.emitAndReset();
			}
		}
		reference.emitAndReset();
		// emit() (not emitAndReset) here: table.close() below does the cleanup.
		reduceFacade.emit();
		table.close();

		// Check results

		assertEquals(expectedOutput.size(), actualOutput.size());

		// Compare only the values, order-insensitively: collect, sort, compare.
		String[] expectedValues = new String[expectedOutput.size()];
		for (int i = 0; i < expectedOutput.size(); i++) {
			expectedValues[i] = expectedOutput.get(i).getValue();
		}
		String[] actualValues = new String[actualOutput.size()];
		for (int i = 0; i < actualOutput.size(); i++) {
			actualValues[i] = actualOutput.get(i).getValue();
		}

		Arrays.sort(expectedValues, Ordering.<String>natural());
		Arrays.sort(actualValues, Ordering.<String>natural());
		assertArrayEquals(expectedValues, actualValues);

		// Reset collectors for the next open / close round.
		expectedOutput.clear();
		actualOutput.clear();
	}
}
 
Example 5
Source Project: flink   Source File: InPlaceMutableHashTableTest.java    License: Apache License 2.0
@Test
public void testWithLengthChangingReduceFunction() throws Exception {
	// Seeded RNG keeps the shuffle and the interleaved emits reproducible.
	Random random = new Random(RANDOM_SEED);

	final int numKeys = 10000;
	final int numVals = 10;
	final int numRecords = numKeys * numVals;

	StringPairSerializer serializer = new StringPairSerializer();
	StringPairComparator comparator = new StringPairComparator();
	ReduceFunction<StringPair> reducer = new ConcatReducer();

	// Reference implementation (backed by a Java HashMap, per its name);
	// its collected output serves as the expected result.
	List<StringPair> expectedOutput = new ArrayList<>();
	InPlaceMutableHashTableWithJavaHashMap<StringPair, String> reference =
		new InPlaceMutableHashTableWithJavaHashMap<>(
			serializer, comparator, reducer, new CopyingListCollector<>(expectedOutput, serializer));

	// The table under test, sized for the generated record volume.
	final int numMemPages = numRecords * 10 / PAGE_SIZE;
	List<StringPair> actualOutput = new ArrayList<>();
	InPlaceMutableHashTable<StringPair> table =
		new InPlaceMutableHashTable<>(serializer, comparator, getMemory(numMemPages, PAGE_SIZE));
	InPlaceMutableHashTable<StringPair>.ReduceFacade reduceFacade =
		table.new ReduceFacade(reducer, new CopyingListCollector<>(actualOutput, serializer), true);

	// Three rounds verify that repeated open / close cycles work.
	for (int round = 0; round < 3; round++) {
		table.open();

		// Emitting from an empty table must be harmless.
		reduceFacade.emit();

		// Hand-picked records, applied to both implementations.
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "bar")), "foo");
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "baz")), "foo");
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("alma", "xyz")), "alma");
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "bar")));
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "baz")));
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("alma", "xyz")));
		for (int rep = 0; rep < 5; rep++) {
			reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("korte", "abc")));
			reference.updateTableEntryWithReduce(serializer.copy(new StringPair("korte", "abc")), "korte");
		}
		reference.emitAndReset();
		reduceFacade.emitAndReset();

		// Generate uniform input, copying each record out of the reused
		// generator object, then shuffle deterministically.
		UniformStringPairGenerator generator = new UniformStringPairGenerator(numKeys, numVals, true);
		List<StringPair> shuffledInput = new ArrayList<>();
		StringPair reusable = new StringPair();
		while (generator.next(reusable) != null) {
			shuffledInput.add(serializer.copy(reusable));
		}
		Collections.shuffle(shuffledInput, random);

		// Feed both implementations, occasionally interleaving emitAndReset.
		final int numIntermingledEmits = 5;
		for (StringPair record : shuffledInput) {
			reference.updateTableEntryWithReduce(serializer.copy(record), record.getKey());
			reduceFacade.updateTableEntryWithReduce(serializer.copy(record));
			if (random.nextDouble() < 1.0 / ((double) numRecords / numIntermingledEmits)) {
				// fires roughly numIntermingledEmits times over the whole input
				reference.emitAndReset();
				reduceFacade.emitAndReset();
			}
		}
		reference.emitAndReset();
		reduceFacade.emit();
		table.close();

		// Both implementations must have produced the same multiset of values.
		assertEquals(expectedOutput.size(), actualOutput.size());

		String[] expectedValues = new String[expectedOutput.size()];
		int pos = 0;
		for (StringPair pair : expectedOutput) {
			expectedValues[pos++] = pair.getValue();
		}
		String[] actualValues = new String[actualOutput.size()];
		pos = 0;
		for (StringPair pair : actualOutput) {
			actualValues[pos++] = pair.getValue();
		}

		Arrays.sort(expectedValues, Ordering.<String>natural());
		Arrays.sort(actualValues, Ordering.<String>natural());
		assertArrayEquals(expectedValues, actualValues);

		// Clear collectors before the next round.
		expectedOutput.clear();
		actualOutput.clear();
	}
}
 
Example 6
Source Project: flink   Source File: InPlaceMutableHashTableTest.java    License: Apache License 2.0
@Test
public void testWithLengthChangingReduceFunction() throws Exception {
	// Fixed seed so the shuffle and the interleaved emits are reproducible.
	Random rnd = new Random(RANDOM_SEED);

	final int numKeys = 10000;
	final int numVals = 10;
	final int numRecords = numKeys * numVals;

	StringPairSerializer serializer = new StringPairSerializer();
	StringPairComparator comparator = new StringPairComparator();
	// ConcatReducer concatenates values, so reduced records grow in length
	// (per this test's name) — TODO confirm against ConcatReducer's source.
	ReduceFunction<StringPair> reducer = new ConcatReducer();

	// Create the InPlaceMutableHashTableWithJavaHashMap, which will provide the correct output.
	List<StringPair> expectedOutput = new ArrayList<>();
	InPlaceMutableHashTableWithJavaHashMap<StringPair, String> reference = new InPlaceMutableHashTableWithJavaHashMap<>(
		serializer, comparator, reducer, new CopyingListCollector<>(expectedOutput, serializer));

	// Create the InPlaceMutableHashTable to test
	final int numMemPages = numRecords * 10 / PAGE_SIZE;

	List<StringPair> actualOutput = new ArrayList<>();

	InPlaceMutableHashTable<StringPair> table =
		new InPlaceMutableHashTable<>(serializer, comparator, getMemory(numMemPages, PAGE_SIZE));
	InPlaceMutableHashTable<StringPair>.ReduceFacade reduceFacade =
		table.new ReduceFacade(reducer, new CopyingListCollector<>(actualOutput, serializer), true);

	// The loop is for checking the feature that multiple open / close are possible.
	for(int j = 0; j < 3; j++) {
		table.open();

		// Test emit when table is empty
		reduceFacade.emit();

		// Process some manual stuff
		// Every record is applied to both the reference table and the facade,
		// so their outputs can be compared at the end of the round.
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "bar")), "foo");
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "baz")), "foo");
		reference.updateTableEntryWithReduce(serializer.copy(new StringPair("alma", "xyz")), "alma");
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "bar")));
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("foo", "baz")));
		reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("alma", "xyz")));
		// Hitting the same key five times repeatedly reduces into one entry.
		for (int i = 0; i < 5; i++) {
			reduceFacade.updateTableEntryWithReduce(serializer.copy(new StringPair("korte", "abc")));
			reference.updateTableEntryWithReduce(serializer.copy(new StringPair("korte", "abc")), "korte");
		}
		reference.emitAndReset();
		reduceFacade.emitAndReset();

		// Generate some input
		UniformStringPairGenerator gen = new UniformStringPairGenerator(numKeys, numVals, true);
		List<StringPair> input = new ArrayList<>();
		StringPair cur = new StringPair();
		while (gen.next(cur) != null) {
			// copy because the generator reuses the same mutable object
			input.add(serializer.copy(cur));
		}
		Collections.shuffle(input, rnd);

		// Process the generated input
		final int numIntermingledEmits = 5;
		for (StringPair record : input) {
			reference.updateTableEntryWithReduce(serializer.copy(record), record.getKey());
			reduceFacade.updateTableEntryWithReduce(serializer.copy(record));
			if (rnd.nextDouble() < 1.0 / ((double) numRecords / numIntermingledEmits)) {
				// this will fire approx. numIntermingledEmits times
				reference.emitAndReset();
				reduceFacade.emitAndReset();
			}
		}
		reference.emitAndReset();
		// emit() (not emitAndReset) here: table.close() below does the cleanup.
		reduceFacade.emit();
		table.close();

		// Check results

		assertEquals(expectedOutput.size(), actualOutput.size());

		// Compare only the values, order-insensitively: collect, sort, compare.
		String[] expectedValues = new String[expectedOutput.size()];
		for (int i = 0; i < expectedOutput.size(); i++) {
			expectedValues[i] = expectedOutput.get(i).getValue();
		}
		String[] actualValues = new String[actualOutput.size()];
		for (int i = 0; i < actualOutput.size(); i++) {
			actualValues[i] = actualOutput.get(i).getValue();
		}

		Arrays.sort(expectedValues, Ordering.<String>natural());
		Arrays.sort(actualValues, Ordering.<String>natural());
		assertArrayEquals(expectedValues, actualValues);

		// Reset collectors for the next open / close round.
		expectedOutput.clear();
		actualOutput.clear();
	}
}