org.apache.flink.api.java.operators.UnsortedGrouping Java Examples

The following examples show how to use org.apache.flink.api.java.operators.UnsortedGrouping. They are taken from open-source projects; the source file and originating project are listed above each example.
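As a quick orientation, the snippet below is a minimal, self-contained sketch (it is not taken from any of the projects listed here; the class name and input data are made up) showing how an UnsortedGrouping is typically obtained via DataSet#groupBy and then consumed by a grouped transformation such as sum:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.UnsortedGrouping;
import org.apache.flink.api.java.tuple.Tuple2;

public class UnsortedGroupingSketch {

	public static void main(String[] args) throws Exception {
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// hypothetical input: (word, count) pairs
		DataSet<Tuple2<String, Integer>> words = env.fromElements(
				new Tuple2<>("a", 1), new Tuple2<>("b", 2), new Tuple2<>("a", 3));

		// groupBy(0) groups on the first tuple field and returns an UnsortedGrouping
		UnsortedGrouping<Tuple2<String, Integer>> grouped = words.groupBy(0);

		// grouped aggregations such as sum(...) turn the grouping back into a DataSet
		DataSet<Tuple2<String, Integer>> sums = grouped.sum(1);

		// prints one record per key, e.g. (a,4) and (b,2); print() also triggers execution
		sums.print();
	}
}

The examples that follow use the same pattern with reduceGroup, combineGroup, minBy, and maxBy, or construct the grouping directly from expression keys.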
Example #1
Source File: ReduceWithCombinerITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testForkingReduceOnKeyedDataset() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(4);

	// creates the input data and distributes them evenly among the available downstream tasks
	DataSet<Tuple3<String, Integer, Boolean>> input = createKeyedInput(env);

	UnsortedGrouping<Tuple3<String, Integer, Boolean>> counts = input.groupBy(0);

	DataSet<Tuple3<String, Integer, Boolean>> r1 = counts.reduceGroup(new KeyedCombReducer());
	DataSet<Tuple3<String, Integer, Boolean>> r2 = counts.reduceGroup(new KeyedGroupCombReducer());

	List<Tuple3<String, Integer, Boolean>> actual = r1.union(r2).collect();
	String expected = "k1,6,true\n" +
		"k2,4,true\n" +
		"k1,6,true\n" +
		"k2,4,true\n";
	compareResultAsTuples(actual, expected);
}
 
Example #2
Source File: ReduceWithCombinerITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testForkingReduceOnKeyedDatasetWithSelection() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(4);

	// creates the input data and distributes them evenly among the available downstream tasks
	DataSet<Tuple3<String, Integer, Boolean>> input = createKeyedInput(env);

	UnsortedGrouping<Tuple3<String, Integer, Boolean>> counts = input.groupBy(new KeySelectorX());

	DataSet<Tuple3<String, Integer, Boolean>> r1 = counts.reduceGroup(new KeyedCombReducer());
	DataSet<Tuple3<String, Integer, Boolean>> r2 = counts.reduceGroup(new KeyedGroupCombReducer());

	List<Tuple3<String, Integer, Boolean>> actual = r1.union(r2).collect();
	String expected = "k1,6,true\n" +
		"k2,4,true\n" +
		"k1,6,true\n" +
		"k2,4,true\n";

	compareResultAsTuples(actual, expected);
}
 
Example #3
Source File: ReduceWithCombinerITCase.java    From flink with Apache License 2.0
@Test
public void testForkingReduceOnKeyedDataset() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(4);

	// creates the input data and distributes them evenly among the available downstream tasks
	DataSet<Tuple3<String, Integer, Boolean>> input = createKeyedInput(env);

	UnsortedGrouping<Tuple3<String, Integer, Boolean>> counts = input.groupBy(0);

	DataSet<Tuple3<String, Integer, Boolean>> r1 = counts.reduceGroup(new KeyedCombReducer());
	DataSet<Tuple3<String, Integer, Boolean>> r2 = counts.reduceGroup(new KeyedGroupCombReducer());

	List<Tuple3<String, Integer, Boolean>> actual = r1.union(r2).collect();
	String expected = "k1,6,true\n" +
		"k2,4,true\n" +
		"k1,6,true\n" +
		"k2,4,true\n";
	compareResultAsTuples(actual, expected);
}
 
Example #4
Source File: ReduceWithCombinerITCase.java    From flink with Apache License 2.0
@Test
public void testForkingReduceOnKeyedDatasetWithSelection() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(4);

	// creates the input data and distributes them evenly among the available downstream tasks
	DataSet<Tuple3<String, Integer, Boolean>> input = createKeyedInput(env);

	UnsortedGrouping<Tuple3<String, Integer, Boolean>> counts = input.groupBy(new KeySelectorX());

	DataSet<Tuple3<String, Integer, Boolean>> r1 = counts.reduceGroup(new KeyedCombReducer());
	DataSet<Tuple3<String, Integer, Boolean>> r2 = counts.reduceGroup(new KeyedGroupCombReducer());

	List<Tuple3<String, Integer, Boolean>> actual = r1.union(r2).collect();
	String expected = "k1,6,true\n" +
		"k2,4,true\n" +
		"k1,6,true\n" +
		"k2,4,true\n";

	compareResultAsTuples(actual, expected);
}
 
Example #5
Source File: MinByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an index which is out of bounds throws an
 * IndexOutOfBoundsException.
 */
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping3() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should not work, key out of tuple bounds
	groupDs.minBy(1, 2, 3, 4, -1);
}
 
Example #6
Source File: MaxByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an index which is out of bounds throws an
 * IndexOutOfBoundsException.
 */
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping3() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should not work, key out of tuple bounds
	groupDs.maxBy(1, 2, 3, 4, -1);
}
 
Example #7
Source File: MinByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an index which is out of bounds throws an
 * IndexOutOfBoundsException.
 */
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping2() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should not work, key out of tuple bounds
	groupDs.minBy(-1);
}
 
Example #8
Source File: MinByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an index which is out of bounds throws an
 * IndexOutOfBoundsException.
 */
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping1() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should not work, key out of tuple bounds
	groupDs.minBy(5);
}
 
Example #9
Source File: MinByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an InvalidProgramException is thrown when minBy
 * is used on a custom data type.
 */
@Test(expected = InvalidProgramException.class)
public void testCustomKeyFieldsGrouping() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	this.customTypeData.add(new CustomType());

	UnsortedGrouping<CustomType> groupDs = env.fromCollection(customTypeData).groupBy(0);
	// should not work: groups on custom type
	groupDs.minBy(0);
}
 
Example #10
Source File: MinByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that no exception is thrown when an empty grouping
 * calls minBy().
 */
@Test
public void testMinByKeyFieldsGrouping() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should work
	try {
		groupDs.minBy(4, 0, 1, 2, 3);
	} catch (Exception e) {
		Assert.fail();
	}
}
 
Example #11
Source File: MaxByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an index which is out of bounds throws an
 * IndexOutOfBoundsException.
 */
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping1() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should not work, key out of tuple bounds
	groupDs.maxBy(5);
}
 
Example #12
Source File: MaxByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that no exception is thrown when an empty grouping
 * calls maxBy().
 */
@Test
public void testMaxByKeyFieldsGrouping() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should work
	try {
		groupDs.maxBy(4, 0, 1, 2, 3);
	} catch (Exception e) {
		Assert.fail();
	}
}
 
Example #13
Source File: MaxByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an InvalidProgramException is thrown when maxBy
 * is used on a custom data type.
 */
@Test(expected = InvalidProgramException.class)
public void testCustomKeyFieldsGrouping() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	this.customTypeData.add(new CustomType());

	UnsortedGrouping<CustomType> groupDs = env.fromCollection(customTypeData).groupBy(0);
	// should not work: groups on custom type
	groupDs.maxBy(0);
}
 
Example #14
Source File: GroupCombineITCase.java    From flink with Apache License 2.0
@Test
// check that a parallelism of 1 produces the same result as a shuffle
public void testCheckPartitionShuffleDOP1() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	env.setParallelism(1);

	// data
	DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);

	// partition and group data
	UnsortedGrouping<Tuple3<Integer, Long, String>> partitionedDS = ds.partitionByHash(0).groupBy(1);

	List<Tuple2<Long, Integer>> result = partitionedDS
			.combineGroup(
			new GroupCombineFunction<Tuple3<Integer, Long, String>, Tuple2<Long, Integer>>() {
				@Override
				public void combine(Iterable<Tuple3<Integer, Long, String>> values, Collector<Tuple2<Long, Integer>> out) throws Exception {
					int count = 0;
					long key = 0;
					for (Tuple3<Integer, Long, String> value : values) {
						key = value.f1;
						count++;
					}
					out.collect(new Tuple2<>(key, count));
				}
			}).collect();

	String expected = "6,6\n" +
			"5,5\n" +
			"4,4\n" +
			"3,3\n" +
			"2,2\n" +
			"1,1\n";

	compareResultAsTuples(result, expected);
}
 
Example #15
Source File: MaxByOperatorTest.java    From flink with Apache License 2.0
/**
 * This test validates that an index which is out of bounds throws an
 * IndexOutOfBoundsException.
 */
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping2() {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0);

	// should not work, key out of tuple bounds
	groupDs.maxBy(-1);
}
 
Example #16
Source File: GroupCombineITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
// check if no shuffle is being executed
public void testCheckPartitionShuffleGroupBy() throws Exception {

	org.junit.Assume.assumeTrue(mode != TestExecutionMode.COLLECTION);

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	// data
	DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);

	// partition and group data
	UnsortedGrouping<Tuple3<Integer, Long, String>> partitionedDS = ds.partitionByHash(0).groupBy(1);

	List<Tuple2<Long, Integer>> result = partitionedDS
			.combineGroup(
					new GroupCombineFunction<Tuple3<Integer, Long, String>, Tuple2<Long, Integer>>() {
		@Override
		public void combine(Iterable<Tuple3<Integer, Long, String>> values, Collector<Tuple2<Long, Integer>> out) throws Exception {
			int count = 0;
			long key = 0;
			for (Tuple3<Integer, Long, String> value : values) {
				key = value.f1;
				count++;
			}
			out.collect(new Tuple2<>(key, count));
		}
	}).collect();

	String[] localExpected = new String[] { "(6,6)", "(5,5)" + "(4,4)", "(3,3)", "(2,2)", "(1,1)" };

	String[] resultAsStringArray = new String[result.size()];
	for (int i = 0; i < resultAsStringArray.length; ++i) {
		resultAsStringArray[i] = result.get(i).toString();
	}
	Arrays.sort(resultAsStringArray);

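	// no shuffle should have been executed before the combine, so the result must differ from localExpected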
	Assert.assertEquals("The two arrays were identical.", false, Arrays.equals(localExpected, resultAsStringArray));
}
 
Example #17
Source File: MinByOperatorTest.java    From flink with Apache License 2.0
/**
 * Validates that no ClassCastException is thrown, i.e. that minBy does not fail
 * as it did in FLINK-8255; an InvalidProgramException is expected instead.
 */
@Test(expected = InvalidProgramException.class)
public void testMinByRowTypeInfoKeyFieldsForUnsortedGrouping() {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	TypeInformation[] types = new TypeInformation[]{Types.INT, Types.INT};

	String[] fieldNames = new String[]{"id", "value"};
	RowTypeInfo rowTypeInfo = new RowTypeInfo(types, fieldNames);

	UnsortedGrouping groupDs = env.fromCollection(Collections.singleton(new Row(2)), rowTypeInfo).groupBy(0);

	groupDs.minBy(1);
}
 
Example #18
Source File: MaxByOperatorTest.java    From flink with Apache License 2.0
/**
 * Validates that no ClassCastException is thrown, i.e. that maxBy does not fail
 * as it did in FLINK-8255; an InvalidProgramException is expected instead.
 */
@Test(expected = InvalidProgramException.class)
public void testMaxByRowTypeInfoKeyFieldsForUnsortedGrouping() {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	TypeInformation[] types = new TypeInformation[]{Types.INT, Types.INT};

	String[] fieldNames = new String[]{"id", "value"};
	RowTypeInfo rowTypeInfo = new RowTypeInfo(types, fieldNames);

	UnsortedGrouping groupDs = env.fromCollection(Collections.singleton(new Row(2)), rowTypeInfo).groupBy(0);

	groupDs.maxBy(1);
}
 
Example #19
Source File: GroupCombineITCase.java    From flink with Apache License 2.0
@Test
// check if no shuffle is being executed
public void testCheckPartitionShuffleGroupBy() throws Exception {

	org.junit.Assume.assumeTrue(mode != TestExecutionMode.COLLECTION);

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	// data
	DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);

	// partition and group data
	UnsortedGrouping<Tuple3<Integer, Long, String>> partitionedDS = ds.partitionByHash(0).groupBy(1);

	List<Tuple2<Long, Integer>> result = partitionedDS
			.combineGroup(
					new GroupCombineFunction<Tuple3<Integer, Long, String>, Tuple2<Long, Integer>>() {
		@Override
		public void combine(Iterable<Tuple3<Integer, Long, String>> values, Collector<Tuple2<Long, Integer>> out) throws Exception {
			int count = 0;
			long key = 0;
			for (Tuple3<Integer, Long, String> value : values) {
				key = value.f1;
				count++;
			}
			out.collect(new Tuple2<>(key, count));
		}
	}).collect();

	String[] localExpected = new String[] { "(6,6)", "(5,5)" + "(4,4)", "(3,3)", "(2,2)", "(1,1)" };

	String[] resultAsStringArray = new String[result.size()];
	for (int i = 0; i < resultAsStringArray.length; ++i) {
		resultAsStringArray[i] = result.get(i).toString();
	}
	Arrays.sort(resultAsStringArray);

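	// no shuffle should have been executed before the combine, so the result must differ from localExpected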
	Assert.assertEquals("The two arrays were identical.", false, Arrays.equals(localExpected, resultAsStringArray));
}
 
Example #20
Source File: FlinkBatchTransformTranslators.java    From flink-dataflow with Apache License 2.0
@Override
public void translateNode(GroupByKey<K, V> transform, FlinkBatchTranslationContext context) {
	DataSet<KV<K, V>> inputDataSet = context.getInputDataSet(context.getInput(transform));
	GroupReduceFunction<KV<K, V>, KV<K, Iterable<V>>> groupReduceFunction = new FlinkKeyedListAggregationFunction<>();

	TypeInformation<KV<K, Iterable<V>>> typeInformation = context.getTypeInfo(context.getOutput(transform));

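	// build the UnsortedGrouping directly, keyed on the "key" field of the KV elements via an expression key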
	Grouping<KV<K, V>> grouping = new UnsortedGrouping<>(inputDataSet, new Keys.ExpressionKeys<>(new String[]{"key"}, inputDataSet.getType()));

	GroupReduceOperator<KV<K, V>, KV<K, Iterable<V>>> outputDataSet =
			new GroupReduceOperator<>(grouping, typeInformation, groupReduceFunction, transform.getName());

	context.setOutputDataSet(context.getOutput(transform), outputDataSet);
}
 
Example #21
Source File: FlinkBatchTransformTranslators.java    From flink-dataflow with Apache License 2.0
@Override
public void translateNode(GroupByKey.GroupByKeyOnly<K, V> transform, FlinkBatchTranslationContext context) {
	DataSet<KV<K, V>> inputDataSet = context.getInputDataSet(context.getInput(transform));
	GroupReduceFunction<KV<K, V>, KV<K, Iterable<V>>> groupReduceFunction = new FlinkKeyedListAggregationFunction<>();

	TypeInformation<KV<K, Iterable<V>>> typeInformation = context.getTypeInfo(context.getOutput(transform));

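	// build the UnsortedGrouping directly, keyed on the "key" field of the KV elements via an expression key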
	Grouping<KV<K, V>> grouping = new UnsortedGrouping<>(inputDataSet, new Keys.ExpressionKeys<>(new String[]{"key"}, inputDataSet.getType()));

	GroupReduceOperator<KV<K, V>, KV<K, Iterable<V>>> outputDataSet =
			new GroupReduceOperator<>(grouping, typeInformation, groupReduceFunction, transform.getName());
	context.setOutputDataSet(context.getOutput(transform), outputDataSet);
}
 