org.apache.flink.api.java.typeutils.TupleTypeInfo Java Examples
The following examples show how to use org.apache.flink.api.java.typeutils.TupleTypeInfo.
They are drawn from several open-source projects; each example notes its source project, author, file, and license.
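Before the project examples, here is a minimal sketch of the pattern nearly all of them share: build a TupleTypeInfo describing a tuple type, then derive a serializer from it. The TupleTypeInfo calls are the same API used in the examples below; the class name TupleTypeInfoSketch and the printout are illustrative only.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class TupleTypeInfoSketch {
    public static void main(String[] args) {
        // Describe a Tuple2<String, Integer> to Flink's type system.
        TupleTypeInfo<Tuple2<String, Integer>> typeInfo =
            TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);

        // Any TypeInformation can produce a serializer for its type.
        TypeSerializer<Tuple2<String, Integer>> serializer =
            typeInfo.createSerializer(new ExecutionConfig());

        System.out.println(typeInfo + " arity=" + typeInfo.getArity());
    }
}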
Example #1
Source Project: flink   Author: apache   File: ProjectOperator.java   License: Apache License 2.0

public Projection(DataSet<T> ds, int[] fieldIndexes) {
    if (!(ds.getType() instanceof TupleTypeInfo)) {
        throw new UnsupportedOperationException("project() can only be applied to DataSets of Tuples.");
    }

    if (fieldIndexes.length == 0) {
        throw new IllegalArgumentException("project() needs to select at least one (1) field.");
    } else if (fieldIndexes.length > Tuple.MAX_ARITY - 1) {
        throw new IllegalArgumentException(
            "project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
    }

    int maxFieldIndex = ds.getType().getArity();
    for (int fieldIndex : fieldIndexes) {
        Preconditions.checkElementIndex(fieldIndex, maxFieldIndex);
    }

    this.ds = ds;
    this.fieldIndexes = fieldIndexes;
}
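For orientation, project() is typically invoked as in the following sketch, which assumes the classic DataSet API and an existing ExecutionEnvironment named env; the data and field choices are illustrative.

// Hypothetical input: a Tuple3 data set.
DataSet<Tuple3<Integer, Long, String>> input = env.fromElements(
    Tuple3.of(1, 10L, "a"),
    Tuple3.of(2, 20L, "b"));

// project() runs through the Projection constructor above, which verifies
// that each index is within the input tuple's arity before building the
// output TupleTypeInfo.
DataSet<Tuple2<String, Integer>> projected = input.project(2, 0);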
Example #2
Source Project: Flink-CEPplus   Author: ljygz   File: AbstractSortMergeOuterJoinIteratorITCase.java   License: Apache License 2.0

@Before
public void beforeTest() {
    ExecutionConfig config = new ExecutionConfig();
    config.disableObjectReuse();

    TupleTypeInfo<Tuple2<String, String>> typeInfo1 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class);
    TupleTypeInfo<Tuple2<String, Integer>> typeInfo2 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);
    serializer1 = typeInfo1.createSerializer(config);
    serializer2 = typeInfo2.createSerializer(config);
    comparator1 = typeInfo1.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
    comparator2 = typeInfo2.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
    pairComp = new GenericPairComparator<>(comparator1, comparator2);

    this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
    this.ioManager = new IOManagerAsync();
}
Example #3
Source Project: flink   Author: apache   File: AbstractSortMergeOuterJoinIteratorITCase.java   License: Apache License 2.0

@Before
public void beforeTest() {
    ExecutionConfig config = new ExecutionConfig();
    config.disableObjectReuse();

    TupleTypeInfo<Tuple2<String, String>> typeInfo1 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class);
    TupleTypeInfo<Tuple2<String, Integer>> typeInfo2 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);
    serializer1 = typeInfo1.createSerializer(config);
    serializer2 = typeInfo2.createSerializer(config);
    comparator1 = typeInfo1.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
    comparator2 = typeInfo2.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
    pairComp = new GenericPairComparator<>(comparator1, comparator2);

    this.memoryManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).build();
    this.ioManager = new IOManagerAsync();
}
Example #4
Source Project: Flink-CEPplus   Author: ljygz   File: ReplicatingDataSourceTest.java   License: Apache License 2.0

/**
 * Tests compiler fail for join program with replicated data source behind rebalance.
 */
@Test(expected = CompilerException.class)
public void checkJoinWithReplicatedSourceInputBehindRebalance() {
    ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);

    TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
    ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
        new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(
            new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

    DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
    DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

    DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
        .rebalance()
        .join(source2).where("*").equalTo("*")
        .writeAsText("/some/newpath");

    Plan plan = env.createProgramPlan();

    // submit the plan to the compiler
    OptimizedPlan oPlan = compileNoStats(plan);
}
Example #5
Source Project: Flink-CEPplus   Author: ljygz   File: Graph.java   License: Apache License 2.0

/**
 * Apply a function to the attribute of each edge in the graph.
 *
 * @param mapper the map function to apply.
 * @return a new graph
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <NV> Graph<K, VV, NV> mapEdges(final MapFunction<Edge<K, EV>, NV> mapper) {
    TypeInformation<K> keyType = ((TupleTypeInfo<?>) edges.getType()).getTypeAt(0);

    TypeInformation<NV> valueType;
    if (mapper instanceof ResultTypeQueryable) {
        valueType = ((ResultTypeQueryable) mapper).getProducedType();
    } else {
        valueType = TypeExtractor.createTypeInfo(MapFunction.class, mapper.getClass(), 1, edges.getType(), null);
    }

    TypeInformation<Edge<K, NV>> returnType = (TypeInformation<Edge<K, NV>>) new TupleTypeInfo(
        Edge.class, keyType, keyType, valueType);

    return mapEdges(mapper, returnType);
}
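A hedged usage sketch of the method above, assuming an existing Gelly Graph<Long, String, Double> named graph; the doubling function is illustrative.

// Double every edge value; mapEdges derives the Edge<K, NV> TupleTypeInfo
// (Edge extends Tuple3) exactly as shown above.
Graph<Long, String, Double> doubled = graph.mapEdges(
    new MapFunction<Edge<Long, Double>, Double>() {
        @Override
        public Double map(Edge<Long, Double> edge) {
            return edge.getValue() * 2.0;
        }
    });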
Example #6
Source Project: flink   Author: flink-tpc-ds   File: JavaTableEnvironmentITCase.java   License: Apache License 2.0

@Test
public void testAsFromAndToTuple() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, config());

    Table table = tableEnv
        .fromDataSet(CollectionDataSets.get3TupleDataSet(env), "a, b, c")
        .select("a, b, c");

    TypeInformation<?> ti = new TupleTypeInfo<Tuple3<Integer, Long, String>>(
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.LONG_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    DataSet<?> ds = tableEnv.toDataSet(table, ti);
    List<?> results = ds.collect();
    String expected = "(1,1,Hi)\n" + "(2,2,Hello)\n" + "(3,2,Hello world)\n" +
        "(4,3,Hello world, how are you?)\n" + "(5,3,I am fine.)\n" + "(6,3,Luke Skywalker)\n" +
        "(7,4,Comment#1)\n" + "(8,4,Comment#2)\n" + "(9,4,Comment#3)\n" + "(10,4,Comment#4)\n" +
        "(11,5,Comment#5)\n" + "(12,5,Comment#6)\n" + "(13,5,Comment#7)\n" + "(14,5,Comment#8)\n" +
        "(15,5,Comment#9)\n" + "(16,6,Comment#10)\n" + "(17,6,Comment#11)\n" + "(18,6,Comment#12)\n" +
        "(19,6,Comment#13)\n" + "(20,6,Comment#14)\n" + "(21,6,Comment#15)\n";
    compareResultAsText(results, expected);
}
Example #7
Source Project: flink-examples   Author: mushketyk   File: Java8WordCount.java   License: MIT License

public static void main(String[] args) throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSource<String> lines = env.fromElements(
        "Apache Flink is a community-driven open source framework for distributed big data analytics,",
        "like Hadoop and Spark. The core of Apache Flink is a distributed streaming dataflow engine written",
        " in Java and Scala.[1][2] It aims to bridge the gap between MapReduce-like systems and shared-nothing",
        "parallel database systems. Therefore, Flink executes arbitrary dataflow programs in a data-parallel and",
        "pipelined manner.[3] Flink's pipelined runtime system enables the execution of bulk/batch and stream",
        "processing programs.[4][5] Furthermore, Flink's runtime supports the execution of iterative algorithms natively.[6]"
    );

    lines.flatMap((line, out) -> {
            String[] words = line.split("\\W+");
            for (String word : words) {
                out.collect(new Tuple2<>(word, 1));
            }
        })
        .returns(new TupleTypeInfo(TypeInformation.of(String.class), TypeInformation.of(Integer.class)))
        .groupBy(0)
        .sum(1)
        .print();
}
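The explicit .returns(...) call is needed because Java lambdas erase the Tuple2 type parameters, so Flink cannot infer the flatMap output type on its own. Hand-building a raw TupleTypeInfo works, as above; a TypeHint is the more common way to say the same thing. A sketch under that assumption, reusing the lines data set from the example:

lines.flatMap((String line, Collector<Tuple2<String, Integer>> out) -> {
        for (String word : line.split("\\W+")) {
            out.collect(new Tuple2<>(word, 1));
        }
    })
    // TypeHint's anonymous subclass preserves the generic type for Flink.
    .returns(new TypeHint<Tuple2<String, Integer>>() {})
    .groupBy(0)
    .sum(1)
    .print();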
Example #8
Source Project: Flink-CEPplus   Author: ljygz   File: EitherSerializerTest.java   License: Apache License 2.0

@Test
public void testEitherWithTupleValues() {
    @SuppressWarnings("unchecked")
    Either<Tuple2<LongValue, LongValue>, DoubleValue>[] testData = new Either[] {
        Left(new Tuple2<>(new LongValue(2L), new LongValue(9L))),
        new Left<>(new Tuple2<>(new LongValue(Long.MIN_VALUE), new LongValue(Long.MAX_VALUE))),
        new Right<>(new DoubleValue(32.0)),
        Right(new DoubleValue(Double.MIN_VALUE)),
        Right(new DoubleValue(Double.MAX_VALUE))};

    EitherTypeInfo<Tuple2<LongValue, LongValue>, DoubleValue> eitherTypeInfo = new EitherTypeInfo<>(
        new TupleTypeInfo<Tuple2<LongValue, LongValue>>(ValueTypeInfo.LONG_VALUE_TYPE_INFO, ValueTypeInfo.LONG_VALUE_TYPE_INFO),
        ValueTypeInfo.DOUBLE_VALUE_TYPE_INFO);
    EitherSerializer<Tuple2<LongValue, LongValue>, DoubleValue> eitherSerializer =
        (EitherSerializer<Tuple2<LongValue, LongValue>, DoubleValue>) eitherTypeInfo.createSerializer(new ExecutionConfig());
    SerializerTestInstance<Either<Tuple2<LongValue, LongValue>, DoubleValue>> testInstance =
        new EitherSerializerTestInstance<>(eitherSerializer, eitherTypeInfo.getTypeClass(), -1, testData);
    testInstance.testAll();
}
Example #9
Source Project: Flink-CEPplus   Author: ljygz   File: EitherSerializerTest.java   License: Apache License 2.0

@SuppressWarnings("unchecked")
@Test
public void testEitherWithTuple() {
    Either<Tuple2<Long, Long>, Double>[] testData = new Either[] {
        Either.Left(new Tuple2<>(2L, 9L)),
        new Left<>(new Tuple2<>(Long.MIN_VALUE, Long.MAX_VALUE)),
        new Right<>(32.0),
        Right(Double.MIN_VALUE),
        Right(Double.MAX_VALUE)};

    EitherTypeInfo<Tuple2<Long, Long>, Double> eitherTypeInfo =
        (EitherTypeInfo<Tuple2<Long, Long>, Double>) new EitherTypeInfo<Tuple2<Long, Long>, Double>(
            new TupleTypeInfo<Tuple2<Long, Long>>(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO),
            BasicTypeInfo.DOUBLE_TYPE_INFO);
    EitherSerializer<Tuple2<Long, Long>, Double> eitherSerializer =
        (EitherSerializer<Tuple2<Long, Long>, Double>) eitherTypeInfo.createSerializer(new ExecutionConfig());
    SerializerTestInstance<Either<Tuple2<Long, Long>, Double>> testInstance =
        new EitherSerializerTestInstance<Either<Tuple2<Long, Long>, Double>>(
            eitherSerializer, eitherTypeInfo.getTypeClass(), -1, testData);
    testInstance.testAll();
}
Example #10
Source Project: flink   Author: flink-tpc-ds   File: DataStreamTest.java   License: Apache License 2.0

@Test
public void testPOJOWithNestedArrayNoHashCodeKeyRejection() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<POJOWithHashCode> input = env.fromElements(
        new POJOWithHashCode(new int[] {1, 2}));

    TypeInformation<?> expectedTypeInfo = new TupleTypeInfo<Tuple1<int[]>>(
        PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO);

    // adjust the rule
    expectedException.expect(InvalidProgramException.class);
    expectedException.expectMessage(new StringStartsWith("Type " + expectedTypeInfo + " cannot be used as key."));

    input.keyBy("id");
}
Example #11
Source Project: flink   Author: apache   File: ValueCollectionDataSets.java   License: Apache License 2.0

public static DataSet<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> getGroupSortedNestedTupleDataSet2(ExecutionEnvironment env) {
    List<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> data = new ArrayList<>();
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(1), new IntValue(3)), new StringValue("a"), new IntValue(2)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(1), new IntValue(2)), new StringValue("a"), new IntValue(1)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(2), new IntValue(1)), new StringValue("a"), new IntValue(3)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(2), new IntValue(2)), new StringValue("b"), new IntValue(4)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(3), new IntValue(3)), new StringValue("c"), new IntValue(5)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(3), new IntValue(6)), new StringValue("c"), new IntValue(6)));
    data.add(new Tuple3<>(new Tuple2<IntValue, IntValue>(new IntValue(4), new IntValue(9)), new StringValue("c"), new IntValue(7)));

    TupleTypeInfo<Tuple3<Tuple2<IntValue, IntValue>, StringValue, IntValue>> type = new TupleTypeInfo<>(
        new TupleTypeInfo<Tuple2<IntValue, IntValue>>(ValueTypeInfo.INT_VALUE_TYPE_INFO, ValueTypeInfo.INT_VALUE_TYPE_INFO),
        ValueTypeInfo.STRING_VALUE_TYPE_INFO,
        ValueTypeInfo.INT_VALUE_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example #12
Source Project: Flink-CEPplus   Author: ljygz   File: ExpressionKeysTest.java   License: Apache License 2.0

@Test
public void testAreCompatible9() throws Keys.IncompatibleKeysException {
    TypeInformation<Tuple3<String, Long, Integer>> t1 = new TupleTypeInfo<>(
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.LONG_TYPE_INFO,
        BasicTypeInfo.INT_TYPE_INFO
    );
    TypeInformation<PojoWithMultiplePojos> t2 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);

    ExpressionKeys<Tuple3<String, Long, Integer>> ek1 = new ExpressionKeys<>(new int[]{2, 0}, t1);
    Keys<PojoWithMultiplePojos> ek2 = new Keys.SelectorFunctionKeys<>(
        new KeySelector3(),
        t2,
        new TupleTypeInfo<Tuple2<Integer, String>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO)
    );

    Assert.assertTrue(ek1.areCompatible(ek2));
}
Example #13
Source Project: Flink-CEPplus   Author: ljygz   File: CrossOperator.java   License: Apache License 2.0

public DefaultCross(DataSet<I1> input1, DataSet<I2> input2, CrossHint hint, String defaultName) {
    super(input1, input2, new DefaultCrossFunction<I1, I2>(),
        new TupleTypeInfo<Tuple2<I1, I2>>(
            Preconditions.checkNotNull(input1, "input1 is null").getType(),
            Preconditions.checkNotNull(input2, "input2 is null").getType()),
        hint, defaultName);
}
Example #14
Source Project: flink   Author: apache   File: SlidingWindowCheckMapper.java   License: Apache License 2.0

@Override
public void open(Configuration parameters) {
    ValueStateDescriptor<List<Tuple2<Event, Integer>>> previousWindowDescriptor =
        new ValueStateDescriptor<>("eventsSeenSoFar",
            new ListTypeInfo<>(new TupleTypeInfo<>(TypeInformation.of(Event.class), BasicTypeInfo.INT_TYPE_INFO)));

    eventsSeenSoFar = getRuntimeContext().getState(previousWindowDescriptor);

    ValueStateDescriptor<Long> lastSequenceNumberDescriptor =
        new ValueStateDescriptor<>("lastSequenceNumber", BasicTypeInfo.LONG_TYPE_INFO);

    lastSequenceNumber = getRuntimeContext().getState(lastSequenceNumberDescriptor);
}
Example #15
Source Project: flink   Author: flink-tpc-ds   File: ReplicatingDataSourceTest.java   License: Apache License 2.0

/**
 * Tests cross program with replicated data source behind map and filter.
 */
@Test
public void checkCrossWithReplicatedSourceInputBehindMap() {
    ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);

    TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
    ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
        new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(
            new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

    DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
    DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

    DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
        .map(new IdMap())
        .filter(new NoFilter())
        .cross(source2)
        .writeAsText("/some/newpath");

    Plan plan = env.createProgramPlan();

    // submit the plan to the compiler
    OptimizedPlan oPlan = compileNoStats(plan);

    // check the optimized Plan
    // when cross should have forward strategy on both sides
    SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
    DualInputPlanNode crossNode = (DualInputPlanNode) sinkNode.getPredecessor();

    ShipStrategyType crossIn1 = crossNode.getInput1().getShipStrategy();
    ShipStrategyType crossIn2 = crossNode.getInput2().getShipStrategy();

    Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn1);
    Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn2);
}
Example #16
Source Project: flink   Author: apache   File: FieldAccessorTest.java   License: Apache License 2.0

@Test(expected = CompositeType.InvalidFieldReferenceException.class)
public void testIllegalTupleInPojoInTuple() {
    Tuple2<String, Foo> t = Tuple2.of("aa", new Foo(8, Tuple2.of("ddd", 9L), (short) 2));
    TupleTypeInfo<Tuple2<String, Foo>> tpeInfo =
        (TupleTypeInfo<Tuple2<String, Foo>>) TypeExtractor.getForObject(t);

    FieldAccessorFactory.getAccessor(tpeInfo, "illegal.illegal.illegal", null);
}
Example #17
Source Project: Flink-CEPplus   Author: ljygz   File: GroupReduceDriverTest.java   License: Apache License 2.0

@Test
public void testAllReduceDriverMutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>> context =
            new TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>();

        List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData();
        TupleTypeInfo<Tuple2<StringValue, IntValue>> typeInfo =
            (TupleTypeInfo<Tuple2<StringValue, IntValue>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<StringValue, IntValue>> input =
            new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>(data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));
        TypeComparator<Tuple2<StringValue, IntValue>> comparator =
            typeInfo.createComparator(new int[]{0}, new boolean[] {true}, 0, new ExecutionConfig());

        GatheringCollector<Tuple2<StringValue, IntValue>> result =
            new GatheringCollector<Tuple2<StringValue, IntValue>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setComparator1(comparator);
        context.setCollector(result);
        context.setUdf(new ConcatSumMutableReducer());

        GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>> driver =
            new GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Object[] res = result.getList().toArray();
        Object[] expected = DriverTestData.createReduceMutableDataGroupedResult().toArray();

        DriverTestData.compareTupleArrays(expected, res);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #18
Source Project: flink   Author: apache   File: HadoopReduceCombineFunction.java   License: Apache License 2.0

@SuppressWarnings("unchecked")
@Override
public TypeInformation<Tuple2<KEYOUT, VALUEOUT>> getProducedType() {
    Class<KEYOUT> outKeyClass = (Class<KEYOUT>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 2);
    Class<VALUEOUT> outValClass = (Class<VALUEOUT>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 3);

    final TypeInformation<KEYOUT> keyTypeInfo = TypeExtractor.getForClass(outKeyClass);
    final TypeInformation<VALUEOUT> valueTypeInfo = TypeExtractor.getForClass(outValClass);
    return new TupleTypeInfo<>(keyTypeInfo, valueTypeInfo);
}
Example #19
Source Project: Flink-CEPplus   Author: ljygz   File: CrossOperator.java   License: Apache License 2.0

protected ProjectCross(DataSet<I1> input1, DataSet<I2> input2, int[] fields, boolean[] isFromFirst,
        TupleTypeInfo<OUT> returnType, CrossProjection<I1, I2> crossProjection, CrossHint hint) {
    super(input1, input2,
        new ProjectCrossFunction<I1, I2, OUT>(fields, isFromFirst,
            returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
        returnType, hint, "unknown");

    this.crossProjection = crossProjection;
}
Example #20
Source Project: flink   Author: flink-tpc-ds   File: FieldAccessor.java   License: Apache License 2.0

RecursiveTupleFieldAccessor(int pos, FieldAccessor<R, F> innerAccessor, TypeInformation<T> typeInfo) {
    checkNotNull(typeInfo, "typeInfo must not be null.");
    checkNotNull(innerAccessor, "innerAccessor must not be null.");

    int arity = ((TupleTypeInfo) typeInfo).getArity();
    if (pos < 0 || pos >= arity) {
        throw new CompositeType.InvalidFieldReferenceException(
            "Tried to select " + ((Integer) pos).toString() + ". field on \"" +
                typeInfo.toString() + "\", which is an invalid index.");
    }

    this.pos = pos;
    this.innerAccessor = innerAccessor;
    this.fieldType = innerAccessor.fieldType;
}
Example #21
Source Project: flink   Author: flink-tpc-ds   File: StreamProjectTest.java   License: Apache License 2.0

@Test
public void testProject() throws Exception {
    TypeInformation<Tuple5<Integer, String, Integer, String, Integer>> inType = TypeExtractor
        .getForObject(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4));

    int[] fields = new int[]{4, 4, 3};

    TupleSerializer<Tuple3<Integer, Integer, String>> serializer =
        new TupleTypeInfo<Tuple3<Integer, Integer, String>>(StreamProjection.extractFieldTypes(fields, inType))
            .createSerializer(new ExecutionConfig());
    @SuppressWarnings("unchecked")
    StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> operator =
        new StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(
            fields, serializer);

    OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> testHarness =
        new OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(operator);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

    testHarness.open();

    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4), initialTime + 1));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "s", 3, "c", 2), initialTime + 2));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "c", 2), initialTime + 3));
    testHarness.processWatermark(new Watermark(initialTime + 2));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "a", 7), initialTime + 4));

    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(4, 4, "b"), initialTime + 1));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 2));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 3));
    expectedOutput.add(new Watermark(initialTime + 2));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(7, 7, "a"), initialTime + 4));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Example #22
Source Project: Flink-CEPplus   Author: ljygz   File: JoinOperator.java   License: Apache License 2.0

protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint,
        int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType, JoinProjection<I1, I2> joinProj) {
    super(input1, input2, keys1, keys2,
        new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst,
            returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
        returnType, hint, Utils.getCallLocationName(4));

    this.joinProj = joinProj;
}
Example #23
Source Project: flink   Author: apache   File: ReplicatingDataSourceTest.java   License: Apache License 2.0

/**
 * Tests cross program with replicated data source.
 */
@Test
public void checkCrossWithReplicatedSourceInput() {
    ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);

    TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
    ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
        new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(
            new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

    DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
    DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

    DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
        .cross(source2)
        .writeAsText("/some/newpath");

    Plan plan = env.createProgramPlan();

    // submit the plan to the compiler
    OptimizedPlan oPlan = compileNoStats(plan);

    // check the optimized Plan
    // when cross should have forward strategy on both sides
    SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
    DualInputPlanNode crossNode = (DualInputPlanNode) sinkNode.getPredecessor();

    ShipStrategyType crossIn1 = crossNode.getInput1().getShipStrategy();
    ShipStrategyType crossIn2 = crossNode.getInput2().getShipStrategy();

    Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn1);
    Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn2);
}
Example #24
Source Project: flink   Author: flink-tpc-ds   File: CsvInputFormatTest.java   License: Apache License 2.0

@Test
public void testReadSparseWithPositionSetter() throws IOException {
    try {
        final String fileContent = "111|222|333|444|555|666|777|888|999|000|\n000|999|888|777|666|555|444|333|222|111|";
        final FileInputSplit split = createTempFile(fileContent);

        final TupleTypeInfo<Tuple3<Integer, Integer, Integer>> typeInfo =
            TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, Integer.class, Integer.class);
        final CsvInputFormat<Tuple3<Integer, Integer, Integer>> format =
            new TupleCsvInputFormat<Tuple3<Integer, Integer, Integer>>(PATH, typeInfo, new int[]{0, 3, 7});

        format.setFieldDelimiter("|");

        format.configure(new Configuration());
        format.open(split);

        Tuple3<Integer, Integer, Integer> result = new Tuple3<Integer, Integer, Integer>();

        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals(Integer.valueOf(111), result.f0);
        assertEquals(Integer.valueOf(444), result.f1);
        assertEquals(Integer.valueOf(888), result.f2);

        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals(Integer.valueOf(000), result.f0);
        assertEquals(Integer.valueOf(777), result.f1);
        assertEquals(Integer.valueOf(333), result.f2);

        result = format.nextRecord(result);
        assertNull(result);
        assertTrue(format.reachedEnd());
    } catch (Exception ex) {
        fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
    }
}
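The test above drives TupleCsvInputFormat directly with an array of included field positions. In application code, the usual route to the same sparse parsing is ExecutionEnvironment.readCsvFile with a field mask; a sketch with hypothetical paths:

// Select columns 0, 3 and 7 of a ten-column, '|'-delimited file
// ("1" in the mask keeps a column, "0" skips it).
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
DataSet<Tuple3<Integer, Integer, Integer>> sparse = env
    .readCsvFile("/some/path")
    .fieldDelimiter("|")
    .includeFields("1001000100")
    .types(Integer.class, Integer.class, Integer.class);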