org.apache.flink.api.java.tuple.Tuple Java Examples

The following examples show how to use org.apache.flink.api.java.tuple.Tuple. They are drawn from open source projects; the source file, originating project, and license are noted above each example.
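
Before the examples, a quick orientation: a Flink Tuple is a fixed-arity, strongly typed record whose fields can be read either through the generated f0..fN members or reflectively by position. A minimal sketch (the class name and values are made up for illustration):

import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;

public class TupleBasics {
	public static void main(String[] args) {
		// Typed access through the generated fields...
		Tuple2<String, Integer> pair = Tuple2.of("flink", 1);
		String word = pair.f0;
		int count = pair.f1;

		// ...and untyped, position-based access through the Tuple base class,
		// which is what the examples below rely on.
		Tuple generic = pair;
		generic.setField(count + 1, 1);
		System.out.println(word + " -> " + generic.getField(1) + ", arity " + generic.getArity());
	}
}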
Example #1
Source File: JoinOperator.java    From Flink-CEPplus with Apache License 2.0
public void join(T1 in1, T2 in2, Collector<R> out) {
	for (int i = 0; i < fields.length; i++) {
		if (isFromFirst[i]) {
			// A non-negative index selects that field from the first input tuple;
			// a negative index (or a null input) forwards the input object itself.
			if (fields[i] >= 0 && in1 != null) {
				outTuple.setField(((Tuple) in1).getField(fields[i]), i);
			} else {
				outTuple.setField(in1, i);
			}
		} else {
			// Same rule for the second input.
			if (fields[i] >= 0 && in2 != null) {
				outTuple.setField(((Tuple) in2).getField(fields[i]), i);
			} else {
				outTuple.setField(in2, i);
			}
		}
	}
	out.collect(outTuple);
}
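
At the API level, this field copying is what projectFirst/projectSecond produce on a join (see also Example #25 below). A hedged usage sketch; the class name, inputs, and field choices are made up:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class ProjectionJoinSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<Tuple2<Integer, String>> left = env.fromElements(Tuple2.of(1, "a"));
		DataSet<Tuple2<Integer, Double>> right = env.fromElements(Tuple2.of(1, 2.0));

		// Emits Tuple2<String, Double>: field 1 of the left input, then field 1 of the right.
		DataSet<Tuple2<String, Double>> joined = left.join(right)
			.where(0).equalTo(0)
			.projectFirst(1)
			.projectSecond(1);

		joined.print();
	}
}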
 
Example #2
Source File: DataSetUtils.java    From flink with Apache License 2.0
/**
 * Summarizes a DataSet of Tuples by collecting single-pass statistics for all columns.
 *
 * <p>Example usage:
 * <pre>
 * {@code
 * DataSet<Tuple3<Double, String, Boolean>> input = // [...]
 * Tuple3<NumericColumnSummary, StringColumnSummary, BooleanColumnSummary> summary = DataSetUtils.summarize(input);
 *
 * summary.f0.getStandardDeviation();
 * summary.f1.getMaxLength();
 * }
 * </pre>
 * @return the summary as a Tuple of the same width as the input rows
 */
public static <R extends Tuple, T extends Tuple> R summarize(DataSet<T> input) throws Exception {
	if (!input.getType().isTupleType()) {
		throw new IllegalArgumentException("summarize() is only implemented for DataSet's of Tuples");
	}
	final TupleTypeInfoBase<?> inType = (TupleTypeInfoBase<?>) input.getType();
	DataSet<TupleSummaryAggregator<R>> result = input.mapPartition(new MapPartitionFunction<T, TupleSummaryAggregator<R>>() {
		@Override
		public void mapPartition(Iterable<T> values, Collector<TupleSummaryAggregator<R>> out) throws Exception {
			TupleSummaryAggregator<R> aggregator = SummaryAggregatorFactory.create(inType);
			for (Tuple value : values) {
				aggregator.aggregate(value);
			}
			out.collect(aggregator);
		}
	}).reduce(new ReduceFunction<TupleSummaryAggregator<R>>() {
		@Override
		public TupleSummaryAggregator<R> reduce(TupleSummaryAggregator<R> agg1, TupleSummaryAggregator<R> agg2) throws Exception {
			agg1.combine(agg2);
			return agg1;
		}
	});
	return result.collect().get(0).result();
}
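
A hedged end-to-end sketch of calling summarize(); the class name and data are made up, and the summary types are assumed to live in org.apache.flink.api.java.summarize, as in the Flink releases these examples come from:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.summarize.NumericColumnSummary;
import org.apache.flink.api.java.summarize.StringColumnSummary;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.DataSetUtils;

public class SummarizeSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<Tuple2<Double, String>> input = env.fromElements(
			Tuple2.of(1.0, "a"), Tuple2.of(3.0, "bbb"));

		// One summary column per input column, same arity as the input rows.
		Tuple2<NumericColumnSummary<Double>, StringColumnSummary> summary =
			DataSetUtils.summarize(input);

		System.out.println(summary.f0.getMean());      // 2.0
		System.out.println(summary.f1.getMaxLength()); // 3
	}
}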
 
Example #3
Source File: CassandraSink.java    From Flink-CEPplus with Apache License 2.0
/**
 * Writes a DataStream into a Cassandra database.
 *
 * @param input input DataStream
 * @param <IN>  input type
 * @return CassandraSinkBuilder, to further configure the sink
 */
public static <IN> CassandraSinkBuilder<IN> addSink(DataStream<IN> input) {
	TypeInformation<IN> typeInfo = input.getType();
	if (typeInfo instanceof TupleTypeInfo) {
		DataStream<Tuple> tupleInput = (DataStream<Tuple>) input;
		return (CassandraSinkBuilder<IN>) new CassandraTupleSinkBuilder<>(tupleInput, tupleInput.getType(), tupleInput.getType().createSerializer(tupleInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof RowTypeInfo) {
		DataStream<Row> rowInput = (DataStream<Row>) input;
		return (CassandraSinkBuilder<IN>) new CassandraRowSinkBuilder(rowInput, rowInput.getType(), rowInput.getType().createSerializer(rowInput.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof PojoTypeInfo) {
		return new CassandraPojoSinkBuilder<>(input, input.getType(), input.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	if (typeInfo instanceof CaseClassTypeInfo) {
		DataStream<Product> productInput = (DataStream<Product>) input;
		return (CassandraSinkBuilder<IN>) new CassandraScalaProductSinkBuilder<>(productInput, productInput.getType(), productInput.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
	}
	throw new IllegalArgumentException("No support for the type of the given DataStream: " + input.getType());
}
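
A hedged sketch of the tuple branch in action; the host, keyspace, table, and class name are placeholders:

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.cassandra.CassandraSink;

public class CassandraTupleSinkSketch {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		DataStream<Tuple2<String, Long>> counts = env.fromElements(Tuple2.of("word", 1L));

		// The TupleTypeInfo branch above picks a CassandraTupleSinkBuilder here;
		// each tuple field binds positionally to one '?' in the query.
		CassandraSink.addSink(counts)
			.setHost("127.0.0.1")
			.setQuery("INSERT INTO example.wordcount (word, count) VALUES (?, ?);")
			.build();

		env.execute("Cassandra tuple sink sketch");
	}
}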
 
Example #4
Source File: DataStream.java    From Flink-CEPplus with Apache License 2.0
/**
 * Writes a DataStream to the file specified by the path parameter.
 *
 * <p>For every field of an element of the DataStream the result of {@link Object#toString()}
 * is written. This method can only be used on data streams of tuples.
 *
 * @param path
 *            the path pointing to the location the text file is written to
 * @param writeMode
 *            Controls the behavior for existing files. Options are
 *            NO_OVERWRITE and OVERWRITE.
 * @param rowDelimiter
 *            the delimiter for two rows
 * @param fieldDelimiter
 *            the delimiter for two fields
 *
 * @return the closed DataStream
 */
@SuppressWarnings("unchecked")
@PublicEvolving
public <X extends Tuple> DataStreamSink<T> writeAsCsv(
		String path,
		WriteMode writeMode,
		String rowDelimiter,
		String fieldDelimiter) {
	Preconditions.checkArgument(
		getType().isTupleType(),
		"The writeAsCsv() method can only be used on data streams of tuples.");

	CsvOutputFormat<X> of = new CsvOutputFormat<>(
		new Path(path),
		rowDelimiter,
		fieldDelimiter);

	if (writeMode != null) {
		of.setWriteMode(writeMode);
	}

	return writeUsingOutputFormat((OutputFormat<T>) of);
}
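
A hedged usage sketch of this overload; the path, delimiters, and class name are placeholders:

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileSystem.WriteMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class WriteAsCsvSketch {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.fromElements(Tuple2.of("a", 1), Tuple2.of("b", 2))
			// one line per tuple, fields joined by ';'
			.writeAsCsv("/tmp/out.csv", WriteMode.OVERWRITE, "\n", ";");
		env.execute("writeAsCsv sketch");
	}
}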
 
Example #5
Source File: DeeplyEqualsChecker.java    From flink with Apache License 2.0
private boolean deepEqualsTuple(Tuple tuple1, Tuple tuple2) {
	if (tuple1.getArity() != tuple2.getArity()) {
		return false;
	}

	for (int i = 0; i < tuple1.getArity(); i++) {
		Object o1 = tuple1.getField(i);
		Object o2 = tuple2.getField(i);

		if (!deepEquals(o1, o2)) {
			return false;
		}
	}

	return true;
}
 
Example #6
Source File: TripDurationToAverageTripDuration.java    From amazon-kinesis-analytics-taxi-consumer with Apache License 2.0
@Override
public void apply(Tuple tuple, TimeWindow timeWindow, Iterable<TripDuration> iterable, Collector<AverageTripDuration> collector) {
  if (Iterables.size(iterable) > 1) {
    String location = Iterables.get(iterable, 0).pickupGeoHash;
    String airportCode = Iterables.get(iterable, 0).airportCode;

    long sumDuration = StreamSupport
        .stream(iterable.spliterator(), false)
        .mapToLong(trip -> trip.tripDuration)
        .sum();

    double avgDuration = (double) sumDuration / Iterables.size(iterable);

    collector.collect(new AverageTripDuration(location, airportCode, sumDuration, avgDuration, timeWindow.getEnd()));
  }
}
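
For context, a WindowFunction receives its key as a Tuple when the stream was keyed by field index. A hedged, generified sketch of wiring such a function into a keyed window (window size, data, and class name are made up):

import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class WindowApplySketch {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.fromElements(Tuple2.of("a", 1L), Tuple2.of("a", 2L))
			.keyBy(0) // index-based keyBy => the key arrives as a Tuple, as in apply() above
			.timeWindow(Time.minutes(1))
			.apply(new WindowFunction<Tuple2<String, Long>, String, Tuple, TimeWindow>() {
				@Override
				public void apply(Tuple key, TimeWindow window, Iterable<Tuple2<String, Long>> input,
						Collector<String> out) {
					long count = 0;
					for (Tuple2<String, Long> ignored : input) {
						count++;
					}
					out.collect(key.getField(0) + ": " + count);
				}
			})
			.print();
		env.execute("window apply sketch");
	}
}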
 
Example #7
Source File: StreamProjection.java    From Flink-CEPplus with Apache License 2.0
protected StreamProjection(DataStream<IN> dataStream, int[] fieldIndexes) {
	if (!dataStream.getType().isTupleType()) {
		throw new RuntimeException("Only Tuple DataStreams can be projected");
	}
	if (fieldIndexes.length == 0) {
		throw new IllegalArgumentException("project() needs to select at least one (1) field.");
	} else if (fieldIndexes.length > Tuple.MAX_ARITY - 1) {
		throw new IllegalArgumentException(
				"project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
	}

	int maxFieldIndex = (dataStream.getType()).getArity();
	for (int i = 0; i < fieldIndexes.length; i++) {
		Preconditions.checkElementIndex(fieldIndexes[i], maxFieldIndex);
	}

	this.dataStream = dataStream;
	this.fieldIndexes = fieldIndexes;
}
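
StreamProjection backs DataStream#project; a hedged sketch of the user-facing call (class name and values are made up):

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class StreamProjectSketch {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		DataStream<Tuple3<Integer, String, Double>> in =
			env.fromElements(Tuple3.of(1, "a", 2.0));

		// Keeps fields 2 and 0, in that order, as a Tuple2<Double, Integer>.
		DataStream<Tuple2<Double, Integer>> projected = in.project(2, 0);

		projected.print();
		env.execute("stream project sketch");
	}
}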
 
Example #8
Source File: TupleSummaryAggregator.java    From flink with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public R result() {
	try {
		Class tupleClass = Tuple.getTupleClass(columnAggregators.length);
		R tuple = (R) tupleClass.newInstance();
		for (int i = 0; i < columnAggregators.length; i++) {
			tuple.setField(columnAggregators[i].result(), i);
		}
		return tuple;
	}
	catch (InstantiationException | IllegalAccessException e) {
		throw new RuntimeException("Unexpected error instantiating Tuple class for aggregation results", e);

	}
}
 
Example #9
Source File: KafkaShuffleTestBase.java    From flink with Apache License 2.0
static KeyedStream<Tuple3<Integer, Long, Integer>, Tuple> createKafkaShuffle(
		StreamExecutionEnvironment env,
		String topic,
		int numElementsPerProducer,
		int producerParallelism,
		TimeCharacteristic timeCharacteristic,
		int numberOfPartitions) {
	return createKafkaShuffle(
		env,
		topic,
		numElementsPerProducer,
		producerParallelism,
		timeCharacteristic,
		numberOfPartitions,
		false);
}
 
Example #10
Source File: WordCloudApply.java    From wingcloud with Apache License 2.0
@Override
public void apply(Tuple tuple, TimeWindow window, Iterable<Tuple2<Long, String>> input, Collector<String> out) throws Exception {
    String shoptype = tuple.getField(0).toString();

    Iterator<Tuple2<Long, String>> it = input.iterator();

    // counter
    Long count = 0L;
    while (it.hasNext()) {
        it.next();
        count++;
    }
    System.err.println(Thread.currentThread().getId() + " [word cloud] window fired, record count: " + count);
    // assemble the result
    Tuple2<String, Long> res = new Tuple2<>(shoptype, count);
    String resstring = "{\"shoptype\":\"" + shoptype + "\",\"count\":\"" + count + "\"}";
    System.err.println("[word cloud] window data: " + res);
    out.collect(resstring);
}
 
Example #11
Source File: ProjectOperator.java    From flink with Apache License 2.0
public Projection(DataSet<T> ds, int[] fieldIndexes) {

	if (!(ds.getType() instanceof TupleTypeInfo)) {
		throw new UnsupportedOperationException("project() can only be applied to DataSets of Tuples.");
	}

	if (fieldIndexes.length == 0) {
		throw new IllegalArgumentException("project() needs to select at least one (1) field.");
	} else if (fieldIndexes.length > Tuple.MAX_ARITY - 1) {
		throw new IllegalArgumentException(
			"project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
	}

	int maxFieldIndex = ds.getType().getArity();
	for (int fieldIndex : fieldIndexes) {
		Preconditions.checkElementIndex(fieldIndex, maxFieldIndex);
	}

	this.ds = ds;
	this.fieldIndexes = fieldIndexes;
}
 
Example #12
Source File: KeySelectorUtil.java    From flink with Apache License 2.0
public static <X> ArrayKeySelector<X> getSelectorForArray(int[] positions, TypeInformation<X> typeInfo) {
	if (positions == null || positions.length == 0 || positions.length > Tuple.MAX_ARITY) {
		throw new IllegalArgumentException("Array keys must have between 1 and " + Tuple.MAX_ARITY + " fields.");
	}

	TypeInformation<?> componentType;

	if (typeInfo instanceof BasicArrayTypeInfo) {
		BasicArrayTypeInfo<X, ?>  arrayInfo = (BasicArrayTypeInfo<X, ?>) typeInfo;
		componentType = arrayInfo.getComponentInfo();
	}
	else if (typeInfo instanceof PrimitiveArrayTypeInfo) {
		PrimitiveArrayTypeInfo<X> arrayType = (PrimitiveArrayTypeInfo<X>) typeInfo;
		componentType = arrayType.getComponentType();
	}
	else {
		throw new IllegalArgumentException("This method only supports arrays of primitives and boxed primitives.");
	}

	TypeInformation<?>[] primitiveInfos = new TypeInformation<?>[positions.length];
	Arrays.fill(primitiveInfos, componentType);

	return new ArrayKeySelector<>(positions, new TupleTypeInfo<>(primitiveInfos));
}
 
Example #13
Source File: DriverTestData.java    From flink with Apache License 2.0
public static final void compareTupleArrays(Object[] expected, Object[] found) {
	if (expected.length != found.length) {
		Assert.assertEquals("Length of result is wrong", expected.length, found.length);
	}
	
	for (int i = 0; i < expected.length; i++) {
		Tuple v1 = (Tuple) expected[i];
		Tuple v2 = (Tuple) found[i];
		
		for (int k = 0; k < v1.getArity(); k++) {
			Object o1 = v1.getField(k);
			Object o2 = v2.getField(k);
			Assert.assertEquals(o1, o2);
		}
	}
}
 
Example #14
Source File: PythonPlanReceiver.java    From Flink-CEPplus with Apache License 2.0
@Override
public Tuple deserialize(boolean normalized) throws IOException {
	Tuple result = createTuple(deserializer.length);
	for (int x = 0; x < result.getArity(); x++) {
		result.setField(deserializer[x].deserialize(normalized), x);
	}
	return result;
}
 
Example #15
Source File: PythonReceiver.java    From Flink-CEPplus with Apache License 2.0
public static Tuple createTuple(int size) {
	try {
		return Tuple.getTupleClass(size).newInstance();
	} catch (InstantiationException | IllegalAccessException e) {
		throw new RuntimeException(e);
	}
}
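
One caveat: Class#newInstance, used above, has been deprecated since Java 9. A hedged equivalent of the helper via getDeclaredConstructor() (the class name is made up):

import org.apache.flink.api.java.tuple.Tuple;

public final class TupleFactory {

	// Same behavior as createTuple(int) above, without the deprecated call.
	public static Tuple createTuple(int size) {
		try {
			return Tuple.getTupleClass(size).getDeclaredConstructor().newInstance();
		} catch (ReflectiveOperationException e) {
			throw new RuntimeException(e);
		}
	}
}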
 
Example #16
Source File: SemanticPropertiesProjectionTest.java    From flink with Apache License 2.0
@Test
public void testCrossProjectionSemProps1() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> tupleDs = env.fromCollection(emptyTupleData, tupleTypeInfo);

	tupleDs.cross(tupleDs)
			.projectFirst(2, 3)
			.projectSecond(1, 4)
			.output(new DiscardingOutputFormat<Tuple>());

	Plan plan = env.createProgramPlan();

	GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
	CrossOperatorBase<?, ?, ?, ?> projectCrossOperator = ((CrossOperatorBase<?, ?, ?, ?>) sink.getInput());

	DualInputSemanticProperties props = projectCrossOperator.getSemanticProperties();

	assertEquals(1, props.getForwardingTargetFields(0, 2).size());
	assertEquals(1, props.getForwardingTargetFields(0, 3).size());
	assertEquals(1, props.getForwardingTargetFields(1, 1).size());
	assertEquals(1, props.getForwardingTargetFields(1, 4).size());

	assertTrue(props.getForwardingTargetFields(0, 2).contains(0));
	assertTrue(props.getForwardingTargetFields(0, 3).contains(1));
	assertTrue(props.getForwardingTargetFields(1, 1).contains(2));
	assertTrue(props.getForwardingTargetFields(1, 4).contains(3));
}
 
Example #17
Source File: PythonSender.java    From Flink-CEPplus with Apache License 2.0
@Override
public void serializeInternal(Tuple2<Tuple, byte[]> value) {
	int keySize = 0;
	for (int x = 0; x < value.f0.getArity(); x++) {
		keySize += ((byte[]) value.f0.getField(x)).length;
	}
	buffer = ByteBuffer.allocate(5 + keySize + value.f1.length);
	buffer.put(TYPE_KEY_VALUE);
	buffer.put((byte) value.f0.getArity());
	for (int x = 0; x < value.f0.getArity(); x++) {
		buffer.put((byte[]) value.f0.getField(x));
	}
	buffer.put(value.f1);
}
 
Example #18
Source File: KeySelectorUtil.java    From flink with Apache License 2.0
@Override
public TypeInformation<Tuple> getProducedType() {
	if (returnType == null) {
		throw new IllegalStateException("The return type information is not available after serialization");
	}
	return returnType;
}
 
Example #19
Source File: WindowTranslationTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("rawtypes")
public void testReduceWithEvictorAndProcessFunction() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

	DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

	DummyReducer reducer = new DummyReducer();

	DataStream<Tuple2<String, Integer>> window1 = source
			.keyBy(0)
			.window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
			.evictor(CountEvictor.of(100))
			.reduce(
					reducer,
					new ProcessWindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple, TimeWindow>() {
						@Override
						public void process(
								Tuple tuple,
								Context context,
								Iterable<Tuple2<String, Integer>> elements,
								Collector<Tuple2<String, Integer>> out) throws Exception {
							for (Tuple2<String, Integer> in : elements) {
								out.collect(in);
							}
						}
					});

	OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform = (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
	OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
	Assert.assertTrue(operator instanceof EvictingWindowOperator);
	EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?> winOperator = (EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?>) operator;
	Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
	Assert.assertTrue(winOperator.getEvictor() instanceof CountEvictor);
	Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
	Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

	processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
 
Example #20
Source File: FieldsFromTupleTest.java    From flink with Apache License 2.0
@Test
public void testUserSpecifiedOrder() throws InstantiationException, IllegalAccessException {
	Tuple currentTuple = (Tuple) CLASSES[Tuple.MAX_ARITY - 1].newInstance();
	for (int i = 0; i < Tuple.MAX_ARITY; i++) {
		currentTuple.setField(testDouble[i], i);
	}

	double[] expected = { testDouble[5], testDouble[3], testDouble[6], testDouble[7],
			testDouble[0] };
	arrayEqualityCheck(expected, new FieldsFromTuple(5, 3, 6, 7, 0).extract(currentTuple));

	double[] expected2 = { testDouble[0], testDouble[Tuple.MAX_ARITY - 1] };
	arrayEqualityCheck(expected2,
			new FieldsFromTuple(0, Tuple.MAX_ARITY - 1).extract(currentTuple));

	double[] expected3 = { testDouble[Tuple.MAX_ARITY - 1], testDouble[0] };
	arrayEqualityCheck(expected3,
			new FieldsFromTuple(Tuple.MAX_ARITY - 1, 0).extract(currentTuple));

	double[] expected4 = { testDouble[13], testDouble[4], testDouble[5], testDouble[4],
			testDouble[2], testDouble[8], testDouble[6], testDouble[2], testDouble[8],
			testDouble[3], testDouble[5], testDouble[2], testDouble[16], testDouble[4],
			testDouble[3], testDouble[2], testDouble[6], testDouble[4], testDouble[7],
			testDouble[4], testDouble[2], testDouble[8], testDouble[7], testDouble[2] };
	arrayEqualityCheck(expected4, new FieldsFromTuple(13, 4, 5, 4, 2, 8, 6, 2, 8, 3, 5, 2, 16,
			4, 3, 2, 6, 4, 7, 4, 2, 8, 7, 2).extract(currentTuple));
}
 
Example #21
Source File: CSV.java    From flink with Apache License 2.0
@Override
public void write(String executionName, PrintStream out, DataSet<T> data) throws Exception {
	if (Tuple.class.isAssignableFrom(data.getType().getTypeClass())) {
		data
			.writeAsCsv(filename.getValue(), lineDelimiter.getValue(), fieldDelimiter.getValue())
				.name("CSV: " + filename.getValue());
	} else {
		// line and field delimiters have no effect when writing custom POJO result types
		data
			.writeAsText(filename.getValue())
				.name("CSV: " + filename.getValue());
	}

	data.getExecutionEnvironment().execute();
}
 
Example #22
Source File: KeySelectorUtil.java    From Flink-CEPplus with Apache License 2.0
@Override
public TypeInformation<Tuple> getProducedType() {
	if (returnType == null) {
		throw new IllegalStateException("The return type information is not available after serialization");
	}
	return returnType;
}
 
Example #23
Source File: RideCount.java    From flink-training-exercises with Apache License 2.0
public static void main(String[] args) throws Exception {

	ParameterTool params = ParameterTool.fromArgs(args);
	final String input = params.get("input", ExerciseBase.pathToRideData);

	final int maxEventDelay = 60;       // events are out of order by at most 60 seconds
	final int servingSpeedFactor = 600; // events of 10 minutes are served every second

	// set up the streaming execution environment
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	// start the data generator
	DataStream<TaxiRide> rides = env.addSource(new TaxiRideSource(input, maxEventDelay, servingSpeedFactor));

	// map each ride to a tuple of (driverId, 1)
	DataStream<Tuple2<Long, Long>> tuples = rides.map(new MapFunction<TaxiRide, Tuple2<Long, Long>>() {
		@Override
		public Tuple2<Long, Long> map(TaxiRide ride) throws Exception {
			return new Tuple2<>(ride.driverId, 1L);
		}
	});

	// partition the stream by the driverId
	KeyedStream<Tuple2<Long, Long>, Tuple> keyedByDriverId = tuples.keyBy(0);

	// count the rides for each driver
	DataStream<Tuple2<Long, Long>> rideCounts = keyedByDriverId.sum(1);

	// we could, in fact, print out any or all of these streams
	rideCounts.print();

	// run the pipeline
	env.execute("Ride Count");
}
 
Example #24
Source File: ProjectOperator.java    From flink with Apache License 2.0
/**
 * Chooses a projectTupleX according to the length of
 * {@link org.apache.flink.api.java.operators.ProjectOperator.Projection#fieldIndexes}.
 *
 * @return The projected DataSet.
 * @see org.apache.flink.api.java.operators.ProjectOperator.Projection
 */
@SuppressWarnings("unchecked")
public <OUT extends Tuple> ProjectOperator<T, OUT> projectTupleX() {
	ProjectOperator<T, OUT> projOperator;

	switch (fieldIndexes.length) {
	case 1: projOperator = (ProjectOperator<T, OUT>) projectTuple1(); break;
	case 2: projOperator = (ProjectOperator<T, OUT>) projectTuple2(); break;
	case 3: projOperator = (ProjectOperator<T, OUT>) projectTuple3(); break;
	case 4: projOperator = (ProjectOperator<T, OUT>) projectTuple4(); break;
	case 5: projOperator = (ProjectOperator<T, OUT>) projectTuple5(); break;
	case 6: projOperator = (ProjectOperator<T, OUT>) projectTuple6(); break;
	case 7: projOperator = (ProjectOperator<T, OUT>) projectTuple7(); break;
	case 8: projOperator = (ProjectOperator<T, OUT>) projectTuple8(); break;
	case 9: projOperator = (ProjectOperator<T, OUT>) projectTuple9(); break;
	case 10: projOperator = (ProjectOperator<T, OUT>) projectTuple10(); break;
	case 11: projOperator = (ProjectOperator<T, OUT>) projectTuple11(); break;
	case 12: projOperator = (ProjectOperator<T, OUT>) projectTuple12(); break;
	case 13: projOperator = (ProjectOperator<T, OUT>) projectTuple13(); break;
	case 14: projOperator = (ProjectOperator<T, OUT>) projectTuple14(); break;
	case 15: projOperator = (ProjectOperator<T, OUT>) projectTuple15(); break;
	case 16: projOperator = (ProjectOperator<T, OUT>) projectTuple16(); break;
	case 17: projOperator = (ProjectOperator<T, OUT>) projectTuple17(); break;
	case 18: projOperator = (ProjectOperator<T, OUT>) projectTuple18(); break;
	case 19: projOperator = (ProjectOperator<T, OUT>) projectTuple19(); break;
	case 20: projOperator = (ProjectOperator<T, OUT>) projectTuple20(); break;
	case 21: projOperator = (ProjectOperator<T, OUT>) projectTuple21(); break;
	case 22: projOperator = (ProjectOperator<T, OUT>) projectTuple22(); break;
	case 23: projOperator = (ProjectOperator<T, OUT>) projectTuple23(); break;
	case 24: projOperator = (ProjectOperator<T, OUT>) projectTuple24(); break;
	case 25: projOperator = (ProjectOperator<T, OUT>) projectTuple25(); break;
	default: throw new IllegalStateException("Excessive arity in tuple.");
	}

	return projOperator;
}
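
projectTupleX() is the dispatch target of DataSet#project once the number of selected fields is known; a hedged sketch of the call site (class name and values are made up):

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;

public class DataSetProjectSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<Tuple3<Integer, String, Double>> ds =
			env.fromElements(Tuple3.of(1, "a", 2.0));

		// Two field indexes, so the switch above lands in case 2 / projectTuple2().
		DataSet<Tuple2<String, Integer>> projected = ds.project(1, 0);

		projected.print();
	}
}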
 
Example #25
Source File: SemanticPropertiesProjectionTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testJoinProjectionSemProps1() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple5<Integer, Long, String, Long, Integer>> tupleDs = env.fromCollection(emptyTupleData, tupleTypeInfo);

	tupleDs.join(tupleDs).where(0).equalTo(0)
			.projectFirst(2, 3)
			.projectSecond(1, 4)
			.output(new DiscardingOutputFormat<Tuple>());

	Plan plan = env.createProgramPlan();

	GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
	InnerJoinOperatorBase<?, ?, ?, ?> projectJoinOperator = ((InnerJoinOperatorBase<?, ?, ?, ?>) sink.getInput());

	DualInputSemanticProperties props = projectJoinOperator.getSemanticProperties();

	assertEquals(1, props.getForwardingTargetFields(0, 2).size());
	assertEquals(1, props.getForwardingTargetFields(0, 3).size());
	assertEquals(1, props.getForwardingTargetFields(1, 1).size());
	assertEquals(1, props.getForwardingTargetFields(1, 4).size());

	assertTrue(props.getForwardingTargetFields(0, 2).contains(0));
	assertTrue(props.getForwardingTargetFields(0, 3).contains(1));
	assertTrue(props.getForwardingTargetFields(1, 1).contains(2));
	assertTrue(props.getForwardingTargetFields(1, 4).contains(3));
}
 
Example #26
Source File: SemanticPropertiesProjectionTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testProjectionSemProps2() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple4<Integer, Tuple3<String, Integer, Long>, Tuple2<Long, Long>, String>> tupleDs = env.fromCollection(emptyNestedTupleData, nestedTupleTypeInfo);

	tupleDs.project(2, 3, 1, 2).output(new DiscardingOutputFormat<Tuple>());

	Plan plan = env.createProgramPlan();

	GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
	PlanProjectOperator<?, ?> projectOperator = ((PlanProjectOperator<?, ?>) sink.getInput());

	SingleInputSemanticProperties props = projectOperator.getSemanticProperties();

	assertNotNull(props.getForwardingTargetFields(0, 0));
	assertEquals(1, props.getForwardingTargetFields(0, 1).size());
	assertEquals(1, props.getForwardingTargetFields(0, 2).size());
	assertEquals(1, props.getForwardingTargetFields(0, 3).size());
	assertEquals(2, props.getForwardingTargetFields(0, 4).size());
	assertEquals(2, props.getForwardingTargetFields(0, 5).size());
	assertEquals(1, props.getForwardingTargetFields(0, 6).size());
	assertEquals(0, props.getForwardingTargetFields(0, 0).size());

	assertTrue(props.getForwardingTargetFields(0, 4).contains(0));
	assertTrue(props.getForwardingTargetFields(0, 5).contains(1));
	assertTrue(props.getForwardingTargetFields(0, 6).contains(2));
	assertTrue(props.getForwardingTargetFields(0, 1).contains(3));
	assertTrue(props.getForwardingTargetFields(0, 2).contains(4));
	assertTrue(props.getForwardingTargetFields(0, 3).contains(5));
	assertTrue(props.getForwardingTargetFields(0, 4).contains(6));
	assertTrue(props.getForwardingTargetFields(0, 5).contains(7));
}
 
Example #27
Source File: PeriodicStreamingJob.java    From Flink-CEPplus with Apache License 2.0
@Override
public void run(SourceContext<Tuple> ctx) throws Exception {
	long offsetMs = offsetSeconds * 1000L;

	while (ms < durationMs) {
		synchronized (ctx.getCheckpointLock()) {
			ctx.collect(new Tuple2<>(ms + offsetMs, "key"));
		}
		ms += sleepMs;
		Thread.sleep(sleepMs);
	}
}
 
Example #28
Source File: UdfStreamOperatorCheckpointingITCase.java    From flink with Apache License 2.0
/**
 * Assembles a stream of a grouping field and some long data. Applies reduce functions
 * on this stream.
 */
@Override
public void testProgram(StreamExecutionEnvironment env) {

	// base stream
	KeyedStream<Tuple2<Integer, Long>, Tuple> stream = env.addSource(new StatefulMultipleSequence())
			.keyBy(0);

	stream
			// testing built-in aggregate
			.min(1)
			// failure generation
			.map(new OnceFailingIdentityMapFunction(NUM_INPUT))
			.keyBy(0)
			.addSink(new MinEvictingQueueSink());

	stream
			// testing UDF reducer
			.reduce(new ReduceFunction<Tuple2<Integer, Long>>() {
				@Override
				public Tuple2<Integer, Long> reduce(
						Tuple2<Integer, Long> value1, Tuple2<Integer, Long> value2) throws Exception {
					return Tuple2.of(value1.f0, value1.f1 + value2.f1);
				}
			})
			.keyBy(0)
			.addSink(new SumEvictingQueueSink());

	stream
			// testing UDF folder
			.fold(Tuple2.of(0, 0L), new FoldFunction<Tuple2<Integer, Long>, Tuple2<Integer, Long>>() {
				@Override
				public Tuple2<Integer, Long> fold(
						Tuple2<Integer, Long> accumulator, Tuple2<Integer, Long> value) throws Exception {
					return Tuple2.of(value.f0, accumulator.f1 + value.f1);
				}
			})
			.keyBy(0)
			.addSink(new FoldEvictingQueueSink());
}
 
Example #29
Source File: FieldFromTupleTest.java    From flink with Apache License 2.0
@Test
public void testSingleFieldExtraction() throws InstantiationException, IllegalAccessException {
	// extract single fields
	for (int i = 0; i < Tuple.MAX_ARITY; i++) {
		Tuple current = (Tuple) CLASSES[i].newInstance();
		for (int j = 0; j < i; j++) {
			current.setField(testStrings[j], j);
		}
		for (int j = 0; j < i; j++) {
			assertEquals(testStrings[j], new FieldFromTuple<String>(j).extract(current));
		}
	}
}